Seurat/0000755000176200001440000000000014170340713011515 5ustar liggesusersSeurat/NAMESPACE0000644000176200001440000004732114170106730012742 0ustar liggesusers# Generated by roxygen2: do not edit by hand S3method("SCTResults<-",SCTAssay) S3method("SCTResults<-",SCTModel) S3method("[",SlideSeq) S3method("[",VisiumV1) S3method("levels<-",SCTAssay) S3method(AnnotateAnchors,IntegrationAnchorSet) S3method(AnnotateAnchors,TransferAnchorSet) S3method(AnnotateAnchors,default) S3method(Cells,SCTModel) S3method(Cells,STARmap) S3method(Cells,SlideSeq) S3method(Cells,VisiumV1) S3method(FindClusters,Seurat) S3method(FindClusters,default) S3method(FindMarkers,Assay) S3method(FindMarkers,DimReduc) S3method(FindMarkers,SCTAssay) S3method(FindMarkers,Seurat) S3method(FindMarkers,default) S3method(FindNeighbors,Assay) S3method(FindNeighbors,Seurat) S3method(FindNeighbors,default) S3method(FindNeighbors,dist) S3method(FindSpatiallyVariableFeatures,Assay) S3method(FindSpatiallyVariableFeatures,Seurat) S3method(FindSpatiallyVariableFeatures,default) S3method(FindVariableFeatures,Assay) S3method(FindVariableFeatures,SCTAssay) S3method(FindVariableFeatures,Seurat) S3method(FindVariableFeatures,default) S3method(FoldChange,Assay) S3method(FoldChange,DimReduc) S3method(FoldChange,Seurat) S3method(FoldChange,default) S3method(GetAssay,Seurat) S3method(GetImage,STARmap) S3method(GetImage,SlideSeq) S3method(GetImage,VisiumV1) S3method(GetTissueCoordinates,STARmap) S3method(GetTissueCoordinates,SlideSeq) S3method(GetTissueCoordinates,VisiumV1) S3method(HVFInfo,SCTAssay) S3method(IntegrateEmbeddings,IntegrationAnchorSet) S3method(IntegrateEmbeddings,TransferAnchorSet) S3method(MappingScore,AnchorSet) S3method(MappingScore,default) S3method(NormalizeData,Assay) S3method(NormalizeData,Seurat) S3method(NormalizeData,default) S3method(ProjectUMAP,DimReduc) S3method(ProjectUMAP,Seurat) S3method(ProjectUMAP,default) S3method(Radius,STARmap) S3method(Radius,SlideSeq) S3method(Radius,VisiumV1) S3method(RenameCells,SCTAssay) S3method(RenameCells,STARmap) S3method(RenameCells,SlideSeq) S3method(RenameCells,VisiumV1) S3method(RunCCA,Seurat) S3method(RunCCA,default) S3method(RunICA,Assay) S3method(RunICA,Seurat) S3method(RunICA,default) S3method(RunLDA,Assay) S3method(RunLDA,Seurat) S3method(RunLDA,default) S3method(RunPCA,Assay) S3method(RunPCA,Seurat) S3method(RunPCA,default) S3method(RunSLSI,Assay) S3method(RunSLSI,Seurat) S3method(RunSLSI,default) S3method(RunSPCA,Assay) S3method(RunSPCA,Seurat) S3method(RunSPCA,default) S3method(RunTSNE,DimReduc) S3method(RunTSNE,Seurat) S3method(RunTSNE,dist) S3method(RunTSNE,matrix) S3method(RunUMAP,Graph) S3method(RunUMAP,Neighbor) S3method(RunUMAP,Seurat) S3method(RunUMAP,default) S3method(SCTResults,SCTAssay) S3method(SCTResults,SCTModel) S3method(SCTResults,Seurat) S3method(ScaleData,Assay) S3method(ScaleData,Seurat) S3method(ScaleData,default) S3method(ScaleFactors,VisiumV1) S3method(ScoreJackStraw,DimReduc) S3method(ScoreJackStraw,JackStrawData) S3method(ScoreJackStraw,Seurat) S3method(as.CellDataSet,Seurat) S3method(as.Seurat,CellDataSet) S3method(as.Seurat,SingleCellExperiment) S3method(as.SingleCellExperiment,Seurat) S3method(as.data.frame,Matrix) S3method(as.sparse,H5Group) S3method(dim,STARmap) S3method(dim,SlideSeq) S3method(dim,VisiumV1) S3method(levels,SCTAssay) S3method(merge,SCTAssay) S3method(subset,AnchorSet) S3method(subset,SCTAssay) S3method(subset,STARmap) S3method(subset,SlideSeq) S3method(subset,VisiumV1) export("DefaultAssay<-") export("Idents<-") export("Index<-") 
export("JS<-") export("Key<-") export("Loadings<-") export("Misc<-") export("Project<-") export("SCTResults<-") export("Tool<-") export("VariableFeatures<-") export(AddAzimuthResults) export(AddMetaData) export(AddModuleScore) export(AggregateExpression) export(AnnotateAnchors) export(Assays) export(AugmentPlot) export(AutoPointSize) export(AverageExpression) export(BGTextColor) export(BarcodeInflectionsPlot) export(BlackAndWhite) export(BlueAndRed) export(BoldTitle) export(BuildClusterTree) export(CalcPerturbSig) export(CalculateBarcodeInflections) export(CaseMatch) export(CellCycleScoring) export(CellScatter) export(CellSelector) export(Cells) export(CellsByIdentities) export(CenterTitle) export(CollapseEmbeddingOutliers) export(CollapseSpeciesExpressionMatrix) export(ColorDimSplit) export(CombinePlots) export(Command) export(CreateAssayObject) export(CreateDimReducObject) export(CreateSCTAssayObject) export(CreateSeuratObject) export(CustomDistance) export(CustomPalette) export(DEenrichRPlot) export(DarkTheme) export(DefaultAssay) export(DietSeurat) export(DimHeatmap) export(DimPlot) export(DiscretePalette) export(Distances) export(DoHeatmap) export(DotPlot) export(ElbowPlot) export(Embeddings) export(ExpMean) export(ExpSD) export(ExpVar) export(FastRowScale) export(FeatureLocator) export(FeaturePlot) export(FeatureScatter) export(FetchData) export(FilterSlideSeq) export(FindAllMarkers) export(FindClusters) export(FindConservedMarkers) export(FindIntegrationAnchors) export(FindMarkers) export(FindMultiModalNeighbors) export(FindNeighbors) export(FindSpatiallyVariableFeatures) export(FindSubCluster) export(FindTransferAnchors) export(FindVariableFeatures) export(FoldChange) export(FontSize) export(GeneSymbolThesarus) export(GetAssay) export(GetAssayData) export(GetImage) export(GetIntegrationData) export(GetResidual) export(GetTissueCoordinates) export(GetTransferPredictions) export(GroupCorrelation) export(GroupCorrelationPlot) export(HTODemux) export(HTOHeatmap) export(HVFInfo) export(HoverLocator) export(IFeaturePlot) export(ISpatialDimPlot) export(ISpatialFeaturePlot) export(Idents) export(Images) export(Index) export(Indices) export(IntegrateData) export(IntegrateEmbeddings) export(Intensity) export(IsGlobal) export(JS) export(JackStraw) export(JackStrawPlot) export(Key) export(L2CCA) export(L2Dim) export(LabelClusters) export(LabelPoints) export(LinkedDimPlot) export(LinkedFeaturePlot) export(Load10X_Spatial) export(LoadAnnoyIndex) export(LoadSTARmap) export(Loadings) export(LocalStruct) export(LogNormalize) export(LogSeuratCommand) export(LogVMR) export(Luminance) export(MULTIseqDemux) export(MapQuery) export(MappingScore) export(MetaFeature) export(MinMax) export(Misc) export(MixingMetric) export(MixscapeHeatmap) export(MixscapeLDA) export(NNPlot) export(Neighbors) export(NoAxes) export(NoGrid) export(NoLegend) export(NormalizeData) export(PCAPlot) export(PCASigGenes) export(PCHeatmap) export(PercentAbove) export(PercentageFeatureSet) export(PlotClusterTree) export(PlotPerturbScore) export(PolyDimPlot) export(PolyFeaturePlot) export(PredictAssay) export(PrepLDA) export(PrepSCTFindMarkers) export(PrepSCTIntegration) export(Project) export(ProjectDim) export(ProjectUMAP) export(PurpleAndYellow) export(Radius) export(Read10X) export(Read10X_Image) export(Read10X_h5) export(ReadMtx) export(ReadParseBio) export(ReadSTARsolo) export(ReadSlideSeq) export(Reductions) export(RegroupIdents) export(RelativeCounts) export(RenameCells) export(RenameIdents) export(ReorderIdent) 
export(RestoreLegend) export(RidgePlot) export(RotatedAxis) export(RowMergeSparseMatrices) export(RunCCA) export(RunICA) export(RunLDA) export(RunMarkVario) export(RunMixscape) export(RunMoransI) export(RunPCA) export(RunSLSI) export(RunSPCA) export(RunTSNE) export(RunUMAP) export(SCTResults) export(SCTransform) export(SVFInfo) export(SampleUMI) export(SaveAnnoyIndex) export(ScaleData) export(ScaleFactors) export(ScoreJackStraw) export(SelectIntegrationFeatures) export(SetAssayData) export(SetIdent) export(SetIntegrationData) export(SetQuantile) export(SeuratAxes) export(SeuratTheme) export(SingleCorPlot) export(SingleDimPlot) export(SingleExIPlot) export(SingleImageMap) export(SingleRasterMap) export(SingleSpatialPlot) export(SpatialDimPlot) export(SpatialFeaturePlot) export(SpatialPlot) export(SpatialTheme) export(SpatiallyVariableFeatures) export(SplitObject) export(StashIdent) export(Stdev) export(SubsetByBarcodeInflections) export(TSNEPlot) export(Tool) export(TopCells) export(TopFeatures) export(TopNeighbors) export(TransferData) export(UMAPPlot) export(UpdateSCTAssays) export(UpdateSeuratObject) export(UpdateSymbolList) export(VariableFeaturePlot) export(VariableFeatures) export(VizDimLoadings) export(VlnPlot) export(WhichCells) export(WhiteBackground) export(as.CellDataSet) export(as.Graph) export(as.Neighbor) export(as.Seurat) export(as.SingleCellExperiment) export(as.sparse) export(scalefactors) exportClasses(AnchorSet) exportClasses(Assay) exportClasses(DimReduc) exportClasses(Graph) exportClasses(IntegrationAnchorSet) exportClasses(IntegrationData) exportClasses(JackStrawData) exportClasses(ModalityWeights) exportClasses(Neighbor) exportClasses(Seurat) exportClasses(SeuratCommand) exportClasses(SpatialImage) exportClasses(TransferAnchorSet) exportClasses(VisiumV1) importClassesFrom(Matrix,dgCMatrix) importClassesFrom(SeuratObject,Assay) importClassesFrom(SeuratObject,DimReduc) importClassesFrom(SeuratObject,Graph) importClassesFrom(SeuratObject,JackStrawData) importClassesFrom(SeuratObject,Neighbor) importClassesFrom(SeuratObject,Seurat) importClassesFrom(SeuratObject,SeuratCommand) importClassesFrom(SeuratObject,SpatialImage) importFrom(KernSmooth,bkde) importFrom(MASS,glm.nb) importFrom(MASS,lda) importFrom(Matrix,Matrix) importFrom(Matrix,as.matrix) importFrom(Matrix,colMeans) importFrom(Matrix,colSums) importFrom(Matrix,crossprod) importFrom(Matrix,readMM) importFrom(Matrix,rowMeans) importFrom(Matrix,rowSums) importFrom(Matrix,sparse.model.matrix) importFrom(Matrix,sparseMatrix) importFrom(Matrix,t) importFrom(RANN,nn2) importFrom(RColorBrewer,brewer.pal) importFrom(RColorBrewer,brewer.pal.info) importFrom(ROCR,performance) importFrom(ROCR,prediction) importFrom(Rcpp,evalCpp) importFrom(RcppAnnoy,AnnoyAngular) importFrom(RcppAnnoy,AnnoyEuclidean) importFrom(RcppAnnoy,AnnoyHamming) importFrom(RcppAnnoy,AnnoyManhattan) importFrom(Rtsne,Rtsne) importFrom(SeuratObject,"DefaultAssay<-") importFrom(SeuratObject,"Idents<-") importFrom(SeuratObject,"Index<-") importFrom(SeuratObject,"JS<-") importFrom(SeuratObject,"Key<-") importFrom(SeuratObject,"Loadings<-") importFrom(SeuratObject,"Misc<-") importFrom(SeuratObject,"Project<-") importFrom(SeuratObject,"Tool<-") importFrom(SeuratObject,"VariableFeatures<-") importFrom(SeuratObject,AddMetaData) importFrom(SeuratObject,Assays) importFrom(SeuratObject,Cells) importFrom(SeuratObject,CellsByIdentities) importFrom(SeuratObject,Command) importFrom(SeuratObject,CreateAssayObject) importFrom(SeuratObject,CreateDimReducObject) 
importFrom(SeuratObject,CreateSeuratObject) importFrom(SeuratObject,DefaultAssay) importFrom(SeuratObject,DefaultDimReduc) importFrom(SeuratObject,Distances) importFrom(SeuratObject,Embeddings) importFrom(SeuratObject,FetchData) importFrom(SeuratObject,GetAssayData) importFrom(SeuratObject,GetImage) importFrom(SeuratObject,GetTissueCoordinates) importFrom(SeuratObject,HVFInfo) importFrom(SeuratObject,Idents) importFrom(SeuratObject,Images) importFrom(SeuratObject,Index) importFrom(SeuratObject,Indices) importFrom(SeuratObject,IsGlobal) importFrom(SeuratObject,JS) importFrom(SeuratObject,Key) importFrom(SeuratObject,Loadings) importFrom(SeuratObject,LogSeuratCommand) importFrom(SeuratObject,Misc) importFrom(SeuratObject,Neighbors) importFrom(SeuratObject,PackageCheck) importFrom(SeuratObject,Project) importFrom(SeuratObject,Radius) importFrom(SeuratObject,Reductions) importFrom(SeuratObject,RenameCells) importFrom(SeuratObject,RenameIdents) importFrom(SeuratObject,ReorderIdent) importFrom(SeuratObject,RowMergeSparseMatrices) importFrom(SeuratObject,SVFInfo) importFrom(SeuratObject,SetAssayData) importFrom(SeuratObject,SetIdent) importFrom(SeuratObject,SpatiallyVariableFeatures) importFrom(SeuratObject,StashIdent) importFrom(SeuratObject,Stdev) importFrom(SeuratObject,Tool) importFrom(SeuratObject,UpdateSeuratObject) importFrom(SeuratObject,VariableFeatures) importFrom(SeuratObject,WhichCells) importFrom(SeuratObject,as.Graph) importFrom(SeuratObject,as.Neighbor) importFrom(SeuratObject,as.Seurat) importFrom(SeuratObject,as.sparse) importFrom(cluster,clara) importFrom(cowplot,get_legend) importFrom(cowplot,plot_grid) importFrom(cowplot,theme_cowplot) importFrom(fitdistrplus,fitdist) importFrom(future,nbrOfWorkers) importFrom(future,plan) importFrom(future.apply,future_lapply) importFrom(future.apply,future_sapply) importFrom(ggplot2,Geom) importFrom(ggplot2,GeomPolygon) importFrom(ggplot2,GeomViolin) importFrom(ggplot2,aes) importFrom(ggplot2,aes_string) importFrom(ggplot2,alpha) importFrom(ggplot2,annotation_raster) importFrom(ggplot2,coord_cartesian) importFrom(ggplot2,coord_fixed) importFrom(ggplot2,coord_flip) importFrom(ggplot2,cut_number) importFrom(ggplot2,discrete_scale) importFrom(ggplot2,draw_key_point) importFrom(ggplot2,dup_axis) importFrom(ggplot2,element_blank) importFrom(ggplot2,element_line) importFrom(ggplot2,element_rect) importFrom(ggplot2,element_text) importFrom(ggplot2,facet_grid) importFrom(ggplot2,facet_wrap) importFrom(ggplot2,geom_abline) importFrom(ggplot2,geom_bar) importFrom(ggplot2,geom_blank) importFrom(ggplot2,geom_boxplot) importFrom(ggplot2,geom_density) importFrom(ggplot2,geom_hline) importFrom(ggplot2,geom_jitter) importFrom(ggplot2,geom_label) importFrom(ggplot2,geom_line) importFrom(ggplot2,geom_point) importFrom(ggplot2,geom_polygon) importFrom(ggplot2,geom_raster) importFrom(ggplot2,geom_rect) importFrom(ggplot2,geom_smooth) importFrom(ggplot2,geom_text) importFrom(ggplot2,geom_tile) importFrom(ggplot2,geom_violin) importFrom(ggplot2,geom_vline) importFrom(ggplot2,ggplot) importFrom(ggplot2,ggplot_build) importFrom(ggplot2,ggproto) importFrom(ggplot2,ggproto_parent) importFrom(ggplot2,ggsave) importFrom(ggplot2,ggtitle) importFrom(ggplot2,guide_colorbar) importFrom(ggplot2,guide_legend) importFrom(ggplot2,guides) importFrom(ggplot2,labs) importFrom(ggplot2,layer) importFrom(ggplot2,layer_scales) importFrom(ggplot2,margin) importFrom(ggplot2,position_dodge) importFrom(ggplot2,position_jitterdodge) importFrom(ggplot2,scale_alpha) 
importFrom(ggplot2,scale_alpha_ordinal) importFrom(ggplot2,scale_color_brewer) importFrom(ggplot2,scale_color_discrete) importFrom(ggplot2,scale_color_distiller) importFrom(ggplot2,scale_color_gradient) importFrom(ggplot2,scale_color_gradientn) importFrom(ggplot2,scale_color_identity) importFrom(ggplot2,scale_color_manual) importFrom(ggplot2,scale_fill_brewer) importFrom(ggplot2,scale_fill_continuous) importFrom(ggplot2,scale_fill_gradient) importFrom(ggplot2,scale_fill_gradientn) importFrom(ggplot2,scale_fill_manual) importFrom(ggplot2,scale_fill_viridis_c) importFrom(ggplot2,scale_radius) importFrom(ggplot2,scale_size) importFrom(ggplot2,scale_x_continuous) importFrom(ggplot2,scale_x_log10) importFrom(ggplot2,scale_y_continuous) importFrom(ggplot2,scale_y_discrete) importFrom(ggplot2,scale_y_log10) importFrom(ggplot2,stat_density2d) importFrom(ggplot2,stat_qq) importFrom(ggplot2,sym) importFrom(ggplot2,theme) importFrom(ggplot2,theme_classic) importFrom(ggplot2,theme_void) importFrom(ggplot2,transform_position) importFrom(ggplot2,unit) importFrom(ggplot2,vars) importFrom(ggplot2,waiver) importFrom(ggplot2,xlab) importFrom(ggplot2,xlim) importFrom(ggplot2,ylab) importFrom(ggplot2,ylim) importFrom(ggrepel,geom_label_repel) importFrom(ggrepel,geom_text_repel) importFrom(ggridges,geom_density_ridges) importFrom(ggridges,theme_ridges) importFrom(grDevices,as.raster) importFrom(grDevices,col2rgb) importFrom(grDevices,colorRampPalette) importFrom(grDevices,rgb) importFrom(graphics,axis) importFrom(graphics,image) importFrom(graphics,locator) importFrom(graphics,par) importFrom(graphics,plot) importFrom(graphics,plot.new) importFrom(graphics,smoothScatter) importFrom(graphics,title) importFrom(grid,addGrob) importFrom(grid,editGrob) importFrom(grid,gTree) importFrom(grid,gpar) importFrom(grid,grobName) importFrom(grid,grobTree) importFrom(grid,nullGrob) importFrom(grid,pointsGrob) importFrom(grid,rasterGrob) importFrom(grid,unit) importFrom(grid,viewport) importFrom(httr,GET) importFrom(httr,accept_json) importFrom(httr,build_url) importFrom(httr,content) importFrom(httr,parse_url) importFrom(httr,status_code) importFrom(httr,timeout) importFrom(ica,icafast) importFrom(ica,icaimax) importFrom(ica,icajade) importFrom(igraph,E) importFrom(igraph,graph.adjacency) importFrom(igraph,graph_from_adj_list) importFrom(igraph,graph_from_adjacency_matrix) importFrom(igraph,plot.igraph) importFrom(irlba,irlba) importFrom(jsonlite,fromJSON) importFrom(leiden,leiden) importFrom(lmtest,lrtest) importFrom(matrixStats,rowAnyNAs) importFrom(matrixStats,rowMeans2) importFrom(matrixStats,rowSds) importFrom(matrixStats,rowSums2) importFrom(methods,"slot<-") importFrom(methods,.hasSlot) importFrom(methods,as) importFrom(methods,is) importFrom(methods,new) importFrom(methods,setAs) importFrom(methods,setClass) importFrom(methods,setClassUnion) importFrom(methods,setMethod) importFrom(methods,setOldClass) importFrom(methods,setValidity) importFrom(methods,signature) importFrom(methods,slot) importFrom(methods,slotNames) importFrom(miniUI,gadgetTitleBar) importFrom(miniUI,miniButtonBlock) importFrom(miniUI,miniContentPanel) importFrom(miniUI,miniPage) importFrom(miniUI,miniTitleBarButton) importFrom(patchwork,wrap_plots) importFrom(pbapply,pbapply) importFrom(pbapply,pblapply) importFrom(pbapply,pbsapply) importFrom(plotly,add_annotations) importFrom(plotly,layout) importFrom(plotly,plot_ly) importFrom(plotly,raster2uri) importFrom(png,readPNG) importFrom(reticulate,import) importFrom(reticulate,py_module_available) 
importFrom(reticulate,py_set_seed) importFrom(rlang,"!!") importFrom(rlang,as_label) importFrom(rlang,invoke) importFrom(scales,brewer_pal) importFrom(scales,hue_pal) importFrom(scales,rescale) importFrom(scales,squish_infinite) importFrom(scales,zero_range) importFrom(scattermore,geom_scattermore) importFrom(sctransform,correct_counts) importFrom(sctransform,get_residual_var) importFrom(sctransform,get_residuals) importFrom(sctransform,vst) importFrom(shiny,brushOpts) importFrom(shiny,brushedPoints) importFrom(shiny,clickOpts) importFrom(shiny,fillRow) importFrom(shiny,hoverOpts) importFrom(shiny,nearPoints) importFrom(shiny,observe) importFrom(shiny,observeEvent) importFrom(shiny,plotOutput) importFrom(shiny,reactiveValues) importFrom(shiny,renderPlot) importFrom(shiny,renderPrint) importFrom(shiny,runGadget) importFrom(shiny,selectInput) importFrom(shiny,sidebarPanel) importFrom(shiny,sliderInput) importFrom(shiny,stopApp) importFrom(shiny,updateSelectInput) importFrom(shiny,verbatimTextOutput) importFrom(spatstat.core,markvario) importFrom(spatstat.geom,ppp) importFrom(stats,aggregate) importFrom(stats,anova) importFrom(stats,approxfun) importFrom(stats,as.dist) importFrom(stats,as.formula) importFrom(stats,ave) importFrom(stats,coef) importFrom(stats,complete.cases) importFrom(stats,cor) importFrom(stats,dist) importFrom(stats,dnorm) importFrom(stats,glm) importFrom(stats,hclust) importFrom(stats,kmeans) importFrom(stats,lm) importFrom(stats,loess) importFrom(stats,median) importFrom(stats,na.omit) importFrom(stats,p.adjust) importFrom(stats,pchisq) importFrom(stats,pnbinom) importFrom(stats,poisson) importFrom(stats,prcomp) importFrom(stats,predict) importFrom(stats,prop.test) importFrom(stats,quantile) importFrom(stats,qunif) importFrom(stats,relevel) importFrom(stats,residuals) importFrom(stats,rnorm) importFrom(stats,runif) importFrom(stats,sd) importFrom(stats,setNames) importFrom(stats,t.test) importFrom(stats,var) importFrom(stats,wilcox.test) importFrom(tibble,tibble) importFrom(tools,file_ext) importFrom(utils,argsAnywhere) importFrom(utils,capture.output) importFrom(utils,file_test) importFrom(utils,globalVariables) importFrom(utils,head) importFrom(utils,isS3method) importFrom(utils,isS3stdGeneric) importFrom(utils,methods) importFrom(utils,packageVersion) importFrom(utils,read.csv) importFrom(utils,read.delim) importFrom(utils,read.table) importFrom(utils,setTxtProgressBar) importFrom(utils,tail) importFrom(utils,txtProgressBar) importFrom(utils,write.table) importFrom(uwot,umap) importFrom(uwot,umap_transform) importMethodsFrom(Matrix,t) useDynLib(Seurat) Seurat/LICENSE0000644000176200001440000000005414156670503012530 0ustar liggesusersYEAR: 2021 COPYRIGHT HOLDER: Seurat authors Seurat/README.md0000644000176200001440000000237314170333512013000 0ustar liggesusers[![Build Status](https://travis-ci.com/satijalab/seurat.svg?branch=master)](https://app.travis-ci.com:443/github/satijalab/seurat) [![AppVeyor build status](https://ci.appveyor.com/api/projects/status/github/satijalab/seurat?branch=master&svg=true)](https://ci.appveyor.com/project/satijalab/seurat) [![CRAN Version](https://www.r-pkg.org/badges/version/Seurat)](https://cran.r-project.org/package=Seurat) [![CRAN Downloads](https://cranlogs.r-pkg.org/badges/Seurat)](https://cran.r-project.org/package=Seurat) # Seurat v4.1 Seurat is an R toolkit for single cell genomics, developed and maintained by the Satija Lab at NYGC. 
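To get started, install the released version from CRAN or the development version from GitHub. A minimal sketch of both routes (the CRAN package name and the satijalab/seurat repository match the links below; the GitHub route assumes the devtools package is available):

```r
# Install the released version from CRAN
install.packages("Seurat")

# Or install the development version directly from GitHub using devtools
# install.packages("devtools")
devtools::install_github("satijalab/seurat")

# Load the package
library(Seurat)
```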
Instructions, documentation, and tutorials can be found at: * https://satijalab.org/seurat Seurat is also hosted on GitHub; you can view and clone the repository at * https://github.com/satijalab/seurat Seurat has been successfully installed on Mac OS X, Linux, and Windows, using the devtools package to install directly from GitHub. Improvements and new features will be added on a regular basis; please post on the [github page](https://github.com/satijalab/seurat) with any questions or if you would like to contribute. For a version history/changelog, please see the [NEWS file](https://github.com/satijalab/seurat/blob/master/NEWS.md). Seurat/data/0000755000176200001440000000000014005656653012440 5ustar liggesusersSeurat/data/cc.genes.rda0000644000176200001440000000101013712563445014605 0ustar liggesusers[binary RData payload omitted] Seurat/man/0000755000176200001440000000000014170333512012267 5ustar liggesusersSeurat/man/Load10X_Spatial.Rd0000644000176200001440000000305114152476164015415 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/preprocessing.R \name{Load10X_Spatial} \alias{Load10X_Spatial} \title{Load a 10x Genomics Visium Spatial Experiment into a \code{Seurat} object} \usage{ Load10X_Spatial( data.dir, filename = "filtered_feature_bc_matrix.h5", assay = "Spatial", slice = "slice1", filter.matrix = TRUE, to.upper = FALSE, image = NULL, ... ) } \arguments{ \item{data.dir}{Directory containing the H5 file specified by \code{filename} and the image data in a subdirectory called \code{spatial}} \item{filename}{Name of H5 file containing the feature barcode matrix} \item{assay}{Name of the initial assay} \item{slice}{Name for the stored image of the tissue slice} \item{filter.matrix}{Only keep spots that have been determined to be over tissue} \item{to.upper}{Converts all feature names to upper case. This can provide an approximate conversion of mouse to human gene names, which can be useful in an exploratory analysis. For cross-species comparisons, orthologous genes should be identified across species and used instead.} \item{image}{An object of class VisiumV1. Typically, an output from \code{\link{Read10X_Image}}} \item{...}{Arguments passed to \code{\link{Read10X_h5}}} } \value{ A \code{Seurat} object } \description{ Load a 10x Genomics Visium Spatial Experiment into a \code{Seurat} object } \examples{ \dontrun{ data_dir <- 'path/to/data/directory' list.files(data_dir) # Should show filtered_feature_bc_matrix.h5 Load10X_Spatial(data.dir = data_dir) } } \concept{preprocessing} Seurat/man/DimHeatmap.Rd0000644000176200001440000000463314156670503014603 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/visualization.R, R/convenience.R \name{DimHeatmap} \alias{DimHeatmap} \alias{PCHeatmap} \title{Dimensional reduction heatmap} \usage{ DimHeatmap( object, dims = 1, nfeatures = 30, cells = NULL, reduction = "pca", disp.min = -2.5, disp.max = NULL, balanced = TRUE, projected = FALSE, ncol = NULL, fast = TRUE, raster = TRUE, slot = "scale.data", assays = NULL, combine = TRUE ) PCHeatmap(object, ...) 
} \arguments{ \item{object}{Seurat object} \item{dims}{Dimensions to plot} \item{nfeatures}{Number of genes to plot} \item{cells}{A list of cells to plot. If numeric, just plots the top cells.} \item{reduction}{Which dimensional reduction to use} \item{disp.min}{Minimum display value (all values below are clipped)} \item{disp.max}{Maximum display value (all values above are clipped); defaults to 2.5 if \code{slot} is 'scale.data', 6 otherwise} \item{balanced}{Plot an equal number of genes with both + and - scores.} \item{projected}{Use the full projected dimensional reduction} \item{ncol}{Number of columns to plot} \item{fast}{If true, use \code{image} to generate plots; faster than using ggplot2, but not customizable} \item{raster}{If true, plot with geom_raster, else use geom_tile. geom_raster may look blurry on some viewing applications such as Preview due to how the raster is interpolated. Set this to FALSE if you are encountering that issue (note that plots may take longer to produce/render).} \item{slot}{Data slot to use, choose from 'raw.data', 'data', or 'scale.data'} \item{assays}{A vector of assays to pull data from} \item{combine}{Combine plots into a single \code{\link[patchwork]{patchwork}ed} ggplot object. If \code{FALSE}, return a list of ggplot objects} \item{...}{Extra parameters passed to \code{DimHeatmap}} } \value{ No return value by default. If using fast = FALSE, will return a \code{\link[patchwork]{patchwork}ed} ggplot object if combine = TRUE, otherwise returns a list of ggplot objects } \description{ Draws a heatmap focusing on a principal component. Both cells and genes are sorted by their principal component scores. Allows for nice visualization of sources of heterogeneity in the dataset. } \examples{ data("pbmc_small") DimHeatmap(object = pbmc_small) } \seealso{ \code{\link[graphics]{image}} \code{\link[ggplot2]{geom_raster}} } \concept{convenience} \concept{visualization} Seurat/man/ExpMean.Rd0000644000176200001440000000072214005656653014127 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/utilities.R \name{ExpMean} \alias{ExpMean} \title{Calculate the mean of logged values} \usage{ ExpMean(x, ...) 
} \arguments{ \item{x}{A vector of values} \item{...}{Other arguments (not used)} } \value{ Returns the mean in log-space } \description{ Calculate mean of logged values in non-log space (return answer in log-space) } \examples{ ExpMean(x = c(1, 2, 3)) } \concept{utilities} Seurat/man/SingleRasterMap.Rd0000644000176200001440000000201614156670503015625 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/visualization.R \name{SingleRasterMap} \alias{SingleRasterMap} \title{A single heatmap from ggplot2 using geom_raster} \usage{ SingleRasterMap( data, raster = TRUE, cell.order = NULL, feature.order = NULL, colors = PurpleAndYellow(), disp.min = -2.5, disp.max = 2.5, limits = NULL, group.by = NULL ) } \arguments{ \item{data}{A matrix or data frame with data to plot} \item{raster}{switch between geom_raster and geom_tile} \item{cell.order}{...} \item{feature.order}{...} \item{colors}{A vector of colors to use} \item{disp.min}{Minimum display value (all values below are clipped)} \item{disp.max}{Maximum display value (all values above are clipped)} \item{limits}{A two-length numeric vector with the limits for colors on the plot} \item{group.by}{A vector to group cells by, should be one grouping identity per cell} } \value{ A ggplot2 object } \description{ A single heatmap from ggplot2 using geom_raster } \keyword{internal} Seurat/man/JackStrawData-class.Rd0000644000176200001440000000062214005656653016357 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/reexports.R \docType{class} \name{JackStrawData-class} \alias{JackStrawData-class} \title{The JackStrawData Class} \description{ For more details, please see the documentation in \code{\link[SeuratObject:JackStrawData]{SeuratObject}} } \seealso{ \code{\link[SeuratObject:JackStrawData]{SeuratObject::JackStrawData-class}} } Seurat/man/cc.genes.updated.2019.Rd0000644000176200001440000000237614152507372016301 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/data.R \docType{data} \name{cc.genes.updated.2019} \alias{cc.genes.updated.2019} \title{Cell cycle genes: 2019 update} \format{ A list of two vectors \describe{ \item{s.genes}{Genes associated with S-phase} \item{g2m.genes}{Genes associated with G2M-phase} } } \source{ \url{https://www.science.org/doi/abs/10.1126/science.aad0501} } \usage{ cc.genes.updated.2019 } \description{ A list of genes used in cell-cycle regression, updated with 2019 symbols } \section{Updated symbols}{ The following symbols were updated from \code{\link{cc.genes}} \describe{ \item{s.genes}{ \itemize{ \item \emph{MCM2}: \emph{MCM7} \item \emph{MLF1IP}: \emph{CENPU} \item \emph{RPA2}: \emph{POLR1B} \item \emph{BRIP1}: \emph{MRPL36} } } \item{g2m.genes}{ \itemize{ \item \emph{FAM64A}: \emph{PIMREG} \item \emph{HN1}: \emph{JPT1} } } } } \examples{ \dontrun{ cc.genes.updated.2019 <- cc.genes cc.genes.updated.2019$s.genes <- UpdateSymbolList(symbols = cc.genes.updated.2019$s.genes) cc.genes.updated.2019$g2m.genes <- UpdateSymbolList(symbols = cc.genes.updated.2019$g2m.genes) } } \seealso{ \code{\link{cc.genes}} } \concept{data} \keyword{datasets} Seurat/man/ProjectDim.Rd0000644000176200001440000000262714005656653014640 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/dimensional_reduction.R \name{ProjectDim} \alias{ProjectDim} \title{Project Dimensional reduction onto full dataset} \usage{ ProjectDim( object, reduction = "pca", assay = NULL, 
dims.print = 1:5, nfeatures.print = 20, overwrite = FALSE, do.center = FALSE, verbose = TRUE ) } \arguments{ \item{object}{Seurat object} \item{reduction}{Reduction to use} \item{assay}{Assay to use} \item{dims.print}{Number of dims to print features for} \item{nfeatures.print}{Number of features with highest/lowest loadings to print for each dimension} \item{overwrite}{Replace the existing data in feature.loadings} \item{do.center}{Center the dataset prior to projection (should be set to TRUE)} \item{verbose}{Print top genes associated with the projected dimensions} } \value{ Returns Seurat object with the projected values } \description{ Takes a pre-computed dimensional reduction (typically calculated on a subset of genes) and projects this onto the entire dataset (all genes). Note that the cell loadings will remain unchanged, but now there are gene loadings for all genes. } \examples{ data("pbmc_small") pbmc_small pbmc_small <- ProjectDim(object = pbmc_small, reduction = "pca") # Visualize top projected genes in heatmap DimHeatmap(object = pbmc_small, reduction = "pca", dims = 1, balanced = TRUE) } \concept{dimensional_reduction} Seurat/man/GetAssay.Rd0000644000176200001440000000114114005656653014306 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/generics.R, R/objects.R \name{GetAssay} \alias{GetAssay} \alias{GetAssay.Seurat} \title{Get an Assay object from a given Seurat object.} \usage{ GetAssay(object, ...) \method{GetAssay}{Seurat}(object, assay = NULL, ...) } \arguments{ \item{object}{An object} \item{...}{Arguments passed to other methods} \item{assay}{Assay to get} } \value{ Returns an Assay object } \description{ Get an Assay object from a given Seurat object. } \examples{ data("pbmc_small") GetAssay(object = pbmc_small, assay = "RNA") } \concept{objects} Seurat/man/FindSubCluster.Rd0000644000176200001440000000205314005656653015465 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/clustering.R \name{FindSubCluster} \alias{FindSubCluster} \title{Find subclusters under one cluster} \usage{ FindSubCluster( object, cluster, graph.name, subcluster.name = "sub.cluster", resolution = 0.5, algorithm = 1 ) } \arguments{ \item{object}{An object} \item{cluster}{The cluster to be sub-clustered} \item{graph.name}{Name of graph to use for the clustering algorithm} \item{subcluster.name}{The name of the sub-cluster column added to the meta.data} \item{resolution}{Value of the resolution parameter; use a value above (below) 1.0 if you want to obtain a larger (smaller) number of communities.} \item{algorithm}{Algorithm for modularity optimization (1 = original Louvain algorithm; 2 = Louvain algorithm with multilevel refinement; 3 = SLM algorithm; 4 = Leiden algorithm). Leiden requires the leidenalg python module.} } \value{ Returns an object with sub-cluster labels in the meta.data column specified by \code{subcluster.name} } \description{ Find subclusters under one cluster } \concept{clustering} Seurat/man/PolyDimPlot.Rd0000644000176200001440000000151614005656653015010 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/visualization.R \name{PolyDimPlot} \alias{PolyDimPlot} \title{Polygon DimPlot} \usage{ PolyDimPlot( object, group.by = NULL, cells = NULL, poly.data = "spatial", flip.coords = FALSE ) } \arguments{ \item{object}{Seurat object} \item{group.by}{A grouping variable present in the metadata. 
Default is to use the groupings present in the current cell identities (\code{Idents(object = object)})} \item{cells}{Vector of cells to plot (default is all cells)} \item{poly.data}{Name of the polygon dataframe in the misc slot} \item{flip.coords}{Flip x and y coordinates} } \value{ Returns a ggplot object } \description{ Plot cells as polygons, rather than single points. Color cells by identity, or a categorical variable in metadata } \concept{visualization} Seurat/man/SubsetByBarcodeInflections.Rd0000644000176200001440000000203514005656653020007 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/preprocessing.R \name{SubsetByBarcodeInflections} \alias{SubsetByBarcodeInflections} \title{Subset a Seurat Object based on the Barcode Distribution Inflection Points} \usage{ SubsetByBarcodeInflections(object) } \arguments{ \item{object}{Seurat object} } \value{ Returns a subsetted Seurat object. } \description{ This convenience function subsets a Seurat object based on calculated inflection points. } \details{ See [CalculateBarcodeInflections()] to calculate inflection points and [BarcodeInflectionsPlot()] to visualize and test inflection point calculations. } \examples{ data("pbmc_small") pbmc_small <- CalculateBarcodeInflections( object = pbmc_small, group.column = 'groups', threshold.low = 20, threshold.high = 30 ) SubsetByBarcodeInflections(object = pbmc_small) } \seealso{ \code{\link{CalculateBarcodeInflections}} \code{\link{BarcodeInflectionsPlot}} } \author{ Robert A. Amezquita, \email{robert.amezquita@fredhutch.org} } \concept{preprocessing} Seurat/man/subset.AnchorSet.Rd0000644000176200001440000000265714024674706015775 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/objects.R \name{subset.AnchorSet} \alias{subset.AnchorSet} \title{Subset an AnchorSet object} \usage{ \method{subset}{AnchorSet}( x, score.threshold = NULL, disallowed.dataset.pairs = NULL, dataset.matrix = NULL, group.by = NULL, disallowed.ident.pairs = NULL, ident.matrix = NULL, ... ) } \arguments{ \item{x}{object to be subsetted.} \item{score.threshold}{Only anchor pairs with scores greater than this value are retained.} \item{disallowed.dataset.pairs}{Remove any anchors formed between the provided pairs. E.g. \code{list(c(1, 5), c(1, 2))} filters out any anchors between datasets 1 and 5 and datasets 1 and 2.} \item{dataset.matrix}{Provide a binary matrix specifying whether a dataset pair is allowable (1) or not (0). Should be a dataset x dataset matrix.} \item{group.by}{Grouping variable to determine allowable ident pairs} \item{disallowed.ident.pairs}{Remove any anchors formed between provided ident pairs. E.g. \code{list(c("CD4", "CD8"), c("B-cell", "T-cell"))}} \item{ident.matrix}{Provide a binary matrix specifying whether an ident pair is allowable (1) or not (0). 
Should be an ident x ident symmetric matrix} \item{...}{further arguments to be passed to or from other methods.} } \value{ Returns an \code{\link{AnchorSet}} object with specified anchors filtered out } \description{ Subset an AnchorSet object } \concept{objects} Seurat/man/Read10X.Rd0000644000176200001440000000357414024674706013746 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/preprocessing.R \name{Read10X} \alias{Read10X} \title{Load in data from 10X} \usage{ Read10X( data.dir, gene.column = 2, cell.column = 1, unique.features = TRUE, strip.suffix = FALSE ) } \arguments{ \item{data.dir}{Directory containing the matrix.mtx, genes.tsv (or features.tsv), and barcodes.tsv files provided by 10X. A vector or named vector can be given in order to load several data directories. If a named vector is given, the cell barcode names will be prefixed with the name.} \item{gene.column}{Specify which column of genes.tsv or features.tsv to use for gene names; default is 2} \item{cell.column}{Specify which column of barcodes.tsv to use for cell names; default is 1} \item{unique.features}{Make feature names unique (default TRUE)} \item{strip.suffix}{Remove trailing "-1" if present in all cell barcodes.} } \value{ If features.csv indicates the data has multiple data types, a list containing a sparse matrix of the data from each type will be returned. Otherwise a sparse matrix containing the expression data will be returned. } \description{ Enables easy loading of sparse data matrices provided by 10X genomics. } \examples{ \dontrun{ # For output from CellRanger < 3.0 data_dir <- 'path/to/data/directory' list.files(data_dir) # Should show barcodes.tsv, genes.tsv, and matrix.mtx expression_matrix <- Read10X(data.dir = data_dir) seurat_object = CreateSeuratObject(counts = expression_matrix) # For output from CellRanger >= 3.0 with multiple data types data_dir <- 'path/to/data/directory' list.files(data_dir) # Should show barcodes.tsv.gz, features.tsv.gz, and matrix.mtx.gz data <- Read10X(data.dir = data_dir) seurat_object = CreateSeuratObject(counts = data$`Gene Expression`) seurat_object[['Protein']] = CreateAssayObject(counts = data$`Antibody Capture`) } } \concept{preprocessing} Seurat/man/GetTissueCoordinates.Rd0000644000176200001440000000200114024674706016671 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/objects.R \name{GetTissueCoordinates.SlideSeq} \alias{GetTissueCoordinates.SlideSeq} \alias{GetTissueCoordinates.STARmap} \alias{GetTissueCoordinates.VisiumV1} \title{Get Tissue Coordinates} \usage{ \method{GetTissueCoordinates}{SlideSeq}(object, ...) \method{GetTissueCoordinates}{STARmap}(object, qhulls = FALSE, ...) \method{GetTissueCoordinates}{VisiumV1}( object, scale = "lowres", cols = c("imagerow", "imagecol"), ... 
) } \arguments{ \item{object}{An object} \item{...}{Arguments passed to other methods} \item{qhulls}{return qhulls instead of centroids} \item{scale}{A factor to scale the coordinates by; choose from: 'tissue', 'fiducial', 'hires', 'lowres', or \code{NULL} for no scaling} \item{cols}{Columns of tissue coordinates data.frame to pull} } \description{ Get Tissue Coordinates } \seealso{ \code{\link[SeuratObject:GetTissueCoordinates]{SeuratObject::GetTissueCoordinates}} } \concept{objects} \concept{spatial} Seurat/man/RunMarkVario.Rd0000644000176200001440000000117114005656653015151 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/preprocessing.R \name{RunMarkVario} \alias{RunMarkVario} \title{Run the mark variogram computation on a given position matrix and expression matrix.} \usage{ RunMarkVario(spatial.location, data, ...) } \arguments{ \item{spatial.location}{A 2 column matrix giving the spatial locations of each of the data points also in data} \item{data}{Matrix containing the data used as "marks" (e.g. gene expression)} \item{...}{Arguments passed to markvario} } \description{ Wraps the functionality of markvario from the spatstat package. } \concept{preprocessing} Seurat/man/FoldChange.Rd0000644000176200001440000000616714005656653014575 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/generics.R, R/differential_expression.R \name{FoldChange} \alias{FoldChange} \alias{FoldChange.default} \alias{FoldChange.Assay} \alias{FoldChange.DimReduc} \alias{FoldChange.Seurat} \title{Fold Change} \usage{ FoldChange(object, ...) \method{FoldChange}{default}(object, cells.1, cells.2, mean.fxn, fc.name, features = NULL, ...) \method{FoldChange}{Assay}( object, cells.1, cells.2, features = NULL, slot = "data", pseudocount.use = 1, fc.name = NULL, mean.fxn = NULL, base = 2, ... ) \method{FoldChange}{DimReduc}( object, cells.1, cells.2, features = NULL, slot = NULL, pseudocount.use = NULL, fc.name = NULL, mean.fxn = NULL, ... ) \method{FoldChange}{Seurat}( object, ident.1 = NULL, ident.2 = NULL, group.by = NULL, subset.ident = NULL, assay = NULL, slot = "data", reduction = NULL, features = NULL, pseudocount.use = 1, mean.fxn = NULL, base = 2, fc.name = NULL, ... ) } \arguments{ \item{object}{A Seurat object} \item{...}{Arguments passed to other methods} \item{cells.1}{Vector of cell names belonging to group 1} \item{cells.2}{Vector of cell names belonging to group 2} \item{mean.fxn}{Function to use for fold change or average difference calculation} \item{fc.name}{Name of the fold change, average difference, or custom function column in the output data.frame} \item{features}{Features to calculate fold change for. If NULL, use all features} \item{slot}{Slot to pull data from} \item{pseudocount.use}{Pseudocount to add to averaged expression values when calculating logFC. 
1 by default.} \item{base}{The base with respect to which logarithms are computed.} \item{ident.1}{Identity class to calculate fold change for; pass an object of class \code{phylo} or 'clustertree' to calculate fold change for a node in a cluster tree; passing 'clustertree' requires \code{\link{BuildClusterTree}} to have been run} \item{ident.2}{A second identity class for comparison; if \code{NULL}, use all other cells for comparison; if an object of class \code{phylo} or 'clustertree' is passed to \code{ident.1}, must pass a node to calculate fold change for} \item{group.by}{Regroup cells into a different identity class prior to calculating fold change (see example in \code{\link{FindMarkers}})} \item{subset.ident}{Subset a particular identity class prior to regrouping. Only relevant if group.by is set (see example in \code{\link{FindMarkers}})} \item{assay}{Assay to use in fold change calculation} \item{reduction}{Reduction to use - will calculate average difference on cell embeddings} } \value{ Returns a data.frame } \description{ Calculate log fold change and percentage of cells expressing each feature for different identity classes. } \details{ If the slot is \code{scale.data} or a reduction is specified, average difference is returned instead of log fold change and the column is named "avg_diff". Otherwise, log2 fold change is returned with column named "avg_log2_FC". } \examples{ data("pbmc_small") FoldChange(pbmc_small, ident.1 = 1) } \seealso{ \code{FindMarkers} } \concept{differential_expression} Seurat/man/CustomDistance.Rd0000644000176200001440000000147114005656653015521 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/utilities.R \name{CustomDistance} \alias{CustomDistance} \title{Run a custom distance function on an input data matrix} \usage{ CustomDistance(my.mat, my.function, ...) } \arguments{ \item{my.mat}{A matrix to calculate distance on} \item{my.function}{A function to calculate distance} \item{...}{Extra parameters to my.function} } \value{ A distance matrix } \description{ Run a custom distance function on an input data matrix } \examples{ data("pbmc_small") # Define custom distance matrix manhattan.distance <- function(x, y) return(sum(abs(x-y))) input.data <- GetAssayData(pbmc_small, assay.type = "RNA", slot = "scale.data") cell.manhattan.dist <- CustomDistance(input.data, manhattan.distance) } \author{ Jean Fan } \concept{utilities} Seurat/man/AverageExpression.Rd0000644000176200001440000000440514005656653016226 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/utilities.R \name{AverageExpression} \alias{AverageExpression} \title{Averaged feature expression by identity class} \usage{ AverageExpression( object, assays = NULL, features = NULL, return.seurat = FALSE, group.by = "ident", add.ident = NULL, slot = "data", verbose = TRUE, ... ) } \arguments{ \item{object}{Seurat object} \item{assays}{Which assays to use. Default is all assays} \item{features}{Features to analyze. Default is all features in the assay} \item{return.seurat}{Whether to return the data as a Seurat object. 
Default is FALSE} \item{group.by}{Categories for grouping (e.g., ident, replicate, celltype); 'ident' by default} \item{add.ident}{(Deprecated) Place an additional label on each cell prior to pseudobulking (very useful if you want to observe cluster pseudobulk values, separated by replicate, for example)} \item{slot}{Slot(s) to use; if multiple slots are given, assumed to follow the order of 'assays' (if specified) or object's assays} \item{verbose}{Print messages and show progress bar} \item{...}{Arguments to be passed to methods such as \code{\link{CreateSeuratObject}}} } \value{ Returns a matrix with genes as rows, identity classes as columns. If return.seurat is TRUE, returns an object of class \code{\link{Seurat}}. } \description{ Returns averaged expression values for each identity class } \details{ If slot is set to 'data', this function assumes that the data has been log normalized and therefore feature values are exponentiated prior to averaging so that averaging is done in non-log space. Otherwise, if slot is set to either 'counts' or 'scale.data', no exponentiation is performed prior to averaging. If \code{return.seurat = TRUE} and slot is not 'scale.data', averaged values are placed in the 'counts' slot of the returned object and the log of averaged values are placed in the 'data' slot. \code{\link{ScaleData}} is then run on the default assay before returning the object. If \code{return.seurat = TRUE} and slot is 'scale.data', the 'counts' slot is left empty, the 'data' slot is filled with NA, and 'scale.data' is set to the aggregated values. } \examples{ data("pbmc_small") head(AverageExpression(object = pbmc_small)) } \concept{utilities} Seurat/man/PredictAssay.Rd0000644000176200001440000000255614005656653015172 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/clustering.R \name{PredictAssay} \alias{PredictAssay} \title{Predict value from nearest neighbors} \usage{ PredictAssay( object, nn.idx, assay, reduction = NULL, dims = NULL, return.assay = TRUE, slot = "scale.data", features = NULL, mean.function = rowMeans, seed = 4273, verbose = TRUE ) } \arguments{ \item{object}{The object used to calculate knn} \item{nn.idx}{k nearest neighbor indices; a cells x k matrix} \item{assay}{Assay used for prediction} \item{reduction}{Cell embedding of the reduction used for prediction} \item{dims}{Number of dimensions of cell embedding} \item{return.assay}{Return an assay or a predicted matrix} \item{slot}{Slot used for prediction} \item{features}{Features used for prediction} \item{mean.function}{The function used to calculate row means} \item{seed}{Sets the random seed to check if the nearest neighbor is the query cell} \item{verbose}{Print progress} } \value{ Returns an assay containing predicted expression values in the data slot } \description{ This function predicts expression values or cell embeddings from a k-nearest-neighbor index. For each cell, it averages the values of its k neighbors to obtain an imputed value. It can average expression values in assays and cell embeddings from dimensional reductions. 
} \concept{integration} Seurat/man/Radius.Rd0000644000176200001440000000076414024674706014025 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/objects.R \name{Radius.SlideSeq} \alias{Radius.SlideSeq} \alias{Radius.STARmap} \alias{Radius.VisiumV1} \title{Get Spot Radius} \usage{ \method{Radius}{SlideSeq}(object) \method{Radius}{STARmap}(object) \method{Radius}{VisiumV1}(object) } \arguments{ \item{object}{An image object} } \description{ Get Spot Radius } \seealso{ \code{\link[SeuratObject:Radius]{SeuratObject::Radius}} } \concept{objects} \concept{spatial} Seurat/man/SCTAssay-class.Rd0000644000176200001440000000560014170106500015311 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/objects.R \docType{class} \name{SCTAssay-class} \alias{SCTAssay-class} \alias{SCTModel} \alias{SCTAssay} \alias{levels.SCTAssay} \alias{levels<-.SCTAssay} \title{The SCTModel Class} \usage{ \method{levels}{SCTAssay}(x) \method{levels}{SCTAssay}(x) <- value } \arguments{ \item{x}{An \code{SCTAssay} object} \item{value}{New levels, must be in the same order as the levels present} } \value{ \code{levels}: SCT model names \code{levels<-}: \code{x} with updated SCT model names } \description{ The SCTModel object stores the model and parameters from SCTransform. It can be used to calculate Pearson residuals for new genes. The SCTAssay object contains all the information found in an \code{\link{Assay}} object, with extra information from the results of \code{\link{SCTransform}} } \section{Slots}{ \describe{ \item{\code{feature.attributes}}{A data.frame with feature attributes in SCTransform} \item{\code{cell.attributes}}{A data.frame with cell attributes in SCTransform} \item{\code{clips}}{A list of two numeric vectors of length two specifying the min and max values the Pearson residuals will be clipped to; one for vst and one for SCTransform} \item{\code{umi.assay}}{Name of the assay of the Seurat object containing the UMI matrix; the default is RNA} \item{\code{model}}{A formula used in SCTransform} \item{\code{arguments}}{Other information used in SCTransform} \item{\code{median_umi}}{Median UMI (or scale factor) used to calculate corrected counts} \item{\code{SCTModel.list}}{A list containing SCT models} }} \section{Get and set SCT model names}{ SCT results are named by the initial run of \code{\link{SCTransform}} in order to keep SCT parameters straight between runs. When working with merged \code{SCTAssay} objects, these model names are important. \code{levels} allows querying the models present. \code{levels<-} allows changing the names of the models present, which is useful when merging \code{SCTAssay} objects. Note: unlike normal \code{\link[base]{levels<-}}, \code{levels<-.SCTAssay} allows complete changing of model names, not just reordering. } \section{Creating an \code{SCTAssay} from an \code{Assay}}{ Conversion from an \code{Assay} object to an \code{SCTAssay} object is done by adding the additional slots to the object. 
If \code{from} has results generated by \code{\link{SCTransform}} from Seurat v3.0.0 to v3.1.1, the conversion will automagically fill the new slots with the data } \examples{ \dontrun{ # SCTAssay objects are generated from SCTransform pbmc_small <- SCTransform(pbmc_small) } # SCTAssay objects are generated from SCTransform pbmc_small <- SCTransform(pbmc_small) pbmc_small[["SCT"]] \dontrun{ # Query and change SCT model names levels(pbmc_small[['SCT']]) levels(pbmc_small[['SCT']]) <- '3' levels(pbmc_small[['SCT']]) } } \seealso{ \code{\link{Assay}} \code{\link{Assay}} } \concept{objects} Seurat/man/NNPlot.Rd0000644000176200001440000000354714005656653013754 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/visualization.R \name{NNPlot} \alias{NNPlot} \title{Highlight Neighbors in DimPlot} \usage{ NNPlot( object, reduction, nn.idx, query.cells, dims = 1:2, label = FALSE, label.size = 4, repel = FALSE, sizes.highlight = 2, pt.size = 1, cols.highlight = c("#377eb8", "#e41a1c"), na.value = "#bdbdbd", order = c("self", "neighbors", "other"), show.all.cells = TRUE, ... ) } \arguments{ \item{object}{Seurat object} \item{reduction}{Which dimensionality reduction to use. If not specified, first searches for umap, then tsne, then pca} \item{nn.idx}{the neighbor index of all cells} \item{query.cells}{cells used to find their neighbors} \item{dims}{Dimensions to plot, must be a two-length numeric vector specifying x- and y-dimensions} \item{label}{Whether to label the clusters} \item{label.size}{Sets size of labels} \item{repel}{Repel labels} \item{sizes.highlight}{Size of highlighted cells; will repeat to the length groups in cells.highlight} \item{pt.size}{Adjust point size for plotting} \item{cols.highlight}{A vector of colors to highlight the cells as; will repeat to the length groups in cells.highlight} \item{na.value}{Color value for NA points when using custom scale} \item{order}{Specify the order of plotting for the idents. This can be useful for crowded plots if points of interest are being buried. 
Provide either a full list of valid idents or a subset to be plotted last (on top)} \item{show.all.cells}{Show all cells or only query and neighbor cells} \item{...}{Extra parameters passed to \code{DimPlot}} } \value{ A \code{\link[patchwork]{patchwork}ed} ggplot object if \code{combine = TRUE}; otherwise, a list of ggplot objects } \description{ Colors the query cells and the neighbors of the query cells in the DimPlot } \concept{visualization} Seurat/man/SingleCorPlot.Rd0000644000176200001440000000243714165416216015317 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/visualization.R \name{SingleCorPlot} \alias{SingleCorPlot} \title{A single correlation plot} \usage{ SingleCorPlot( data, col.by = NULL, cols = NULL, pt.size = NULL, smooth = FALSE, rows.highlight = NULL, legend.title = NULL, na.value = "grey50", span = NULL, raster = NULL, raster.dpi = NULL, plot.cor = TRUE, jitter = TRUE ) } \arguments{ \item{data}{A data frame with two columns to be plotted} \item{col.by}{A vector or factor of values to color the plot by} \item{cols}{An optional vector of colors to use} \item{pt.size}{Point size for the plot} \item{smooth}{Make a smoothed scatter plot} \item{legend.title}{Optional legend title} \item{raster}{Convert points to raster format; default is \code{NULL}, which will automatically use raster if the number of points plotted is greater than 100,000} \item{raster.dpi}{The pixel resolution for rastered plots, passed to geom_scattermore(). Default is c(512, 512)} \item{plot.cor}{...} \item{jitter}{Jitter for easier visualization of crowded points} \item{rows.highlight}{A vector of rows to highlight (like cells.highlight in \code{\link{SingleDimPlot}})} } \value{ A ggplot2 object } \description{ A single correlation plot } \keyword{internal} Seurat/man/RunUMAP.Rd0000644000176200001440000002262414152476164014026 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/generics.R, R/dimensional_reduction.R \name{RunUMAP} \alias{RunUMAP} \alias{RunUMAP.default} \alias{RunUMAP.Graph} \alias{RunUMAP.Neighbor} \alias{RunUMAP.Seurat} \title{Run UMAP} \usage{ RunUMAP(object, ...) \method{RunUMAP}{default}( object, reduction.key = "UMAP_", assay = NULL, reduction.model = NULL, return.model = FALSE, umap.method = "uwot", n.neighbors = 30L, n.components = 2L, metric = "cosine", n.epochs = NULL, learning.rate = 1, min.dist = 0.3, spread = 1, set.op.mix.ratio = 1, local.connectivity = 1L, repulsion.strength = 1, negative.sample.rate = 5, a = NULL, b = NULL, uwot.sgd = FALSE, seed.use = 42, metric.kwds = NULL, angular.rp.forest = FALSE, densmap = FALSE, dens.lambda = 2, dens.frac = 0.3, dens.var.shift = 0.1, verbose = TRUE, ... ) \method{RunUMAP}{Graph}( object, assay = NULL, umap.method = "umap-learn", n.components = 2L, metric = "correlation", n.epochs = 0L, learning.rate = 1, min.dist = 0.3, spread = 1, repulsion.strength = 1, negative.sample.rate = 5L, a = NULL, b = NULL, uwot.sgd = FALSE, seed.use = 42L, metric.kwds = NULL, densmap = FALSE, densmap.kwds = NULL, verbose = TRUE, reduction.key = "UMAP_", ... ) \method{RunUMAP}{Neighbor}(object, reduction.model, ...) 
\method{RunUMAP}{Seurat}( object, dims = NULL, reduction = "pca", features = NULL, graph = NULL, assay = DefaultAssay(object = object), nn.name = NULL, slot = "data", umap.method = "uwot", reduction.model = NULL, return.model = FALSE, n.neighbors = 30L, n.components = 2L, metric = "cosine", n.epochs = NULL, learning.rate = 1, min.dist = 0.3, spread = 1, set.op.mix.ratio = 1, local.connectivity = 1L, repulsion.strength = 1, negative.sample.rate = 5L, a = NULL, b = NULL, uwot.sgd = FALSE, seed.use = 42L, metric.kwds = NULL, angular.rp.forest = FALSE, densmap = FALSE, dens.lambda = 2, dens.frac = 0.3, dens.var.shift = 0.1, verbose = TRUE, reduction.name = "umap", reduction.key = "UMAP_", ... ) } \arguments{ \item{object}{An object} \item{...}{Arguments passed to other methods and UMAP} \item{reduction.key}{dimensional reduction key, specifies the string before the number for the dimension names. UMAP by default} \item{assay}{Assay to pull data for when using \code{features}, or assay used to construct Graph if running UMAP on a Graph} \item{reduction.model}{\code{DimReduc} object that contains the umap model} \item{return.model}{whether UMAP will return the uwot model} \item{umap.method}{UMAP implementation to run. Can be \describe{ \item{\code{uwot}:}{Runs umap via the uwot R package} \item{\code{uwot-learn}:}{Runs umap via the uwot R package and returns the learned umap model} \item{\code{umap-learn}:}{Run the Seurat wrapper of the python umap-learn package} }} \item{n.neighbors}{This determines the number of neighboring points used in local approximations of manifold structure. Larger values will result in more global structure being preserved at the loss of detailed local structure. In general this parameter should often be in the range 5 to 50.} \item{n.components}{The dimension of the space to embed into.} \item{metric}{This determines the choice of metric used to measure distance in the input space. A wide variety of metrics are already coded, and a user-defined function can be passed as long as it has been JITd by numba.} \item{n.epochs}{The number of training epochs to be used in optimizing the low dimensional embedding. Larger values result in more accurate embeddings. If NULL is specified, a value will be selected based on the size of the input dataset (200 for large datasets, 500 for small).} \item{learning.rate}{The initial learning rate for the embedding optimization.} \item{min.dist}{This controls how tightly the embedding is allowed to compress points together. Larger values ensure embedded points are more evenly distributed, while smaller values allow the algorithm to optimise more accurately with regard to local structure. Sensible values are in the range 0.001 to 0.5.} \item{spread}{The effective scale of embedded points. In combination with min.dist this determines how clustered/clumped the embedded points are.} \item{set.op.mix.ratio}{Interpolate between (fuzzy) union and intersection as the set operation used to combine local fuzzy simplicial sets to obtain a global fuzzy simplicial set. Both fuzzy set operations use the product t-norm. The value of this parameter should be between 0.0 and 1.0; a value of 1.0 will use a pure fuzzy union, while 0.0 will use a pure fuzzy intersection.} \item{local.connectivity}{The local connectivity required - i.e. the number of nearest neighbors that should be assumed to be connected at a local level. The higher this value the more connected the manifold becomes locally.
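% A minimal sketch of the n.neighbors/min.dist trade-off described above
% (assumes PCA has already been computed on 'pbmc_small'; the values shown are
% illustrative, not recommendations):
%   pbmc_small <- RunUMAP(pbmc_small, dims = 1:10, n.neighbors = 30L,
%                         min.dist = 0.3, metric = "cosine")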
In practice this should be not more than the local intrinsic dimension of the manifold.} \item{repulsion.strength}{Weighting applied to negative samples in low dimensional embedding optimization. Values higher than one will result in greater weight being given to negative samples.} \item{negative.sample.rate}{The number of negative samples to select per positive sample in the optimization process. Increasing this value will result in greater repulsive force being applied, greater optimization cost, but slightly more accuracy.} \item{a}{More specific parameters controlling the embedding. If NULL, these values are set automatically as determined by min.dist and spread. Parameter of differentiable approximation of right adjoint functor.} \item{b}{More specific parameters controlling the embedding. If NULL, these values are set automatically as determined by min.dist and spread. Parameter of differentiable approximation of right adjoint functor.} \item{uwot.sgd}{Set \code{uwot::umap(fast_sgd = TRUE)}; see \code{\link[uwot]{umap}} for more details} \item{seed.use}{Set a random seed. By default, sets the seed to 42. Setting NULL will not set a seed} \item{metric.kwds}{A dictionary of arguments to pass on to the metric, such as the p value for Minkowski distance. If NULL then no arguments are passed on.} \item{angular.rp.forest}{Whether to use an angular random projection forest to initialise the approximate nearest neighbor search. This can be faster, but is mostly only useful for metrics that use an angular-style distance such as cosine, correlation, etc. In the case of those metrics angular forests will be chosen automatically.} \item{densmap}{Whether to use the density-augmented objective of densMAP. Turning on this option generates an embedding where the local densities are encouraged to be correlated with those in the original space. Parameters below with the prefix ‘dens’ further control the behavior of this extension. Default is FALSE. Only compatible with 'umap-learn' method and version of umap-learn >= 0.5.0} \item{dens.lambda}{Specific parameter which controls the regularization weight of the density correlation term in densMAP. Higher values prioritize density preservation over the UMAP objective, and vice versa for values closer to zero. Setting this parameter to zero is equivalent to running the original UMAP algorithm. Default value is 2.} \item{dens.frac}{Specific parameter which controls the fraction of epochs (between 0 and 1) where the density-augmented objective is used in densMAP. The first (1 - dens_frac) fraction of epochs optimize the original UMAP objective before introducing the density correlation term. Default is 0.3.} \item{dens.var.shift}{Specific parameter which specifies a small constant added to the variance of local radii in the embedding when calculating the density correlation objective to prevent numerical instability from dividing by a small number. Default is 0.1.} \item{verbose}{Controls verbosity} \item{densmap.kwds}{A dictionary of arguments to pass on to the densMAP optimization.} \item{dims}{Which dimensions to use as input features, used only if \code{features} is NULL} \item{reduction}{Which dimensional reduction (PCA or ICA) to use for the UMAP input. Default is PCA} \item{features}{If set, run UMAP on this subset of features (instead of running on a set of reduced dimensions).
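% A sketch of enabling the densMAP options described above (assumes the python
% umap-learn package, version >= 0.5.0, is installed and PCA has been run;
% dens.lambda and dens.frac are left at their documented defaults):
%   pbmc_small <- RunUMAP(pbmc_small, dims = 1:10, umap.method = "umap-learn",
%                         densmap = TRUE, dens.lambda = 2, dens.frac = 0.3)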
Not set (NULL) by default; \code{dims} must be NULL to run on features} \item{graph}{Name of graph on which to run UMAP} \item{nn.name}{Name of knn output on which to run UMAP} \item{slot}{The slot used to pull data for when using \code{features}. data slot is by default.} \item{reduction.name}{Name to store dimensional reduction under in the Seurat object} } \value{ Returns a Seurat object containing a UMAP representation } \description{ Runs the Uniform Manifold Approximation and Projection (UMAP) dimensional reduction technique. To run, you must first install the umap-learn python package (e.g. via \code{pip install umap-learn}). Details on this package can be found here: \url{https://github.com/lmcinnes/umap}. For a more in depth discussion of the mathematics underlying UMAP, see the ArXiv paper here: \url{https://arxiv.org/abs/1802.03426}. } \examples{ \dontrun{ data("pbmc_small") pbmc_small # Run UMAP map on first 5 PCs pbmc_small <- RunUMAP(object = pbmc_small, dims = 1:5) # Plot results DimPlot(object = pbmc_small, reduction = 'umap') } } \references{ McInnes, L, Healy, J, UMAP: Uniform Manifold Approximation and Projection for Dimension Reduction, ArXiv e-prints 1802.03426, 2018 } \concept{dimensional_reduction} Seurat/man/ScaleFactors.Rd0000644000176200001440000000161314024674706015143 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/generics.R, R/objects.R \name{ScaleFactors} \alias{ScaleFactors} \alias{scalefactors} \alias{ScaleFactors.VisiumV1} \title{Get image scale factors} \usage{ ScaleFactors(object, ...) scalefactors(spot, fiducial, hires, lowres) \method{ScaleFactors}{VisiumV1}(object, ...) \method{ScaleFactors}{VisiumV1}(object, ...) } \arguments{ \item{object}{An object to get scale factors from} \item{...}{Arguments passed to other methods} \item{spot}{Spot full resolution scale factor} \item{fiducial}{Fiducial full resolution scale factor} \item{hires}{High resolutoin scale factor} \item{lowres}{Low resolution scale factor} } \value{ An object of class \code{scalefactors} } \description{ Get image scale factors } \note{ \code{scalefactors} objects can be created with \code{scalefactors()} } \concept{objects} \concept{spatial} Seurat/man/ExpSD.Rd0000644000176200001440000000066714005656653013565 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/utilities.R \name{ExpSD} \alias{ExpSD} \title{Calculate the standard deviation of logged values} \usage{ ExpSD(x) } \arguments{ \item{x}{A vector of values} } \value{ Returns the standard deviation in log-space } \description{ Calculate SD of logged values in non-log space (return answer in log-space) } \examples{ ExpSD(x = c(1, 2, 3)) } \concept{utilities} Seurat/man/FindMultiModalNeighbors.Rd0000644000176200001440000000451014005656653017302 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/clustering.R \name{FindMultiModalNeighbors} \alias{FindMultiModalNeighbors} \title{Construct weighted nearest neighbor graph} \usage{ FindMultiModalNeighbors( object, reduction.list, dims.list, k.nn = 20, l2.norm = TRUE, knn.graph.name = "wknn", snn.graph.name = "wsnn", weighted.nn.name = "weighted.nn", modality.weight.name = NULL, knn.range = 200, prune.SNN = 1/15, sd.scale = 1, cross.contant.list = NULL, smooth = FALSE, return.intermediate = FALSE, modality.weight = NULL, verbose = TRUE ) } \arguments{ \item{object}{A Seurat object} \item{reduction.list}{A list of two dimensional reductions, one for 
each of the modalities to be integrated} \item{dims.list}{A list containing the dimensions for each reduction to use} \item{k.nn}{the number of multimodal neighbors to compute. 20 by default} \item{l2.norm}{Perform L2 normalization on the cell embeddings after dimensional reduction. TRUE by default.} \item{knn.graph.name}{Multimodal knn graph name} \item{snn.graph.name}{Multimodal snn graph name} \item{weighted.nn.name}{Multimodal neighbor object name} \item{modality.weight.name}{Variable name to store modality weight in object meta data} \item{knn.range}{The number of approximate neighbors to compute} \item{prune.SNN}{Cutoff not to discard edge in SNN graph} \item{sd.scale}{The scaling factor for kernel width. 1 by default} \item{cross.contant.list}{Constant used to avoid divide-by-zero errors. 1e-4 by default} \item{smooth}{Smoothing modality score across each individual modality neighbors. FALSE by default} \item{return.intermediate}{Store intermediate results in misc} \item{modality.weight}{A \code{\link{ModalityWeights}} object generated by \code{FindModalityWeights}} \item{verbose}{Print progress bars and output} } \value{ Seurat object containing a nearest-neighbor object, KNN graph, and SNN graph - each based on a weighted combination of modalities. } \description{ This function will construct a weighted nearest neighbor (WNN) graph. For each cell, we identify the nearest neighbors based on a weighted combination of two modalities. Takes as input two dimensional reductions, one computed for each modality.Other parameters are listed for debugging, but can be left as default values. } \concept{clustering} Seurat/man/HoverLocator.Rd0000644000176200001440000000167414005656653015210 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/visualization.R \name{HoverLocator} \alias{HoverLocator} \title{Hover Locator} \usage{ HoverLocator(plot, information = NULL, axes = TRUE, dark.theme = FALSE, ...) 
} \arguments{ \item{plot}{A ggplot2 plot} \item{information}{An optional dataframe or matrix of extra information to be displayed on hover} \item{axes}{Display or hide x- and y-axes} \item{dark.theme}{Plot using a dark theme?} \item{...}{Extra parameters to be passed to \code{\link[plotly]{layout}}} } \description{ Get quick information from a scatterplot by hovering over points } \examples{ \dontrun{ data("pbmc_small") plot <- DimPlot(object = pbmc_small) HoverLocator(plot = plot, information = FetchData(object = pbmc_small, vars = 'percent.mito')) } } \seealso{ \code{\link[plotly]{layout}} \code{\link[ggplot2]{ggplot_build}} \code{\link{DimPlot}} \code{\link{FeaturePlot}} } \concept{visualization} Seurat/man/AddAzimuthScores.Rd0000644000176200001440000000112214152476164015776 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/utilities.R \name{AddAzimuthScores} \alias{AddAzimuthScores} \title{Add Azimuth Scores} \usage{ AddAzimuthScores(object, filename) } \arguments{ \item{object}{A \code{\link[SeuratObject]{Seurat}} object} \item{filename}{Path to Azimuth mapping scores file} } \value{ \code{object} with the mapping scores added } \description{ Add mapping and prediction scores from Azimuth to a \code{\link[SeuratObject]{Seurat}} object } \examples{ \dontrun{ object <- AddAzimuthScores(object, filename = "azimuth_pred.tsv") } } Seurat/man/SingleSpatialPlot.Rd0000644000176200001440000000404014156670503016162 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/visualization.R \name{SingleSpatialPlot} \alias{SingleSpatialPlot} \title{Base plotting function for all Spatial plots} \usage{ SingleSpatialPlot( data, image, cols = NULL, image.alpha = 1, pt.alpha = NULL, crop = TRUE, pt.size.factor = NULL, stroke = 0.25, col.by = NULL, alpha.by = NULL, cells.highlight = NULL, cols.highlight = c("#DE2D26", "grey50"), geom = c("spatial", "interactive", "poly"), na.value = "grey50" ) } \arguments{ \item{data}{Data.frame with info to be plotted} \item{image}{\code{SpatialImage} object to be plotted} \item{cols}{Vector of colors, each color corresponds to an identity class. This may also be a single character or numeric value corresponding to a palette as specified by \code{\link[RColorBrewer]{brewer.pal.info}}. By default, ggplot2 assigns colors} \item{image.alpha}{Adjust the opacity of the background images. Set to 0 to remove.} \item{pt.alpha}{Adjust the opacity of the points if plotting a \code{SpatialDimPlot}} \item{crop}{Crop the plot in to focus on points plotted. Set to \code{FALSE} to show entire background image.} \item{pt.size.factor}{Sets the size of the points relative to spot.radius} \item{stroke}{Control the width of the border around the spots} \item{col.by}{Mapping variable for the point color} \item{alpha.by}{Mapping variable for the point alpha value} \item{cells.highlight}{A list of character or numeric vectors of cells to highlight. If only one group of cells desired, can simply pass a vector instead of a list. 
If set, colors selected cells to the color(s) in cols.highlight} \item{cols.highlight}{A vector of colors to highlight the cells as; ordered the same as the groups in cells.highlight; last color corresponds to unselected cells.} \item{geom}{Switch between normal spatial geom and geom to enable hover functionality} \item{na.value}{Color for spots with NA values} } \value{ A ggplot2 object } \description{ Base plotting function for all Spatial plots } \keyword{internal} Seurat/man/cc.genes.Rd0000644000176200001440000000075414152507372014260 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/data.R \docType{data} \name{cc.genes} \alias{cc.genes} \title{Cell cycle genes} \format{ A list of two vectors \describe{ \item{s.genes}{Genes associated with S-phase} \item{g2m.genes}{Genes associated with G2M-phase} } } \source{ \url{https://www.science.org/doi/abs/10.1126/science.aad0501} } \usage{ cc.genes } \description{ A list of genes used in cell-cycle regression } \concept{data} \keyword{datasets} Seurat/man/SingleDimPlot.Rd0000644000176200001440000000477214165416216015311 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/visualization.R \name{SingleDimPlot} \alias{SingleDimPlot} \title{Plot a single dimension} \usage{ SingleDimPlot( data, dims, col.by = NULL, cols = NULL, pt.size = NULL, shape.by = NULL, alpha.by = NULL, order = NULL, label = FALSE, repel = FALSE, label.size = 4, cells.highlight = NULL, cols.highlight = "#DE2D26", sizes.highlight = 1, na.value = "grey50", raster = NULL, raster.dpi = NULL ) } \arguments{ \item{data}{Data to plot} \item{dims}{A two-length numeric vector with dimensions to use} \item{col.by}{...} \item{cols}{Vector of colors, each color corresponds to an identity class. This may also be a single character or numeric value corresponding to a palette as specified by \code{\link[RColorBrewer]{brewer.pal.info}}.By default, ggplot2 assigns colors} \item{pt.size}{Adjust point size for plotting} \item{shape.by}{If NULL, all points are circles (default). You can specify any cell attribute (that can be pulled with \code{\link{FetchData}}) allowing for both different colors and different shapes on cells.} \item{alpha.by}{Mapping variable for the point alpha value} \item{order}{Specify the order of plotting for the idents. This can be useful for crowded plots if points of interest are being buried. Provide either a full list of valid idents or a subset to be plotted last (on top).} \item{label}{Whether to label the clusters} \item{repel}{Repel labels} \item{label.size}{Sets size of labels} \item{cells.highlight}{A list of character or numeric vectors of cells to highlight. If only one group of cells desired, can simply pass a vector instead of a list. If set, colors selected cells to the color(s) in \code{cols.highlight} and other cells black (white if dark.theme = TRUE); will also resize to the size(s) passed to \code{sizes.highlight}} \item{cols.highlight}{A vector of colors to highlight the cells as; will repeat to the length groups in cells.highlight} \item{sizes.highlight}{Size of highlighted cells; will repeat to the length groups in cells.highlight} \item{na.value}{Color value for NA points when using custom scale.} \item{raster}{Convert points to raster format, default is \code{NULL} which will automatically use raster if the number of points plotted is greater than 100,000} \item{raster.dpi}{the pixel resolution for rastered plots, passed to geom_scattermore(). 
Default is c(512, 512)} } \value{ A ggplot2 object } \description{ Plot a single dimension } \keyword{internal} Seurat/man/SampleUMI.Rd0000644000176200001440000000145514005656653014372 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/preprocessing.R \name{SampleUMI} \alias{SampleUMI} \title{Sample UMI} \usage{ SampleUMI(data, max.umi = 1000, upsample = FALSE, verbose = FALSE) } \arguments{ \item{data}{Matrix with the raw count data} \item{max.umi}{Number of UMIs to sample to} \item{upsample}{Upsamples all cells with fewer than max.umi} \item{verbose}{Display the progress bar} } \value{ Matrix with downsampled data } \description{ Downsample each cell to a specified number of UMIs. Includes an option to upsample cells below specified UMI as well. } \examples{ data("pbmc_small") counts = as.matrix(x = GetAssayData(object = pbmc_small, assay = "RNA", slot = "counts")) downsampled = SampleUMI(data = counts) head(x = downsampled) } \concept{preprocessing} Seurat/man/SingleImageMap.Rd0000644000176200001440000000113514156670503015410 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/visualization.R \name{SingleImageMap} \alias{SingleImageMap} \title{A single heatmap from base R using \code{\link[graphics]{image}}} \usage{ SingleImageMap(data, order = NULL, title = NULL) } \arguments{ \item{data}{matrix of data to plot} \item{order}{optional vector of cell names to specify order in plot} \item{title}{Title for plot} } \value{ No return, generates a base-R heatmap using \code{\link[graphics]{image}} } \description{ A single heatmap from base R using \code{\link[graphics]{image}} } \keyword{internal} Seurat/man/HTOHeatmap.Rd0000644000176200001440000000316614005656653014531 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/visualization.R \name{HTOHeatmap} \alias{HTOHeatmap} \title{Hashtag oligo heatmap} \usage{ HTOHeatmap( object, assay = "HTO", classification = paste0(assay, "_classification"), global.classification = paste0(assay, "_classification.global"), ncells = 5000, singlet.names = NULL, raster = TRUE ) } \arguments{ \item{object}{Seurat object. Assumes that the hash tag oligo (HTO) data has been added and normalized, and demultiplexing has been run with HTODemux().} \item{assay}{Hashtag assay name.} \item{classification}{The naming for metadata column with classification result from HTODemux().} \item{global.classification}{The slot for metadata column specifying a cell as singlet/doublet/negative.} \item{ncells}{Number of cells to plot. Default is to choose 5000 cells by random subsampling, to avoid having to draw exceptionally large heatmaps.} \item{singlet.names}{Namings for the singlets. Default is to use the same names as HTOs.} \item{raster}{If true, plot with geom_raster, else use geom_tile. geom_raster may look blurry on some viewing applications such as Preview due to how the raster is interpolated. Set this to FALSE if you are encountering that issue (note that plots may take longer to produce/render).} } \value{ Returns a ggplot2 plot object. } \description{ Draws a heatmap of hashtag oligo signals across singlets/doublets/negative cells. Allows for the visualization of HTO demultiplexing results. 
} \examples{ \dontrun{ object <- HTODemux(object) HTOHeatmap(object) } } \seealso{ \code{\link{HTODemux}} } \concept{visualization} Seurat/man/ISpatialFeaturePlot.Rd0000644000176200001440000000147514152476164016461 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/visualization.R \name{ISpatialFeaturePlot} \alias{ISpatialFeaturePlot} \title{Visualize features spatially and interactively} \usage{ ISpatialFeaturePlot( object, feature, image = NULL, slot = "data", alpha = c(0.1, 1) ) } \arguments{ \item{object}{Seurat object} \item{feature}{Feature to visualize} \item{image}{Name of the image to use in the plot} \item{slot}{Which slot to pull expression data from?} \item{alpha}{Controls opacity of spots. Provide as a vector specifying the min and max for SpatialFeaturePlot. For SpatialDimPlot, provide a single alpha value for each plot.} } \value{ Returns final plot as a ggplot object } \description{ Visualize features spatially and interactively } \concept{spatial} \concept{visualization} Seurat/man/Assay-class.Rd0000644000176200001440000000062714005656653014761 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/reexports.R \docType{class} \name{Assay-class} \alias{Assay-class} \title{The Assay Class} \description{ The \code{Assay} object is the basic unit of Seurat; for more details, please see the documentation in \code{\link[SeuratObject:Assay]{SeuratObject}} } \seealso{ \code{\link[SeuratObject:Assay]{SeuratObject::Assay-class}} } Seurat/man/MixscapeLDA.Rd0000644000176200001440000000323114005656653014662 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/mixscape.R \name{MixscapeLDA} \alias{MixscapeLDA} \title{Linear discriminant analysis on pooled CRISPR screen data.} \usage{ MixscapeLDA( object, assay = NULL, ndims.print = 1:5, nfeatures.print = 30, reduction.key = "LDA_", seed = 42, pc.assay = "PRTB", labels = "gene", nt.label = "NT", npcs = 10, verbose = TRUE, logfc.threshold = 0.25 ) } \arguments{ \item{object}{An object of class Seurat.} \item{assay}{Assay to use for performing Linear Discriminant Analysis (LDA).} \item{ndims.print}{Number of LDA dimensions to print.} \item{nfeatures.print}{Number of features to print for each LDA component.} \item{reduction.key}{Reduction key name.} \item{seed}{Value for random seed} \item{pc.assay}{Assay to use for running Principle components analysis.} \item{labels}{Meta data column with target gene class labels.} \item{nt.label}{Name of non-targeting cell class.} \item{npcs}{Number of principle components to use.} \item{verbose}{Print progress bar.} \item{logfc.threshold}{Limit testing to genes which show, on average, at least X-fold difference (log-scale) between the two groups of cells. Default is 0.25 Increasing logfc.threshold speeds up the function, but can miss weaker signals.} } \value{ Returns a Seurat object with LDA added in the reduction slot. } \description{ This function performs unsupervised PCA on each mixscape class separately and projects each subspace onto all cells in the data. Finally, it uses the first 10 principle components from each projection as input to lda in MASS package together with mixscape class labels. 
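% A minimal sketch of the LDA step described above (assumptions: 'obj' already
% carries a perturbation-signature assay named "PRTB", e.g. from
% CalcPerturbSig(), and a metadata column "gene" with target-gene labels):
%   obj <- MixscapeLDA(object = obj, assay = "PRTB", pc.assay = "PRTB",
%                      labels = "gene", nt.label = "NT", npcs = 10)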
} \concept{mixscape} Seurat/man/IntegrationData-class.Rd0000644000176200001440000000165514005656653016760 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/objects.R \docType{class} \name{IntegrationData-class} \alias{IntegrationData-class} \alias{IntegrationData} \title{The IntegrationData Class} \description{ The IntegrationData object is an intermediate storage container used internally throughout the integration procedure to hold bits of data that are useful downstream. } \section{Slots}{ \describe{ \item{\code{neighbors}}{List of neighborhood information for cells (outputs of \code{RANN::nn2})} \item{\code{weights}}{Anchor weight matrix} \item{\code{integration.matrix}}{Integration matrix} \item{\code{anchors}}{Anchor matrix} \item{\code{offsets}}{The offsets used to enable cell look up in downstream functions} \item{\code{objects.ncell}}{Number of cells in each object in the object.list} \item{\code{sample.tree}}{Sample tree used for ordering multi-dataset integration} }} \concept{objects} Seurat/man/RunMixscape.Rd0000644000176200001440000000603714005656653015035 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/mixscape.R \name{RunMixscape} \alias{RunMixscape} \title{Run Mixscape} \usage{ RunMixscape( object, assay = "PRTB", slot = "scale.data", labels = "gene", nt.class.name = "NT", new.class.name = "mixscape_class", min.de.genes = 5, min.cells = 5, de.assay = "RNA", logfc.threshold = 0.25, iter.num = 10, verbose = FALSE, split.by = NULL, fine.mode = FALSE, fine.mode.labels = "guide_ID", prtb.type = "KO" ) } \arguments{ \item{object}{An object of class Seurat.} \item{assay}{Assay to use for mixscape classification.} \item{slot}{Assay data slot to use.} \item{labels}{metadata column with target gene labels.} \item{nt.class.name}{Classification name of non-targeting gRNA cells.} \item{new.class.name}{Name of mixscape classification to be stored in metadata.} \item{min.de.genes}{Required number of genes that are differentially expressed for method to separate perturbed and non-perturbed cells.} \item{min.cells}{Minimum number of cells in target gene class. If fewer than this many cells are assigned to a target gene class during classification, all are assigned NP.} \item{de.assay}{Assay to use when performing differential expression analysis. Usually RNA.} \item{logfc.threshold}{Limit testing to genes which show, on average, at least X-fold difference (log-scale) between the two groups of cells. Default is 0.25 Increasing logfc.threshold speeds up the function, but can miss weaker signals.} \item{iter.num}{Number of normalmixEM iterations to run if convergence does not occur.} \item{verbose}{Display messages} \item{split.by}{metadata column with experimental condition/cell type classification information. This is meant to be used to account for cases a perturbation is condition/cell type -specific.} \item{fine.mode}{When this is equal to TRUE, DE genes for each target gene class will be calculated for each gRNA separately and pooled into one DE list for calculating the perturbation score of every cell and their subsequent classification.} \item{fine.mode.labels}{metadata column with gRNA ID labels.} \item{prtb.type}{specify type of CRISPR perturbation expected for labeling mixscape classifications. 
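% A minimal sketch of a classification call with the defaults documented above
% (assumptions: 'obj' has a perturbation-signature assay "PRTB", e.g. produced
% by CalcPerturbSig(), and a metadata column "gene" holding target-gene labels):
%   obj <- RunMixscape(object = obj, assay = "PRTB", slot = "scale.data",
%                      labels = "gene", nt.class.name = "NT", de.assay = "RNA")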
Default is KO.} } \value{ Returns Seurat object with with the following information in the meta data and tools slots: \describe{ \item{mixscape_class}{Classification result with cells being either classified as perturbed (KO, by default) or non-perturbed (NP) based on their target gene class.} \item{mixscape_class.global}{Global classification result (perturbed, NP or NT)} \item{p_ko}{Posterior probabilities used to determine if a cell is KO (default). Name of this item will change to match prtb.type parameter setting. (>0.5) or NP} \item{perturbation score}{Perturbation scores for every cell calculated in the first iteration of the function.} } } \description{ Function to identify perturbed and non-perturbed gRNA expressing cells that accounts for multiple treatments/conditions/chemical perturbations. } \concept{mixscape} Seurat/man/Cells.Rd0000644000176200001440000000100314024674706013625 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/objects.R \name{Cells.SCTModel} \alias{Cells.SCTModel} \alias{Cells.SlideSeq} \alias{Cells.STARmap} \alias{Cells.VisiumV1} \title{Get Cell Names} \usage{ \method{Cells}{SCTModel}(x) \method{Cells}{SlideSeq}(x) \method{Cells}{STARmap}(x) \method{Cells}{VisiumV1}(x) } \arguments{ \item{x}{An object} } \description{ Get Cell Names } \seealso{ \code{\link[SeuratObject:Cells]{SeuratObject::Cells}} } \concept{objects} \concept{spatial} Seurat/man/RunMoransI.Rd0000644000176200001440000000073314005656653014631 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/preprocessing.R \name{RunMoransI} \alias{RunMoransI} \title{Compute Moran's I value.} \usage{ RunMoransI(data, pos, verbose = TRUE) } \arguments{ \item{data}{Expression matrix} \item{pos}{Position matrix} \item{verbose}{Display messages/progress} } \description{ Wraps the functionality of the Moran.I function from the ape package. Weights are computed as 1/distance. } \concept{preprocessing} Seurat/man/VisiumV1-class.Rd0000644000176200001440000000137714024674706015367 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/objects.R \docType{class} \name{VisiumV1-class} \alias{VisiumV1-class} \alias{VisiumV1} \title{The VisiumV1 class} \description{ The VisiumV1 class represents spatial information from the 10X Genomics Visium platform } \section{Slots}{ \describe{ \item{\code{image}}{A three-dimensional array with PNG image data, see \code{\link[png]{readPNG}} for more details} \item{\code{scale.factors}}{An object of class \code{\link{scalefactors}}; see \code{\link{scalefactors}} for more information} \item{\code{coordinates}}{A data frame with tissue coordinate information} \item{\code{spot.radius}}{Single numeric value giving the radius of the spots} }} \concept{objects} \concept{spatial} Seurat/man/Read10X_h5.Rd0000644000176200001440000000132614005656653014333 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/preprocessing.R \name{Read10X_h5} \alias{Read10X_h5} \title{Read 10X hdf5 file} \usage{ Read10X_h5(filename, use.names = TRUE, unique.features = TRUE) } \arguments{ \item{filename}{Path to h5 file} \item{use.names}{Label row names with feature names rather than ID numbers.} \item{unique.features}{Make feature names unique (default TRUE)} } \value{ Returns a sparse matrix with rows and columns labeled. If multiple genomes are present, returns a list of sparse matrices (one per genome). 
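% A minimal sketch of reading an hdf5 matrix and building an object from it
% (the file path is hypothetical; CreateSeuratObject() is one typical next step):
%   counts <- Read10X_h5(filename = "filtered_feature_bc_matrix.h5",
%                        use.names = TRUE, unique.features = TRUE)
%   obj <- CreateSeuratObject(counts = counts)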
} \description{ Read count matrix from 10X CellRanger hdf5 file. This can be used to read both scATAC-seq and scRNA-seq matrices. } \concept{preprocessing} Seurat/man/SeuratTheme.Rd0000644000176200001440000000602114005656653015016 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/visualization.R \name{SeuratTheme} \alias{SeuratTheme} \alias{CenterTitle} \alias{DarkTheme} \alias{FontSize} \alias{NoAxes} \alias{NoLegend} \alias{NoGrid} \alias{SeuratAxes} \alias{SpatialTheme} \alias{RestoreLegend} \alias{RotatedAxis} \alias{BoldTitle} \alias{WhiteBackground} \title{Seurat Themes} \usage{ SeuratTheme() CenterTitle(...) DarkTheme(...) FontSize( x.text = NULL, y.text = NULL, x.title = NULL, y.title = NULL, main = NULL, ... ) NoAxes(..., keep.text = FALSE, keep.ticks = FALSE) NoLegend(...) NoGrid(...) SeuratAxes(...) SpatialTheme(...) RestoreLegend(..., position = "right") RotatedAxis(...) BoldTitle(...) WhiteBackground(...) } \arguments{ \item{...}{Extra parameters to be passed to \code{theme}} \item{x.text, y.text}{X and Y axis text sizes} \item{x.title, y.title}{X and Y axis title sizes} \item{main}{Plot title size} \item{keep.text}{Keep axis text} \item{keep.ticks}{Keep axis ticks} \item{position}{A position to restore the legend to} } \value{ A ggplot2 theme object } \description{ Various themes to be applied to ggplot2-based plots \describe{ \item{\code{SeuratTheme}}{The curated Seurat theme, consists of ...} \item{\code{DarkTheme}}{A dark theme, axes and text turn to white, the background becomes black} \item{\code{NoAxes}}{Removes axis lines, text, and ticks} \item{\code{NoLegend}}{Removes the legend} \item{\code{FontSize}}{Sets axis and title font sizes} \item{\code{NoGrid}}{Removes grid lines} \item{\code{SeuratAxes}}{Set Seurat-style axes} \item{\code{SpatialTheme}}{A theme designed for spatial visualizations (eg \code{\link{PolyFeaturePlot}}, \code{\link{PolyDimPlot}})} \item{\code{RestoreLegend}}{Restore a legend after removal} \item{\code{RotatedAxis}}{Rotate X axis text 45 degrees} \item{\code{BoldTitle}}{Enlarges and emphasizes the title} } } \examples{ # Generate a plot with a dark theme library(ggplot2) df <- data.frame(x = rnorm(n = 100, mean = 20, sd = 2), y = rbinom(n = 100, size = 100, prob = 0.2)) p <- ggplot(data = df, mapping = aes(x = x, y = y)) + geom_point(mapping = aes(color = 'red')) p + DarkTheme(legend.position = 'none') # Generate a plot with no axes library(ggplot2) df <- data.frame(x = rnorm(n = 100, mean = 20, sd = 2), y = rbinom(n = 100, size = 100, prob = 0.2)) p <- ggplot(data = df, mapping = aes(x = x, y = y)) + geom_point(mapping = aes(color = 'red')) p + NoAxes() # Generate a plot with no legend library(ggplot2) df <- data.frame(x = rnorm(n = 100, mean = 20, sd = 2), y = rbinom(n = 100, size = 100, prob = 0.2)) p <- ggplot(data = df, mapping = aes(x = x, y = y)) + geom_point(mapping = aes(color = 'red')) p + NoLegend() # Generate a plot with no grid lines library(ggplot2) df <- data.frame(x = rnorm(n = 100, mean = 20, sd = 2), y = rbinom(n = 100, size = 100, prob = 0.2)) p <- ggplot(data = df, mapping = aes(x = x, y = y)) + geom_point(mapping = aes(color = 'red')) p + NoGrid() } \seealso{ \code{\link[ggplot2]{theme}} } \concept{visualization} Seurat/man/ProjectUMAP.Rd0000644000176200001440000000611314156670503014660 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/generics.R, R/dimensional_reduction.R \name{ProjectUMAP} \alias{ProjectUMAP} 
\alias{ProjectUMAP.default} \alias{ProjectUMAP.DimReduc} \alias{ProjectUMAP.Seurat} \title{Project query into UMAP coordinates of a reference} \usage{ ProjectUMAP(query, ...) \method{ProjectUMAP}{default}( query, query.dims = NULL, reference, reference.dims = NULL, k.param = 30, nn.method = "annoy", n.trees = 50, annoy.metric = "cosine", l2.norm = FALSE, cache.index = TRUE, index = NULL, neighbor.name = "query_ref.nn", reduction.model, ... ) \method{ProjectUMAP}{DimReduc}( query, query.dims = NULL, reference, reference.dims = NULL, k.param = 30, nn.method = "annoy", n.trees = 50, annoy.metric = "cosine", l2.norm = FALSE, cache.index = TRUE, index = NULL, neighbor.name = "query_ref.nn", reduction.model, ... ) \method{ProjectUMAP}{Seurat}( query, query.reduction, query.dims = NULL, reference, reference.reduction, reference.dims = NULL, k.param = 30, nn.method = "annoy", n.trees = 50, annoy.metric = "cosine", l2.norm = FALSE, cache.index = TRUE, index = NULL, neighbor.name = "query_ref.nn", reduction.model, reduction.name = "ref.umap", reduction.key = "refUMAP_", ... ) } \arguments{ \item{query}{Query dataset} \item{...}{Additional parameters to \code{\link{RunUMAP}}} \item{query.dims}{Dimensions (columns) to use from query} \item{reference}{Reference dataset} \item{reference.dims}{Dimensions (columns) to use from reference} \item{k.param}{Defines k for the k-nearest neighbor algorithm} \item{nn.method}{Method for nearest neighbor finding. Options include: rann, annoy} \item{n.trees}{More trees gives higher precision when using annoy approximate nearest neighbor search} \item{annoy.metric}{Distance metric for annoy. Options include: euclidean, cosine, manhattan, and hamming} \item{l2.norm}{Take L2Norm of the data} \item{cache.index}{Include cached index in returned Neighbor object (only relevant if return.neighbor = TRUE)} \item{index}{Precomputed index. Useful if querying new data against existing index to avoid recomputing.} \item{neighbor.name}{Name to store neighbor information in the query} \item{reduction.model}{\code{DimReduc} object that contains the umap model} \item{query.reduction}{Name of reduction to use from the query for neighbor finding} \item{reference.reduction}{Name of reduction to use from the reference for neighbor finding} \item{reduction.name}{Name of projected UMAP to store in the query} \item{reduction.key}{Value for the projected UMAP key} } \description{ This function will take a query dataset and project it into the coordinates of a provided reference UMAP. This is essentially a wrapper around two steps: \itemize{ \item{FindNeighbors - Find the nearest reference cell neighbors and their distances for each query cell.} \item{RunUMAP - Perform umap projection by providing the neighbor set calculated above and the umap model previously computed in the reference.} } } \concept{dimensional_reduction} Seurat/man/FindNeighbors.Rd0000644000176200001440000001271414024674706015317 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/generics.R, R/clustering.R \name{FindNeighbors} \alias{FindNeighbors} \alias{FindNeighbors.default} \alias{FindNeighbors.Assay} \alias{FindNeighbors.dist} \alias{FindNeighbors.Seurat} \title{(Shared) Nearest-neighbor graph construction} \usage{ FindNeighbors(object, ...) 
\method{FindNeighbors}{default}( object, query = NULL, distance.matrix = FALSE, k.param = 20, return.neighbor = FALSE, compute.SNN = !return.neighbor, prune.SNN = 1/15, nn.method = "annoy", n.trees = 50, annoy.metric = "euclidean", nn.eps = 0, verbose = TRUE, force.recalc = FALSE, l2.norm = FALSE, cache.index = FALSE, index = NULL, ... ) \method{FindNeighbors}{Assay}( object, features = NULL, k.param = 20, return.neighbor = FALSE, compute.SNN = !return.neighbor, prune.SNN = 1/15, nn.method = "annoy", n.trees = 50, annoy.metric = "euclidean", nn.eps = 0, verbose = TRUE, force.recalc = FALSE, l2.norm = FALSE, cache.index = FALSE, ... ) \method{FindNeighbors}{dist}( object, k.param = 20, return.neighbor = FALSE, compute.SNN = !return.neighbor, prune.SNN = 1/15, nn.method = "annoy", n.trees = 50, annoy.metric = "euclidean", nn.eps = 0, verbose = TRUE, force.recalc = FALSE, l2.norm = FALSE, cache.index = FALSE, ... ) \method{FindNeighbors}{Seurat}( object, reduction = "pca", dims = 1:10, assay = NULL, features = NULL, k.param = 20, return.neighbor = FALSE, compute.SNN = !return.neighbor, prune.SNN = 1/15, nn.method = "annoy", n.trees = 50, annoy.metric = "euclidean", nn.eps = 0, verbose = TRUE, force.recalc = FALSE, do.plot = FALSE, graph.name = NULL, l2.norm = FALSE, cache.index = FALSE, ... ) } \arguments{ \item{object}{An object} \item{...}{Arguments passed to other methods} \item{query}{Matrix of data to query against object. If missing, defaults to object.} \item{distance.matrix}{Boolean value of whether the provided matrix is a distance matrix; note, for objects of class \code{dist}, this parameter will be set automatically} \item{k.param}{Defines k for the k-nearest neighbor algorithm} \item{return.neighbor}{Return result as \code{\link{Neighbor}} object. Not used with distance matrix input.} \item{compute.SNN}{also compute the shared nearest neighbor graph} \item{prune.SNN}{Sets the cutoff for acceptable Jaccard index when computing the neighborhood overlap for the SNN construction. Any edges with values less than or equal to this will be set to 0 and removed from the SNN graph. Essentially sets the stringency of pruning (0 --- no pruning, 1 --- prune everything).} \item{nn.method}{Method for nearest neighbor finding. Options include: rann, annoy} \item{n.trees}{More trees gives higher precision when using annoy approximate nearest neighbor search} \item{annoy.metric}{Distance metric for annoy. Options include: euclidean, cosine, manhattan, and hamming} \item{nn.eps}{Error bound when performing nearest neighbor seach using RANN; default of 0.0 implies exact nearest neighbor search} \item{verbose}{Whether or not to print output to the console} \item{force.recalc}{Force recalculation of (S)NN.} \item{l2.norm}{Take L2Norm of the data} \item{cache.index}{Include cached index in returned Neighbor object (only relevant if return.neighbor = TRUE)} \item{index}{Precomputed index. Useful if querying new data against existing index to avoid recomputing.} \item{features}{Features to use as input for building the (S)NN; used only when \code{dims} is \code{NULL}} \item{reduction}{Reduction to use as input for building the (S)NN} \item{dims}{Dimensions of reduction to use as input} \item{assay}{Assay to use in construction of (S)NN; used only when \code{dims} is \code{NULL}} \item{do.plot}{Plot SNN graph on tSNE coordinates} \item{graph.name}{Optional naming parameter for stored (S)NN graph (or Neighbor object, if return.neighbor = TRUE). Default is assay.name_(s)nn. 
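% A minimal sketch of naming the stored graphs explicitly (assumes PCA has been
% computed on 'pbmc_small'; the two-name form described in the rest of this
% entry stores the NN graph first and the SNN graph second):
%   pbmc_small <- FindNeighbors(pbmc_small, reduction = "pca", dims = 1:10,
%                               graph.name = c("pca_nn", "pca_snn"))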
To store both the neighbor graph and the shared nearest neighbor (SNN) graph, you must supply a vector containing two names to the \code{graph.name} parameter. The first element in the vector will be used to store the nearest neighbor (NN) graph, and the second element used to store the SNN graph. If only one name is supplied, only the NN graph is stored.} } \value{ This function can either return a \code{\link{Neighbor}} object with the KNN information or a list of \code{\link{Graph}} objects with the KNN and SNN depending on the settings of \code{return.neighbor} and \code{compute.SNN}. When running on a \code{\link{Seurat}} object, this returns the \code{\link{Seurat}} object with the Graphs or Neighbor objects stored in their respective slots. Names of the Graph or Neighbor object can be found with \code{\link{Graphs}} or \code{\link{Neighbors}}. } \description{ Computes the \code{k.param} nearest neighbors for a given dataset. Can also optionally (via \code{compute.SNN}), construct a shared nearest neighbor graph by calculating the neighborhood overlap (Jaccard index) between every cell and its \code{k.param} nearest neighbors. } \examples{ data("pbmc_small") pbmc_small # Compute an SNN on the gene expression level pbmc_small <- FindNeighbors(pbmc_small, features = VariableFeatures(object = pbmc_small)) # More commonly, we build the SNN on a dimensionally reduced form of the data # such as the first 10 principle components. pbmc_small <- FindNeighbors(pbmc_small, reduction = "pca", dims = 1:10) } \concept{clustering} Seurat/man/Seurat-class.Rd0000644000176200001440000000066414005656653015145 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/reexports.R \docType{class} \name{Seurat-class} \alias{Seurat-class} \title{The Seurat Class} \description{ The Seurat object is a representation of single-cell expression data for R; for more details, please see the documentation in \code{\link[SeuratObject:Seurat]{SeuratObject}} } \seealso{ \code{\link[SeuratObject:Seurat]{SeuratObject::Seurat-class}} } Seurat/man/GroupCorrelationPlot.Rd0000644000176200001440000000152214005656653016726 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/visualization.R \name{GroupCorrelationPlot} \alias{GroupCorrelationPlot} \title{Boxplot of correlation of a variable (e.g. number of UMIs) with expression data} \usage{ GroupCorrelationPlot( object, assay = NULL, feature.group = "feature.grp", cor = "nCount_RNA_cor" ) } \arguments{ \item{object}{Seurat object} \item{assay}{Assay where the feature grouping info and correlations are stored} \item{feature.group}{Name of the column in meta.features where the feature grouping info is stored} \item{cor}{Name of the column in meta.features where correlation info is stored} } \value{ Returns a ggplot boxplot of correlations split by group } \description{ Boxplot of correlation of a variable (e.g. 
number of UMIs) with expression data } \concept{visualization} Seurat/man/LoadSTARmap.Rd0000644000176200001440000000201114005656653014632 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/preprocessing.R \name{LoadSTARmap} \alias{LoadSTARmap} \title{Load STARmap data} \usage{ LoadSTARmap( data.dir, counts.file = "cell_barcode_count.csv", gene.file = "genes.csv", qhull.file = "qhulls.tsv", centroid.file = "centroids.tsv", assay = "Spatial", image = "image" ) } \arguments{ \item{data.dir}{location of data directory that contains the counts matrix, gene name, qhull, and centroid files.} \item{counts.file}{name of file containing the counts matrix (csv)} \item{gene.file}{name of file containing the gene names (csv)} \item{qhull.file}{name of file containing the hull coordinates (tsv)} \item{centroid.file}{name of file containing the centroid positions (tsv)} \item{assay}{Name of assay to associate spatial data to} \item{image}{Name of "image" object storing spatial coordinates} } \value{ A \code{\link{Seurat}} object } \description{ Load STARmap data } \seealso{ \code{\link{STARmap}} } \concept{preprocessing} Seurat/man/DoHeatmap.Rd0000644000176200001440000000462514005656653014442 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/visualization.R \name{DoHeatmap} \alias{DoHeatmap} \title{Feature expression heatmap} \usage{ DoHeatmap( object, features = NULL, cells = NULL, group.by = "ident", group.bar = TRUE, group.colors = NULL, disp.min = -2.5, disp.max = NULL, slot = "scale.data", assay = NULL, label = TRUE, size = 5.5, hjust = 0, angle = 45, raster = TRUE, draw.lines = TRUE, lines.width = NULL, group.bar.height = 0.02, combine = TRUE ) } \arguments{ \item{object}{Seurat object} \item{features}{A vector of features to plot, defaults to \code{VariableFeatures(object = object)}} \item{cells}{A vector of cells to plot} \item{group.by}{A vector of variables to group cells by; pass 'ident' to group by cell identity classes} \item{group.bar}{Add a color bar showing group status for cells} \item{group.colors}{Colors to use for the color bar} \item{disp.min}{Minimum display value (all values below are clipped)} \item{disp.max}{Maximum display value (all values above are clipped); defaults to 2.5 if \code{slot} is 'scale.data', 6 otherwise} \item{slot}{Data slot to use, choose from 'raw.data', 'data', or 'scale.data'} \item{assay}{Assay to pull from} \item{label}{Label the cell identies above the color bar} \item{size}{Size of text above color bar} \item{hjust}{Horizontal justification of text above color bar} \item{angle}{Angle of text above color bar} \item{raster}{If true, plot with geom_raster, else use geom_tile. geom_raster may look blurry on some viewing applications such as Preview due to how the raster is interpolated. Set this to FALSE if you are encountering that issue (note that plots may take longer to produce/render).} \item{draw.lines}{Include white lines to separate the groups} \item{lines.width}{Integer number to adjust the width of the separating white lines. Corresponds to the number of "cells" between each group.} \item{group.bar.height}{Scale the height of the color bar} \item{combine}{Combine plots into a single \code{\link[patchwork]{patchwork}ed} ggplot object. 
If \code{FALSE}, return a list of ggplot objects} } \value{ A \code{\link[patchwork]{patchwork}ed} ggplot object if \code{combine = TRUE}; otherwise, a list of ggplot objects } \description{ Draws a heatmap of single cell feature expression. } \examples{ data("pbmc_small") DoHeatmap(object = pbmc_small) } \concept{visualization} Seurat/man/CalculateBarcodeInflections.Rd0000644000176200001440000000455614005656653020156 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/preprocessing.R \name{CalculateBarcodeInflections} \alias{CalculateBarcodeInflections} \title{Calculate the Barcode Distribution Inflection} \usage{ CalculateBarcodeInflections( object, barcode.column = "nCount_RNA", group.column = "orig.ident", threshold.low = NULL, threshold.high = NULL ) } \arguments{ \item{object}{Seurat object} \item{barcode.column}{Column to use as proxy for barcodes ("nCount_RNA" by default)} \item{group.column}{Column to group by ("orig.ident" by default)} \item{threshold.low}{Ignore barcodes of rank below this threshold in inflection calculation} \item{threshold.high}{Ignore barcodes of rank above thisf threshold in inflection calculation} } \value{ Returns Seurat object with a new list in the `tools` slot, `CalculateBarcodeInflections` with values: * `barcode_distribution` - contains the full barcode distribution across the entire dataset * `inflection_points` - the calculated inflection points within the thresholds * `threshold_values` - the provided (or default) threshold values to search within for inflections * `cells_pass` - the cells that pass the inflection point calculation } \description{ This function calculates an adaptive inflection point ("knee") of the barcode distribution for each sample group. This is useful for determining a threshold for removing low-quality samples. } \details{ The function operates by calculating the slope of the barcode number vs. rank distribution, and then finding the point at which the distribution changes most steeply (the "knee"). Of note, this calculation often must be restricted as to the range at which it performs, so `threshold` parameters are provided to restrict the range of the calculation based on the rank of the barcodes. [BarcodeInflectionsPlot()] is provided as a convenience function to visualize and test different thresholds and thus provide more sensical end results. See [BarcodeInflectionsPlot()] to visualize the calculated inflection points and [SubsetByBarcodeInflections()] to subsequently subset the Seurat object. } \examples{ data("pbmc_small") CalculateBarcodeInflections(pbmc_small, group.column = 'groups') } \seealso{ \code{\link{BarcodeInflectionsPlot}} \code{\link{SubsetByBarcodeInflections}} } \author{ Robert A. Amezquita, \email{robert.amezquita@fredhutch.org} } \concept{preprocessing} Seurat/man/CollapseSpeciesExpressionMatrix.Rd0000644000176200001440000000275214005656653021122 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/utilities.R \name{CollapseSpeciesExpressionMatrix} \alias{CollapseSpeciesExpressionMatrix} \title{Slim down a multi-species expression matrix, when only one species is primarily of interenst.} \usage{ CollapseSpeciesExpressionMatrix( object, prefix = "HUMAN_", controls = "MOUSE_", ncontrols = 100 ) } \arguments{ \item{object}{A UMI count matrix. Should contain rownames that start with the ensuing arguments prefix.1 or prefix.2} \item{prefix}{The prefix denoting rownames for the species of interest. Default is "HUMAN_". 
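% A sketch making the arguments documented here explicit (the input matrix
% name 'cbmc.rna' is illustrative and matches the example further down):
%   collapsed <- CollapseSpeciesExpressionMatrix(cbmc.rna, prefix = "HUMAN_",
%                                                controls = "MOUSE_", ncontrols = 100)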
These rownames will have this prefix removed in the returned matrix.} \item{controls}{The prefix denoting rownames for the species of 'negative control' cells. Default is "MOUSE_".} \item{ncontrols}{How many of the most highly expressed (average) negative control features (by default, 100 mouse genes), should be kept? All other rownames starting with prefix.2 are discarded.} } \value{ A UMI count matrix. Rownames that started with \code{prefix} have this prefix discarded. For rownames starting with \code{controls}, only the \code{ncontrols} most highly expressed features are kept, and the prefix is kept. All other rows are retained. } \description{ Valuable for CITE-seq analyses, where we typically spike in rare populations of 'negative control' cells from a different species. } \examples{ \dontrun{ cbmc.rna.collapsed <- CollapseSpeciesExpressionMatrix(cbmc.rna) } } \concept{utilities} Seurat/man/PCASigGenes.Rd0000644000176200001440000000206414005656653014623 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/dimensional_reduction.R \name{PCASigGenes} \alias{PCASigGenes} \title{Significant genes from a PCA} \usage{ PCASigGenes( object, pcs.use, pval.cut = 0.1, use.full = FALSE, max.per.pc = NULL ) } \arguments{ \item{object}{Seurat object} \item{pcs.use}{PCS to use.} \item{pval.cut}{P-value cutoff} \item{use.full}{Use the full list of genes (from the projected PCA). Assumes that \code{ProjectDim} has been run. Currently, must be set to FALSE.} \item{max.per.pc}{Maximum number of genes to return per PC. Used to avoid genes from one PC dominating the entire analysis.} } \value{ A vector of genes whose p-values are statistically significant for at least one of the given PCs. } \description{ Returns a set of genes, based on the JackStraw analysis, that have statistically significant associations with a set of PCs. } \examples{ data("pbmc_small") PCASigGenes(pbmc_small, pcs.use = 1:2) } \seealso{ \code{\link{ProjectDim}} \code{\link{JackStraw}} } \concept{dimensional_reduction} Seurat/man/RunCCA.Rd0000644000176200001440000000476714005656653013662 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/generics.R, R/dimensional_reduction.R \name{RunCCA} \alias{RunCCA} \alias{RunCCA.default} \alias{RunCCA.Seurat} \title{Perform Canonical Correlation Analysis} \usage{ RunCCA(object1, object2, ...) \method{RunCCA}{default}( object1, object2, standardize = TRUE, num.cc = 20, seed.use = 42, verbose = FALSE, ... ) \method{RunCCA}{Seurat}( object1, object2, assay1 = NULL, assay2 = NULL, num.cc = 20, features = NULL, renormalize = FALSE, rescale = FALSE, compute.gene.loadings = TRUE, add.cell.id1 = NULL, add.cell.id2 = NULL, verbose = TRUE, ... ) } \arguments{ \item{object1}{First Seurat object} \item{object2}{Second Seurat object.} \item{...}{Extra parameters (passed onto MergeSeurat in case with two objects passed, passed onto ScaleData in case with single object and rescale.groups set to TRUE)} \item{standardize}{Standardize matrices - scales columns to have unit variance and mean 0} \item{num.cc}{Number of canonical vectors to calculate} \item{seed.use}{Random seed to set. If NULL, does not set a seed} \item{verbose}{Show progress messages} \item{assay1, assay2}{Assays to pull from in the first and second objects, respectively} \item{features}{Set of genes to use in CCA. 
Default is the union of both the variable features sets present in both objects.} \item{renormalize}{Renormalize raw data after merging the objects. If FALSE, merge the data matrices also.} \item{rescale}{Rescale the datasets prior to CCA. If FALSE, uses existing data in the scale data slots.} \item{compute.gene.loadings}{Also compute the gene loadings. NOTE - this will scale every gene in the dataset which may impose a high memory cost.} \item{add.cell.id1, add.cell.id2}{Add ...} } \value{ Returns a combined Seurat object with the CCA results stored. } \description{ Runs a canonical correlation analysis using a diagonal implementation of CCA. For details about stored CCA calculation parameters, see \code{PrintCCAParams}. } \examples{ data("pbmc_small") pbmc_small # As CCA requires two datasets, we will split our test object into two just for this example pbmc1 <- subset(pbmc_small, cells = colnames(pbmc_small)[1:40]) pbmc2 <- subset(pbmc_small, cells = colnames(x = pbmc_small)[41:80]) pbmc1[["group"]] <- "group1" pbmc2[["group"]] <- "group2" pbmc_cca <- RunCCA(object1 = pbmc1, object2 = pbmc2) # Print results print(x = pbmc_cca[["cca"]]) } \seealso{ \code{\link{merge.Seurat}} } \concept{dimensional_reduction} Seurat/man/GetIntegrationData.Rd0000644000176200001440000000100014005656653016275 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/objects.R \name{GetIntegrationData} \alias{GetIntegrationData} \title{Get integration data} \usage{ GetIntegrationData(object, integration.name, slot) } \arguments{ \item{object}{Seurat object} \item{integration.name}{Name of integration object} \item{slot}{Which slot in integration object to get} } \value{ Returns data from the requested slot within the integrated object } \description{ Get integration data } \concept{objects} Seurat/man/DiscretePalette.Rd0000644000176200001440000000175714005656653015664 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/visualization.R \name{DiscretePalette} \alias{DiscretePalette} \title{Discrete colour palettes from the pals package} \usage{ DiscretePalette(n, palette = NULL) } \arguments{ \item{n}{Number of colours to be generated.} \item{palette}{Options are "alphabet", "alphabet2", "glasbey", "polychrome", and "stepped". Can be omitted and the function will use the one based on the requested n.} } \value{ A vector of colors } \description{ These are included here because pals depends on a number of compiled packages, and this can lead to increases in run time for Travis, and generally should be avoided when possible. } \details{ These palettes are a much better default for data with many classes than the default ggplot2 palette. Many thanks to Kevin Wright for writing the pals package. Taken from the pals package (Licence: GPL-3). 
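% A minimal usage sketch (assumes a clustered object; passing the palette to
% the 'cols' argument of DimPlot() is one typical use):
%   cols <- DiscretePalette(n = length(levels(pbmc_small)), palette = "polychrome")
%   DimPlot(pbmc_small, cols = cols)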
\url{https://cran.r-project.org/package=pals} Credit: Kevin Wright } \concept{visualization} Seurat/man/SetIntegrationData.Rd0000644000176200001440000000102314005656653016316 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/objects.R \name{SetIntegrationData} \alias{SetIntegrationData} \title{Set integration data} \usage{ SetIntegrationData(object, integration.name, slot, new.data) } \arguments{ \item{object}{Seurat object} \item{integration.name}{Name of integration object} \item{slot}{Which slot in integration object to set} \item{new.data}{New data to insert} } \value{ Returns a \code{\link{Seurat}} object } \description{ Set integration data } \concept{objects} Seurat/man/RelativeCounts.Rd0000644000176200001440000000143214005656653015540 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/preprocessing.R \name{RelativeCounts} \alias{RelativeCounts} \title{Normalize raw data to fractions} \usage{ RelativeCounts(data, scale.factor = 1, verbose = TRUE) } \arguments{ \item{data}{Matrix with the raw count data} \item{scale.factor}{Scale the result. Default is 1} \item{verbose}{Print progress} } \value{ Returns a matrix with the relative counts } \description{ Normalize count data to relative counts per cell by dividing by the total per cell. Optionally use a scale factor, e.g. for counts per million (CPM) use \code{scale.factor = 1e6}. } \examples{ mat <- matrix(data = rbinom(n = 25, size = 5, prob = 0.2), nrow = 5) mat mat_norm <- RelativeCounts(data = mat) mat_norm } \concept{preprocessing} Seurat/man/TopFeatures.Rd0000644000176200001440000000172614005656653015040 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/objects.R \name{TopFeatures} \alias{TopFeatures} \title{Find features with highest scores for a given dimensional reduction technique} \usage{ TopFeatures( object, dim = 1, nfeatures = 20, projected = FALSE, balanced = FALSE, ... ) } \arguments{ \item{object}{DimReduc object} \item{dim}{Dimension to use} \item{nfeatures}{Number of features to return} \item{projected}{Use the projected feature loadings} \item{balanced}{Return an equal number of features with both + and - scores.} \item{...}{Extra parameters passed to \code{\link{Loadings}}} } \value{ Returns a vector of features } \description{ Return a list of features with the strongest contribution to a set of components } \examples{ data("pbmc_small") pbmc_small TopFeatures(object = pbmc_small[["pca"]], dim = 1) # After projection: TopFeatures(object = pbmc_small[["pca"]], dim = 1, projected = TRUE) } \concept{objects} Seurat/man/RunSLSI.Rd0000644000176200001440000000336114156670503014030 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/generics.R, R/dimensional_reduction.R \name{RunSLSI} \alias{RunSLSI} \alias{RunSLSI.default} \alias{RunSLSI.Assay} \alias{RunSLSI.Seurat} \title{Run Supervised Latent Semantic Indexing} \usage{ RunSLSI(object, ...) \method{RunSLSI}{default}( object, assay = NULL, n = 50, reduction.key = "SLSI_", graph = NULL, verbose = TRUE, seed.use = 42, ... ) \method{RunSLSI}{Assay}( object, assay = NULL, features = NULL, n = 50, reduction.key = "SLSI_", graph = NULL, verbose = TRUE, seed.use = 42, ... ) \method{RunSLSI}{Seurat}( object, assay = NULL, features = NULL, n = 50, reduction.name = "slsi", reduction.key = "SLSI_", graph = NULL, verbose = TRUE, seed.use = 42, ... 
) } \arguments{ \item{object}{An object} \item{...}{Arguments passed to \code{irlba::irlba}} \item{assay}{Name of Assay SLSI is being run on} \item{n}{Total number of SLSI components to compute and store} \item{reduction.key}{dimensional reduction key, specifies the string before the number for the dimension names} \item{graph}{Cell-cell graph used to supervise the SLSI} \item{verbose}{Display messages} \item{seed.use}{Set a random seed. Setting NULL will not set a seed.} \item{features}{Features to compute SLSI on. If NULL, SLSI will be run using the variable features for the Assay.} \item{reduction.name}{dimensional reduction name} } \value{ Returns Seurat object with the SLSI calculation stored in the reductions slot } \description{ Run a supervised LSI (SLSI) dimensionality reduction supervised by a cell-cell kernel. SLSI is used to capture a linear transformation of peaks that maximizes its dependency on the given cell-cell kernel. } \concept{dimensional_reduction} Seurat/man/IFeaturePlot.Rd0000644000176200001440000000144514005656653015140 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/visualization.R \name{IFeaturePlot} \alias{IFeaturePlot} \title{Visualize features in dimensional reduction space interactively} \usage{ IFeaturePlot(object, feature, dims = c(1, 2), reduction = NULL, slot = "data") } \arguments{ \item{object}{Seurat object} \item{feature}{Feature to plot} \item{dims}{Dimensions to plot, must be a two-length numeric vector specifying x- and y-dimensions} \item{reduction}{Which dimensionality reduction to use. If not specified, first searches for umap, then tsne, then pca} \item{slot}{Which slot to pull expression data from?} } \value{ Returns the final plot as a ggplot object } \description{ Visualize features in dimensional reduction space interactively } \concept{visualization} Seurat/man/ExpVar.Rd0000644000176200001440000000065514005656653014004 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/utilities.R \name{ExpVar} \alias{ExpVar} \title{Calculate the variance of logged values} \usage{ ExpVar(x) } \arguments{ \item{x}{A vector of values} } \value{ Returns the variance in log-space } \description{ Calculate variance of logged values in non-log space (return answer in log-space) } \examples{ ExpVar(x = c(1, 2, 3)) } \concept{utilities} Seurat/man/SelectIntegrationFeatures.Rd0000644000176200001440000000353714005656653017713 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/integration.R \name{SelectIntegrationFeatures} \alias{SelectIntegrationFeatures} \title{Select integration features} \usage{ SelectIntegrationFeatures( object.list, nfeatures = 2000, assay = NULL, verbose = TRUE, fvf.nfeatures = 2000, ... ) } \arguments{ \item{object.list}{List of Seurat objects} \item{nfeatures}{Number of features to return} \item{assay}{Name or vector of assay names (one for each object) from which to pull the variable features.} \item{verbose}{Print messages} \item{fvf.nfeatures}{nfeatures for \code{\link{FindVariableFeatures}}. Used if \code{VariableFeatures} have not been set for any object in \code{object.list}.} \item{...}{Additional parameters to \code{\link{FindVariableFeatures}}} } \value{ A vector of selected features } \description{ Choose the features to use when integrating multiple datasets. This function ranks features by the number of datasets they are deemed variable in, breaking ties by the median variable feature rank across datasets. 
It returns the top scoring features by this ranking. } \details{ If for any assay in the list, \code{\link{FindVariableFeatures}} hasn't been run, this method will try to run it using the \code{fvf.nfeatures} parameter and any additional ones specified through the \dots. } \examples{ \dontrun{ # to install the SeuratData package see https://github.com/satijalab/seurat-data library(SeuratData) data("panc8") # panc8 is a merged Seurat object containing 8 separate pancreas datasets # split the object by dataset and take the first 2 pancreas.list <- SplitObject(panc8, split.by = "tech")[1:2] # perform SCTransform normalization pancreas.list <- lapply(X = pancreas.list, FUN = SCTransform) # select integration features features <- SelectIntegrationFeatures(pancreas.list) } } \concept{integration} Seurat/man/CustomPalette.Rd0000644000176200001440000000223514005656653015364 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/visualization.R \name{BlackAndWhite} \alias{BlackAndWhite} \alias{BlueAndRed} \alias{CustomPalette} \alias{PurpleAndYellow} \title{Create a custom color palette} \usage{ BlackAndWhite(mid = NULL, k = 50) BlueAndRed(k = 50) CustomPalette(low = "white", high = "red", mid = NULL, k = 50) PurpleAndYellow(k = 50) } \arguments{ \item{mid}{middle color. Optional.} \item{k}{number of steps (colors levels) to include between low and high values} \item{low}{low color} \item{high}{high color} } \value{ A color palette for plotting } \description{ Creates a custom color palette based on low, middle, and high color values } \examples{ df <- data.frame(x = rnorm(n = 100, mean = 20, sd = 2), y = rbinom(n = 100, size = 100, prob = 0.2)) plot(df, col = BlackAndWhite()) df <- data.frame(x = rnorm(n = 100, mean = 20, sd = 2), y = rbinom(n = 100, size = 100, prob = 0.2)) plot(df, col = BlueAndRed()) myPalette <- CustomPalette() myPalette df <- data.frame(x = rnorm(n = 100, mean = 20, sd = 2), y = rbinom(n = 100, size = 100, prob = 0.2)) plot(df, col = PurpleAndYellow()) } \concept{visualization} Seurat/man/FindIntegrationAnchors.Rd0000644000176200001440000001344114152476164017176 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/integration.R \name{FindIntegrationAnchors} \alias{FindIntegrationAnchors} \title{Find integration anchors} \usage{ FindIntegrationAnchors( object.list = NULL, assay = NULL, reference = NULL, anchor.features = 2000, scale = TRUE, normalization.method = c("LogNormalize", "SCT"), sct.clip.range = NULL, reduction = c("cca", "rpca", "rlsi"), l2.norm = TRUE, dims = 1:30, k.anchor = 5, k.filter = 200, k.score = 30, max.features = 200, nn.method = "annoy", n.trees = 50, eps = 0, verbose = TRUE ) } \arguments{ \item{object.list}{A list of \code{\link{Seurat}} objects between which to find anchors for downstream integration.} \item{assay}{A vector of assay names specifying which assay to use when constructing anchors. If NULL, the current default assay for each object is used.} \item{reference}{A vector specifying the object/s to be used as a reference during integration. If NULL (default), all pairwise anchors are found (no reference/s). If not NULL, the corresponding objects in \code{object.list} will be used as references. When using a set of specified references, anchors are first found between each query and each reference. The references are then integrated through pairwise integration. 
Each query is then mapped to the integrated reference.} \item{anchor.features}{Can be either: \itemize{ \item{A numeric value. This will call \code{\link{SelectIntegrationFeatures}} to select the provided number of features to be used in anchor finding} \item{A vector of features to be used as input to the anchor finding process} }} \item{scale}{Whether or not to scale the features provided. Only set to FALSE if you have previously scaled the features you want to use for each object in the object.list} \item{normalization.method}{Name of normalization method used: LogNormalize or SCT} \item{sct.clip.range}{Numeric of length two specifying the min and max values the Pearson residual will be clipped to} \item{reduction}{Dimensional reduction to perform when finding anchors. Can be one of: \itemize{ \item{cca: Canonical correlation analysis} \item{rpca: Reciprocal PCA} \item{rlsi: Reciprocal LSI} }} \item{l2.norm}{Perform L2 normalization on the CCA cell embeddings after dimensional reduction} \item{dims}{Which dimensions to use from the CCA to specify the neighbor search space} \item{k.anchor}{How many neighbors (k) to use when picking anchors} \item{k.filter}{How many neighbors (k) to use when filtering anchors} \item{k.score}{How many neighbors (k) to use when scoring anchors} \item{max.features}{The maximum number of features to use when specifying the neighborhood search space in the anchor filtering} \item{nn.method}{Method for nearest neighbor finding. Options include: rann, annoy} \item{n.trees}{More trees gives higher precision when using annoy approximate nearest neighbor search} \item{eps}{Error bound on the neighbor finding algorithm (from RANN/Annoy)} \item{verbose}{Print progress bars and output} } \value{ Returns an \code{\link{AnchorSet}} object that can be used as input to \code{\link{IntegrateData}}. } \description{ Find a set of anchors between a list of \code{\link{Seurat}} objects. These anchors can later be used to integrate the objects using the \code{\link{IntegrateData}} function. } \details{ The main steps of this procedure are outlined below. For a more detailed description of the methodology, please see Stuart, Butler, et al Cell 2019: \doi{10.1016/j.cell.2019.05.031}; \doi{10.1101/460147} First, determine anchor.features if not explicitly specified using \code{\link{SelectIntegrationFeatures}}. Then for all pairwise combinations of reference and query datasets: \itemize{ \item{Perform dimensional reduction on the dataset pair as specified via the \code{reduction} parameter. If \code{l2.norm} is set to \code{TRUE}, perform L2 normalization of the embedding vectors.} \item{Identify anchors - pairs of cells from each dataset that are contained within each other's neighborhoods (also known as mutual nearest neighbors).} \item{Filter low confidence anchors to ensure anchors in the low dimension space are in broad agreement with the high dimensional measurements. This is done by looking at the neighbors of each query cell in the reference dataset using \code{max.features} to define this space. If the reference cell isn't found within the first \code{k.filter} neighbors, remove the anchor.} \item{Assign each remaining anchor a score. For each anchor cell, determine the nearest \code{k.score} anchors within its own dataset and within its pair's dataset. Based on these neighborhoods, construct an overall neighbor graph and then compute the shared neighbor overlap between anchor and query cells (analogous to an SNN graph). 
We use the 0.01 and 0.90 quantiles on these scores to dampen outlier effects and rescale to range between 0-1.} } } \examples{ \dontrun{ # to install the SeuratData package see https://github.com/satijalab/seurat-data library(SeuratData) data("panc8") # panc8 is a merged Seurat object containing 8 separate pancreas datasets # split the object by dataset pancreas.list <- SplitObject(panc8, split.by = "tech") # perform standard preprocessing on each object for (i in 1:length(pancreas.list)) { pancreas.list[[i]] <- NormalizeData(pancreas.list[[i]], verbose = FALSE) pancreas.list[[i]] <- FindVariableFeatures( pancreas.list[[i]], selection.method = "vst", nfeatures = 2000, verbose = FALSE ) } # find anchors anchors <- FindIntegrationAnchors(object.list = pancreas.list) # integrate data integrated <- IntegrateData(anchorset = anchors) } } \references{ Stuart T, Butler A, et al. Comprehensive Integration of Single-Cell Data. Cell. 2019;177:1888-1902 \doi{10.1016/j.cell.2019.05.031} } \concept{integration} Seurat/man/CellScatter.Rd0000644000176200001440000000245714165416216015002 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/visualization.R \name{CellScatter} \alias{CellScatter} \alias{CellPlot} \title{Cell-cell scatter plot} \usage{ CellScatter( object, cell1, cell2, features = NULL, highlight = NULL, cols = NULL, pt.size = 1, smooth = FALSE, raster = NULL, raster.dpi = c(512, 512) ) } \arguments{ \item{object}{Seurat object} \item{cell1}{Cell 1 name} \item{cell2}{Cell 2 name} \item{features}{Features to plot (default, all features)} \item{highlight}{Features to highlight} \item{cols}{Colors to use for identity class plotting.} \item{pt.size}{Size of the points on the plot} \item{smooth}{Smooth the graph (similar to smoothScatter)} \item{raster}{Convert points to raster format, default is \code{NULL} which will automatically use raster if the number of points plotted is greater than 100,000} \item{raster.dpi}{Pixel resolution for rasterized plots, passed to geom_scattermore(). Default is c(512, 512).} } \value{ A ggplot object } \description{ Creates a plot of scatter plot of features across two single cells. Pearson correlation between the two cells is displayed above the plot. } \examples{ data("pbmc_small") CellScatter(object = pbmc_small, cell1 = 'ATAGGAGAAACAGA', cell2 = 'CATCAGGATGCACA') } \concept{visualization} Seurat/man/LocalStruct.Rd0000644000176200001440000000242214005656653015030 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/integration.R \name{LocalStruct} \alias{LocalStruct} \title{Calculate the local structure preservation metric} \usage{ LocalStruct( object, grouping.var, idents = NULL, neighbors = 100, reduction = "pca", reduced.dims = 1:10, orig.dims = 1:10, verbose = TRUE ) } \arguments{ \item{object}{Seurat object} \item{grouping.var}{Grouping variable} \item{idents}{Optionally specify a set of idents to compute metric for} \item{neighbors}{Number of neighbors to compute in pca/corrected pca space} \item{reduction}{Dimensional reduction to use for corrected space} \item{reduced.dims}{Number of reduced dimensions to use} \item{orig.dims}{Number of PCs to use in original space} \item{verbose}{Display progress bar} } \value{ Returns the average preservation metric } \description{ Calculates a metric that describes how well the local structure of each group prior to integration is preserved after integration. 
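% LocalStruct ships without an \examples section; a minimal, hedged sketch (the object and
% grouping column are hypothetical - any integrated object with a dataset-of-origin
% metadata column would do):
%   scores <- LocalStruct(object = pancreas.integrated, grouping.var = "tech", reduction = "pca")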
This procedure works as follows: for each group, compute a PCA, find the top \code{neighbors} nearest neighbors in PCA space and in the corrected (integrated) space, and compute the size of the intersection of those two neighbor sets. The returned metric is the average of this overlap across all groups. } \concept{integration} Seurat/man/GetResidual.Rd0000644000176200001440000000312314005656653015000 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/preprocessing.R \name{GetResidual} \alias{GetResidual} \title{Calculate Pearson residuals of features not in the scale.data} \usage{ GetResidual( object, features, assay = NULL, umi.assay = NULL, clip.range = NULL, replace.value = FALSE, na.rm = TRUE, verbose = TRUE ) } \arguments{ \item{object}{A Seurat object} \item{features}{Name of features to add into the scale.data} \item{assay}{Name of the assay of the Seurat object generated by SCTransform} \item{umi.assay}{Name of the assay of the Seurat object containing the UMI matrix; the default is RNA} \item{clip.range}{Numeric of length two specifying the min and max values the Pearson residual will be clipped to} \item{replace.value}{Recalculate residuals for all features, even if they are already present. Useful if you want to change the clip.range.} \item{na.rm}{For features where there is no feature model stored, return NA for residual value in scale.data when na.rm = FALSE. When na.rm is TRUE, only return residuals for features with a model stored for all cells.} \item{verbose}{Whether to print messages and progress bars} } \value{ Returns a Seurat object containing Pearson residuals of added features in its scale.data } \description{ This function calls sctransform::get_residuals. } \examples{ data("pbmc_small") pbmc_small <- SCTransform(object = pbmc_small, variable.features.n = 20) pbmc_small <- GetResidual(object = pbmc_small, features = c('MS4A1', 'TCL1A')) } \seealso{ \code{\link[sctransform]{get_residuals}} } \concept{preprocessing} Seurat/man/PrepSCTIntegration.Rd0000644000176200001440000000617014005656653016261 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/integration.R \name{PrepSCTIntegration} \alias{PrepSCTIntegration} \title{Prepare an object list normalized with sctransform for integration.} \usage{ PrepSCTIntegration( object.list, assay = NULL, anchor.features = 2000, sct.clip.range = NULL, verbose = TRUE ) } \arguments{ \item{object.list}{A list of \code{\link{Seurat}} objects to prepare for integration} \item{assay}{The name of the \code{\link{Assay}} to use for integration. This can be a single name if all the assays to be integrated have the same name, or a character vector containing the name of each \code{\link{Assay}} in each object to be integrated. The specified assays must have been normalized using \code{\link{SCTransform}}. If NULL (default), the current default assay for each object is used.} \item{anchor.features}{Can be either: \itemize{ \item{A numeric value. This will call \code{\link{SelectIntegrationFeatures}} to select the provided number of features to be used in anchor finding} \item{A vector of features to be used as input to the anchor finding process} }} \item{sct.clip.range}{Numeric of length two specifying the min and max values the Pearson residual will be clipped to} \item{verbose}{Display output/messages} } \value{ A list of \code{\link{Seurat}} objects with the appropriate \code{scale.data} slots containing only the required \code{anchor.features}. 
} \description{ This function takes in a list of objects that have been normalized with the \code{\link{SCTransform}} method and performs the following steps: \itemize{ \item{If anchor.features is a numeric value, calls \code{\link{SelectIntegrationFeatures}} to determine the features to use in the downstream integration procedure.} \item{Ensures that the sctransform residuals for the features specified to anchor.features are present in each object in the list. This is necessary because the default behavior of \code{\link{SCTransform}} is to only store the residuals for the features determined to be variable. Residuals are recomputed for missing features using the stored model parameters via the \code{\link{GetResidual}} function.} \item{Subsets the \code{scale.data} slot to only contain the residuals for anchor.features for efficiency in downstream processing. } } } \examples{ \dontrun{ # to install the SeuratData package see https://github.com/satijalab/seurat-data library(SeuratData) data("panc8") # panc8 is a merged Seurat object containing 8 separate pancreas datasets # split the object by dataset and take the first 2 to integrate pancreas.list <- SplitObject(panc8, split.by = "tech")[1:2] # perform SCTransform normalization pancreas.list <- lapply(X = pancreas.list, FUN = SCTransform) # select integration features and prep step features <- SelectIntegrationFeatures(pancreas.list) pancreas.list <- PrepSCTIntegration( pancreas.list, anchor.features = features ) # downstream integration steps anchors <- FindIntegrationAnchors( pancreas.list, normalization.method = "SCT", anchor.features = features ) pancreas.integrated <- IntegrateData(anchors) } } \concept{integration} Seurat/man/BGTextColor.Rd0000644000176200001440000000223414005656653014726 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/visualization.R \name{BGTextColor} \alias{BGTextColor} \title{Determine text color based on background color} \source{ \url{https://stackoverflow.com/questions/3942878/how-to-decide-font-color-in-white-or-black-depending-on-background-color} } \usage{ BGTextColor( background, threshold = 186, w3c = FALSE, dark = "black", light = "white" ) } \arguments{ \item{background}{A vector of background colors; supports R color names and hexadecimal codes} \item{threshold}{Intensity threshold for light/dark cutoff; intensities greater than \code{threshold} yield \code{dark}, others yield \code{light}} \item{w3c}{Use \href{http://www.w3.org/TR/WCAG20/}{W3C} formula for calculating background text color; ignores \code{threshold}} \item{dark}{Color for dark text} \item{light}{Color for light text} } \value{ A named vector of either \code{dark} or \code{light}, depending on \code{background}; names of vector are \code{background} } \description{ Determine text color based on background color } \examples{ BGTextColor(background = c('black', 'white', '#E76BF3')) } \concept{visualization} Seurat/man/NormalizeData.Rd0000644000176200001440000000376414005656653015325 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/generics.R, R/preprocessing.R \name{NormalizeData} \alias{NormalizeData} \alias{NormalizeData.default} \alias{NormalizeData.Assay} \alias{NormalizeData.Seurat} \title{Normalize Data} \usage{ NormalizeData(object, ...) \method{NormalizeData}{default}( object, normalization.method = "LogNormalize", scale.factor = 10000, margin = 1, block.size = NULL, verbose = TRUE, ... 
) \method{NormalizeData}{Assay}( object, normalization.method = "LogNormalize", scale.factor = 10000, margin = 1, verbose = TRUE, ... ) \method{NormalizeData}{Seurat}( object, assay = NULL, normalization.method = "LogNormalize", scale.factor = 10000, margin = 1, verbose = TRUE, ... ) } \arguments{ \item{object}{An object} \item{...}{Arguments passed to other methods} \item{normalization.method}{Method for normalization. \itemize{ \item{LogNormalize: }{Feature counts for each cell are divided by the total counts for that cell and multiplied by the scale.factor. This is then natural-log transformed using log1p.} \item{CLR: }{Applies a centered log ratio transformation} \item{RC: }{Relative counts. Feature counts for each cell are divided by the total counts for that cell and multiplied by the scale.factor. No log-transformation is applied. For counts per million (CPM) set \code{scale.factor = 1e6}} }} \item{scale.factor}{Sets the scale factor for cell-level normalization} \item{margin}{If performing CLR normalization, normalize across features (1) or cells (2)} \item{block.size}{How many cells should be run in each chunk, will try to split evenly across threads} \item{verbose}{Display progress bar for normalization procedure} \item{assay}{Name of assay to use} } \value{ Returns object after normalization } \description{ Normalize the count data present in a given assay. } \examples{ \dontrun{ data("pbmc_small") pbmc_small pbmc_small <- NormalizeData(object = pbmc_small) } } \concept{preprocessing} Seurat/man/FindConservedMarkers.Rd0000644000176200001440000000376014156670503016652 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/differential_expression.R \name{FindConservedMarkers} \alias{FindConservedMarkers} \title{Finds markers that are conserved between the groups} \usage{ FindConservedMarkers( object, ident.1, ident.2 = NULL, grouping.var, assay = "RNA", slot = "data", min.cells.group = 3, meta.method = metap::minimump, verbose = TRUE, ... ) } \arguments{ \item{object}{An object} \item{ident.1}{Identity class to define markers for} \item{ident.2}{A second identity class for comparison. If NULL (default) - use all other cells for comparison.} \item{grouping.var}{Grouping variable} \item{assay}{Name of assay to fetch data for (default is RNA)} \item{slot}{Slot to pull data from; note that if \code{test.use} is "negbinom", "poisson", or "DESeq2", \code{slot} will be set to "counts"} \item{min.cells.group}{Minimum number of cells in one of the groups} \item{meta.method}{Method for combining p-values. Should be a function from the metap package (NOTE: pass the function, not a string)} \item{verbose}{Print a progress bar once expression testing begins} \item{\dots}{parameters to pass to FindMarkers} } \value{ data.frame containing a ranked list of putative conserved markers, and associated statistics (p-values within each group and a combined p-value (such as Fisher's combined p-value or others from the metap package), percentage of cells expressing the marker, average differences). Name of group is appended to each associated output column (e.g. CTRL_p_val). If only one group is tested in the grouping.var, max and combined p-values are not returned. 
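% To illustrate the column naming described above, a hypothetical follow-up to the
% \dontrun example below (where grouping.var = "groups" yields groups g1 and g2):
%   markers <- FindConservedMarkers(pbmc_small, ident.1 = 0, ident.2 = 1, grouping.var = "groups")
%   grep("_p_val$", colnames(markers), value = TRUE)  # e.g. "g1_p_val", "g2_p_val" plus the combined p-value column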
} \description{ Finds markers that are conserved between the groups } \examples{ \dontrun{ data("pbmc_small") pbmc_small # Create a simulated grouping variable pbmc_small[['groups']] <- sample(x = c('g1', 'g2'), size = ncol(x = pbmc_small), replace = TRUE) FindConservedMarkers(pbmc_small, ident.1 = 0, ident.2 = 1, grouping.var = "groups") } } \concept{differential_expression} Seurat/man/AddAzimuthResults.Rd0000644000176200001440000000124214152476164016204 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/utilities.R \name{AddAzimuthResults} \alias{AddAzimuthResults} \title{Add Azimuth Results} \usage{ AddAzimuthResults(object = NULL, filename) } \arguments{ \item{object}{A \code{\link[SeuratObject]{Seurat}} object} \item{filename}{Path to Azimuth mapping scores file} } \value{ \code{object} with Azimuth results added } \description{ Add mapping and prediction scores, UMAP embeddings, and imputed assay (if available) from Azimuth to an existing or new \code{\link[SeuratObject]{Seurat}} object } \examples{ \dontrun{ object <- AddAzimuthResults(object, filename = "azimuth_results.Rds") } } Seurat/man/PercentAbove.Rd0000644000176200001440000000107614156670503015147 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/utilities.R \name{PercentAbove} \alias{PercentAbove} \title{Calculate the percentage of a vector above some threshold} \usage{ PercentAbove(x, threshold) } \arguments{ \item{x}{Vector of values} \item{threshold}{Threshold to use when calculating percentage} } \value{ Returns the percentage of \code{x} values above the given threshold } \description{ Calculate the percentage of a vector above some threshold } \examples{ set.seed(42) PercentAbove(sample(1:100, 10), 75) } \concept{utilities} Seurat/man/AnchorSet-class.Rd0000644000176200001440000000262114005656653015563 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/objects.R \docType{class} \name{AnchorSet-class} \alias{AnchorSet-class} \alias{AnchorSet} \title{The AnchorSet Class} \description{ The AnchorSet class is an intermediate data storage class that stores the anchors and other related information needed for performing downstream analyses - namely data integration (\code{\link{IntegrateData}}) and data transfer (\code{\link{TransferData}}). } \section{Slots}{ \describe{ \item{\code{object.list}}{List of objects used to create anchors} \item{\code{reference.cells}}{List of cell names in the reference dataset - needed when performing data transfer.} \item{\code{reference.objects}}{Position of reference object/s in object.list} \item{\code{query.cells}}{List of cell names in the query dataset - needed when performing data transfer} \item{\code{anchors}}{The anchor matrix. This contains the cell indices of both anchor pair cells, the anchor score, and the index of the original dataset in the object.list for cell1 and cell2 of the anchor.} \item{\code{offsets}}{The offsets used to enable cell look up in downstream functions} \item{\code{anchor.features}}{The features used when performing anchor finding.} \item{\code{neighbors}}{List containing Neighbor objects for reuse later (e.g. 
mapping)} \item{\code{command}}{Store log of parameters that were used} }} \concept{objects} Seurat/man/UpdateSymbolList.Rd0000644000176200001440000000425514152476164016043 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/utilities.R \name{UpdateSymbolList} \alias{UpdateSymbolList} \alias{GeneSymbolThesarus} \title{Get updated synonyms for gene symbols} \source{ \url{https://www.genenames.org/} \url{https://www.genenames.org/help/rest/} } \usage{ GeneSymbolThesarus( symbols, timeout = 10, several.ok = FALSE, search.types = c("alias_symbol", "prev_symbol"), verbose = TRUE, ... ) UpdateSymbolList( symbols, timeout = 10, several.ok = FALSE, verbose = TRUE, ... ) } \arguments{ \item{symbols}{A vector of gene symbols} \item{timeout}{Time to wait before canceling query in seconds} \item{several.ok}{Allow several current gene symbols for each provided symbol} \item{search.types}{Type of query to perform: \describe{ \item{\dQuote{\code{alias_symbol}}}{Find alternate symbols for the genes described by \code{symbols}} \item{\dQuote{\code{prev_symbol}}}{Find new symbols for the genes described by \code{symbols}} } This parameter accepts multiple options and short-hand options (e.g. \dQuote{\code{prev}} for \dQuote{\code{prev_symbol}})} \item{verbose}{Show a progress bar depicting search progress} \item{...}{Extra parameters passed to \code{\link[httr]{GET}}} } \value{ \code{GeneSymbolThesarus}: if \code{several.ok}, a named list where each entry is the current symbol found for each symbol provided and the names are the provided symbols. Otherwise, a named vector with the same information. \code{UpdateSymbolList}: \code{symbols} with updated symbols from HGNC's gene names database } \description{ Find current gene symbols based on old or alias symbols using the gene names database from the HUGO Gene Nomenclature Committee (HGNC) } \details{ For each symbol passed, we query the HGNC gene names database for current symbols that have the provided symbol as either an alias (\code{alias_symbol}) or old (\code{prev_symbol}) symbol. All other queries are \strong{not} supported. } \note{ This function requires internet access } \examples{ \dontrun{ GeneSymbolThesarus(symbols = c("FAM64A")) } \dontrun{ UpdateSymbolList(symbols = cc.genes$s.genes) } } \seealso{ \code{\link[httr]{GET}} } \concept{utilities} Seurat/man/MappingScore.Rd0000644000176200001440000000540414005656653015163 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/generics.R, R/integration.R \name{MappingScore} \alias{MappingScore} \alias{MappingScore.default} \alias{MappingScore.AnchorSet} \title{Metric for evaluating mapping success} \usage{ MappingScore(anchors, ...) \method{MappingScore}{default}( anchors, combined.object, query.neighbors, ref.embeddings, query.embeddings, kanchors = 50, ndim = 50, ksmooth = 100, ksnn = 20, snn.prune = 0, subtract.first.nn = TRUE, nn.method = "annoy", n.trees = 50, query.weights = NULL, verbose = TRUE, ... ) \method{MappingScore}{AnchorSet}( anchors, kanchors = 50, ndim = 50, ksmooth = 100, ksnn = 20, snn.prune = 0, subtract.first.nn = TRUE, nn.method = "annoy", n.trees = 50, query.weights = NULL, verbose = TRUE, ... 
) } \arguments{ \item{anchors}{AnchorSet object or just the anchor matrix from the AnchorSet object returned from FindTransferAnchors} \item{...}{Reserved for internal use} \item{combined.object}{Combined object (ref + query) from the AnchorSet object returned} \item{query.neighbors}{Neighbors object computed on query cells} \item{ref.embeddings}{Reference embeddings matrix} \item{query.embeddings}{Query embeddings matrix} \item{kanchors}{Number of anchors to use in projection steps when computing weights} \item{ndim}{Number of dimensions to use when working with low dimensional projections of the data} \item{ksmooth}{Number of cells to average over when computing transition probabilities} \item{ksnn}{Number of cells to average over when determining the kernel bandwidth from the SNN graph} \item{snn.prune}{Amount of pruning to apply to edges in SNN graph} \item{subtract.first.nn}{Option to the scoring function when computing distances to subtract the distance to the first nearest neighbor} \item{nn.method}{Nearest neighbor method to use (annoy or RANN)} \item{n.trees}{More trees gives higher precision when using annoy approximate nearest neighbor search} \item{query.weights}{Query weights matrix for reuse} \item{verbose}{Display messages/progress} } \value{ Returns a vector of cell scores } \description{ This metric was designed to help identify query cells that aren't well represented in the reference dataset. The intuition for the score is that we are going to project the query cells into a reference-defined space and then project them back onto the query. By comparing the neighborhoods before and after projection, we identify cells whose local neighborhoods are the most affected by this transformation. This could be because there is a population of query cells that aren't present in the reference or the state of the cells in the query is significantly different from the equivalent cell type in the reference. } \concept{integration} Seurat/man/LogVMR.Rd0000644000176200001440000000075114005656653013702 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/utilities.R \name{LogVMR} \alias{LogVMR} \title{Calculate the variance to mean ratio of logged values} \usage{ LogVMR(x, ...) 
} \arguments{ \item{x}{A vector of values} \item{...}{Other arguments (not used)} } \value{ Returns the VMR in log-space } \description{ Calculate the variance to mean ratio (VMR) in non-logspace (return answer in log-space) } \examples{ LogVMR(x = c(1, 2, 3)) } \concept{utilities} Seurat/man/RidgePlot.Rd0000644000176200001440000000355514005656653014472 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/visualization.R \name{RidgePlot} \alias{RidgePlot} \title{Single cell ridge plot} \usage{ RidgePlot( object, features, cols = NULL, idents = NULL, sort = FALSE, assay = NULL, group.by = NULL, y.max = NULL, same.y.lims = FALSE, log = FALSE, ncol = NULL, slot = "data", stack = FALSE, combine = TRUE, fill.by = "feature" ) } \arguments{ \item{object}{Seurat object} \item{features}{Features to plot (gene expression, metrics, PC scores, anything that can be retrieved by FetchData)} \item{cols}{Colors to use for plotting} \item{idents}{Which classes to include in the plot (default is all)} \item{sort}{Sort identity classes (on the x-axis) by the average expression of the attribute being plotted, can also pass 'increasing' or 'decreasing' to change sort direction} \item{assay}{Name of assay to use, defaults to the active assay} \item{group.by}{Group (color) cells in different ways (for example, orig.ident)} \item{y.max}{Maximum y axis value} \item{same.y.lims}{Set all the y-axis limits to the same values} \item{log}{Plot the feature axis on log scale} \item{ncol}{Number of columns if multiple plots are displayed} \item{slot}{Use non-normalized counts data for plotting} \item{stack}{Horizontally stack plots for each feature} \item{combine}{Combine plots into a single \code{\link[patchwork]{patchwork}ed} ggplot object. If \code{FALSE}, return a list of ggplot} \item{fill.by}{Color violins/ridges based on either 'feature' or 'ident'} } \value{ A \code{\link[patchwork]{patchwork}ed} ggplot object if \code{combine = TRUE}; otherwise, a list of ggplot objects } \description{ Draws a ridge plot of single cell data (gene expression, metrics, PC scores, etc.) } \examples{ data("pbmc_small") RidgePlot(object = pbmc_small, features = 'PC_1') } \concept{visualization} Seurat/man/SpatialImage-class.Rd0000644000176200001440000000061414005656653016235 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/reexports.R \docType{class} \name{SpatialImage-class} \alias{SpatialImage-class} \title{The SpatialImage Class} \description{ For more details, please see the documentation in \code{\link[SeuratObject:SpatialImage]{SeuratObject}} } \seealso{ \code{\link[SeuratObject:SpatialImage]{SeuratObject::SpatialImage-class}} } Seurat/man/ElbowPlot.Rd0000644000176200001440000000144014005656653014477 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/visualization.R \name{ElbowPlot} \alias{ElbowPlot} \title{Quickly Pick Relevant Dimensions} \usage{ ElbowPlot(object, ndims = 20, reduction = "pca") } \arguments{ \item{object}{Seurat object} \item{ndims}{Number of dimensions to plot standard deviation for} \item{reduction}{Reduction technique to plot standard deviation for} } \value{ A ggplot object } \description{ Plots the standard deviations (or approximate singular values if running PCAFast) of the principal components for easy identification of an elbow in the graph. 
This elbow often corresponds well with the significant dims and is much faster to run than JackStraw } \examples{ data("pbmc_small") ElbowPlot(object = pbmc_small) } \concept{visualization} Seurat/man/RenameCells.Rd0000644000176200001440000000143114024674706014762 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/objects.R \name{RenameCells.SCTAssay} \alias{RenameCells.SCTAssay} \alias{RenameCells.SlideSeq} \alias{RenameCells.STARmap} \alias{RenameCells.VisiumV1} \title{Rename Cells in an Object} \usage{ \method{RenameCells}{SCTAssay}(object, new.names = NULL, ...) \method{RenameCells}{SlideSeq}(object, new.names = NULL, ...) \method{RenameCells}{STARmap}(object, new.names = NULL, ...) \method{RenameCells}{VisiumV1}(object, new.names = NULL, ...) } \arguments{ \item{object}{An object} \item{new.names}{Vector of new cell names} \item{...}{Arguments passed to other methods} } \description{ Rename Cells in an Object } \seealso{ \code{\link[SeuratObject:RenameCells]{SeuratObject::RenameCells}} } \concept{objects} Seurat/man/UpdateSCTAssays.Rd0000644000176200001440000000074114024674706015553 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/objects.R \name{UpdateSCTAssays} \alias{UpdateSCTAssays} \title{Update pre-V4 Assays generated with SCTransform in the Seurat object to the new SCTAssay class} \usage{ UpdateSCTAssays(object) } \arguments{ \item{object}{A Seurat object} } \value{ A Seurat object with updated SCTAssays } \description{ Update pre-V4 Assays generated with SCTransform in the Seurat object to the new SCTAssay class } \concept{objects} Seurat/man/SCTransform.Rd0000644000176200001440000000776114005656653014775 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/preprocessing.R \name{SCTransform} \alias{SCTransform} \title{Use regularized negative binomial regression to normalize UMI count data} \usage{ SCTransform( object, assay = "RNA", new.assay.name = "SCT", reference.SCT.model = NULL, do.correct.umi = TRUE, ncells = 5000, residual.features = NULL, variable.features.n = 3000, variable.features.rv.th = 1.3, vars.to.regress = NULL, do.scale = FALSE, do.center = TRUE, clip.range = c(-sqrt(x = ncol(x = object[[assay]])/30), sqrt(x = ncol(x = object[[assay]])/30)), conserve.memory = FALSE, return.only.var.genes = TRUE, seed.use = 1448145, verbose = TRUE, ... ) } \arguments{ \item{object}{A Seurat object} \item{assay}{Name of assay to pull the count data from; default is 'RNA'} \item{new.assay.name}{Name for the new assay containing the normalized data} \item{reference.SCT.model}{If not NULL, compute residuals for the object using the provided SCT model; supports only log_umi as the latent variable. If residual.features are not specified, compute for the top variable.features.n specified in the model which are also present in the object. If residual.features are specified, the variable features of the resulting SCT assay are set to the top variable.features.n in the model.} \item{do.correct.umi}{Place corrected UMI matrix in assay counts slot; default is TRUE} \item{ncells}{Number of subsampling cells used to build NB regression; default is 5000} \item{residual.features}{Genes to calculate residual features for; default is NULL (all genes). If specified, will be set to VariableFeatures of the returned object.} \item{variable.features.n}{Use this many features as variable features after ranking by residual variance; default is 3000. 
Only applied if residual.features is not set.} \item{variable.features.rv.th}{Instead of setting a fixed number of variable features, use this residual variance cutoff; this is only used when \code{variable.features.n} is set to NULL; default is 1.3. Only applied if residual.features is not set.} \item{vars.to.regress}{Variables to regress out in a second non-regularized linear regression. For example, percent.mito. Default is NULL} \item{do.scale}{Whether to scale residuals to have unit variance; default is FALSE} \item{do.center}{Whether to center residuals to have mean zero; default is TRUE} \item{clip.range}{Range to clip the residuals to; default is \code{c(-sqrt(n/30), sqrt(n/30))}, where n is the number of cells} \item{conserve.memory}{If set to TRUE the residual matrix for all genes is never created in full; useful for large data sets, but will take longer to run; this will also set return.only.var.genes to TRUE; default is FALSE} \item{return.only.var.genes}{If set to TRUE the scale.data matrices in output assay are subset to contain only the variable genes; default is TRUE} \item{seed.use}{Set a random seed. By default, sets the seed to 1448145. Setting NULL will not set a seed.} \item{verbose}{Whether to print messages and progress bars} \item{...}{Additional parameters passed to \code{sctransform::vst}} } \value{ Returns a Seurat object with a new assay (named SCT by default) with counts being (corrected) counts, data being log1p(counts), scale.data being Pearson residuals; sctransform::vst intermediate results are saved in misc slot of the new assay. } \description{ This function calls sctransform::vst. The sctransform package is available at https://github.com/ChristophH/sctransform. Use this function as an alternative to the NormalizeData, FindVariableFeatures, ScaleData workflow. Results are saved in a new assay (named SCT by default) with counts being (corrected) counts, data being log1p(counts), scale.data being Pearson residuals; sctransform::vst intermediate results are saved in misc slot of new assay. } \examples{ data("pbmc_small") SCTransform(object = pbmc_small) } \seealso{ \code{\link[sctransform]{correct_counts}} \code{\link[sctransform]{get_residuals}} } \concept{preprocessing} Seurat/man/LogNormalize.Rd0000644000176200001440000000124114005656653015171 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/preprocessing.R \name{LogNormalize} \alias{LogNormalize} \title{Normalize raw data} \usage{ LogNormalize(data, scale.factor = 10000, verbose = TRUE) } \arguments{ \item{data}{Matrix with the raw count data} \item{scale.factor}{Scale the data. Default is 1e4} \item{verbose}{Print progress} } \value{ Returns a matrix with the normalized and log-transformed data } \description{ Normalize count data per cell and transform to log scale } \examples{ mat <- matrix(data = rbinom(n = 25, size = 5, prob = 0.2), nrow = 5) mat mat_norm <- LogNormalize(data = mat) mat_norm } \concept{preprocessing} Seurat/man/Read10X_Image.Rd0000644000176200001440000000161414152476164015041 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/preprocessing.R \name{Read10X_Image} \alias{Read10X_Image} \title{Load a 10X Genomics Visium Image} \usage{ Read10X_Image( image.dir, image.name = "tissue_lowres_image.png", filter.matrix = TRUE, ... 
) } \arguments{ \item{image.dir}{Path to directory with 10X Genomics Visium image data; should include the files \code{tissue_lowres_image.png}, \code{scalefactors_json.json} and \code{tissue_positions_list.csv}} \item{image.name}{The file name of the image. Defaults to tissue_lowres_image.png.} \item{filter.matrix}{Filter spot/feature matrix to only include spots that have been determined to be over tissue.} \item{...}{Ignored for now} } \value{ A \code{\link{VisiumV1}} object } \description{ Load a 10X Genomics Visium Image } \seealso{ \code{\link{VisiumV1}} \code{\link{Load10X_Spatial}} } \concept{preprocessing} Seurat/man/Seurat-package.Rd0000644000176200001440000000762614152476164015430 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/zzz.R \docType{package} \name{Seurat-package} \alias{Seurat} \alias{Seurat-package} \title{Seurat: Tools for Single Cell Genomics} \description{ A toolkit for quality control, analysis, and exploration of single cell RNA sequencing data. 'Seurat' aims to enable users to identify and interpret sources of heterogeneity from single cell transcriptomic measurements, and to integrate diverse types of single cell data. See Satija R, Farrell J, Gennert D, et al (2015), Macosko E, Basu A, Satija R, et al (2015), Stuart T, Butler A, et al (2019), and Hao, Hao, et al (2020) for more details. } \section{Package options}{ Seurat uses the following [options()] to configure behaviour: \describe{ \item{\code{Seurat.memsafe}}{global option to call gc() after many operations. This can be helpful in cleaning up the memory status of the R session and prevent use of swap space. However, it does add to the computational overhead and setting to FALSE can speed things up if you're working in an environment where RAM availability is not a concern.} \item{\code{Seurat.warn.umap.uwot}}{Show warning about the default backend for \code{\link{RunUMAP}} changing from Python UMAP via reticulate to UWOT} \item{\code{Seurat.checkdots}}{For functions that have ... as a parameter, this controls the behavior when an item isn't used. 
Can be one of warn, stop, or silent.} \item{\code{Seurat.limma.wilcox.msg}}{{Show message about more efficient Wilcoxon Rank Sum test available via the limma package}} \item{\code{Seurat.Rfast2.msg}}{{Show message about more efficient Moran's I function available via the Rfast2 package}} \item{\code{Seurat.warn.vlnplot.split}}{Show message about changes to default behavior of split/multi violin plots} } } \seealso{ Useful links: \itemize{ \item \url{https://satijalab.org/seurat} \item \url{https://github.com/satijalab/seurat} \item Report bugs at \url{https://github.com/satijalab/seurat/issues} } } \author{ \strong{Maintainer}: Paul Hoffman \email{seurat@nygenome.org} (\href{https://orcid.org/0000-0002-7693-8957}{ORCID}) Other contributors: \itemize{ \item Andrew Butler \email{abutler@nygenome.org} (\href{https://orcid.org/0000-0003-3608-0463}{ORCID}) [contributor] \item Saket Choudhary \email{schoudhary@nygenome.org} (\href{https://orcid.org/0000-0001-5202-7633}{ORCID}) [contributor] \item Charlotte Darby \email{cdarby@nygenome.org} (\href{https://orcid.org/0000-0003-2195-5300}{ORCID}) [contributor] \item Jeff Farrell \email{jfarrell@g.harvard.edu} [contributor] \item Christoph Hafemeister \email{chafemeister@nygenome.org} (\href{https://orcid.org/0000-0001-6365-8254}{ORCID}) [contributor] \item Yuhan Hao \email{yhao@nygenome.org} (\href{https://orcid.org/0000-0002-1810-0822}{ORCID}) [contributor] \item Jaison Jain \email{jjain@nygenome.org} (\href{https://orcid.org/0000-0002-9478-5018}{ORCID}) [contributor] \item Efthymia Papalexi \email{epapalexi@nygenome.org} (\href{https://orcid.org/0000-0001-5898-694X}{ORCID}) [contributor] \item Patrick Roelli \email{proelli@nygenome.org} [contributor] \item Rahul Satija \email{rsatija@nygenome.org} (\href{https://orcid.org/0000-0001-9448-8833}{ORCID}) [contributor] \item Karthik Shekhar \email{kshekhar@berkeley.edu} [contributor] \item Avi Srivastava \email{asrivastava@nygenome.org} (\href{https://orcid.org/0000-0001-9798-2079}{ORCID}) [contributor] \item Tim Stuart \email{tstuart@nygenome.org} (\href{https://orcid.org/0000-0002-3044-0897}{ORCID}) [contributor] \item Kristof Torkenczy (\href{https://orcid.org/0000-0002-4869-7957}{ORCID}) [contributor] \item Shiwei Zheng \email{szheng@nygenome.org} (\href{https://orcid.org/0000-0001-6682-6743}{ORCID}) [contributor] \item Satija Lab and Collaborators [funder] } } Seurat/man/HVFInfo.SCTAssay.Rd0000644000176200001440000000216614024674706015466 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/objects.R \name{HVFInfo.SCTAssay} \alias{HVFInfo.SCTAssay} \title{Get Variable Feature Information} \usage{ \method{HVFInfo}{SCTAssay}(object, selection.method, status = FALSE, ...) } \arguments{ \item{object}{An object} \item{selection.method}{Which method to pull. 
For \code{HVFInfo} and \code{VariableFeatures}, choose one from one of the following: \itemize{ \item \dQuote{vst} \item \dQuote{sctransform} or \dQuote{sct} \item \dQuote{mean.var.plot}, \dQuote{dispersion}, \dQuote{mvp}, or \dQuote{disp} } For \code{SVFInfo} and \code{SpatiallyVariableFeatures}, choose from: \itemize{ \item \dQuote{markvariogram} \item \dQuote{moransi} }} \item{status}{Add variable status to the resulting data frame} \item{...}{Arguments passed to other methods} } \description{ Get variable feature information from \code{\link{SCTAssay}} objects } \examples{ # Get the HVF info directly from an SCTAssay object pbmc_small <- SCTransform(pbmc_small) HVFInfo(pbmc_small[["SCT"]], selection.method = 'sct')[1:5, ] } \seealso{ \code{\link[SeuratObject]{HVFInfo}} } Seurat/man/RunLDA.Rd0000644000176200001440000000312514024674706013657 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/generics.R, R/mixscape.R \name{RunLDA} \alias{RunLDA} \alias{RunLDA.default} \alias{RunLDA.Assay} \alias{RunLDA.Seurat} \title{Run Linear Discriminant Analysis} \usage{ RunLDA(object, ...) \method{RunLDA}{default}( object, labels, assay = NULL, verbose = TRUE, ndims.print = 1:5, nfeatures.print = 30, reduction.key = "LDA_", seed = 42, ... ) \method{RunLDA}{Assay}( object, assay = NULL, labels, features = NULL, verbose = TRUE, ndims.print = 1:5, nfeatures.print = 30, reduction.key = "LDA_", seed = 42, ... ) \method{RunLDA}{Seurat}( object, assay = NULL, labels, features = NULL, reduction.name = "lda", reduction.key = "LDA_", seed = 42, verbose = TRUE, ndims.print = 1:5, nfeatures.print = 30, ... ) } \arguments{ \item{object}{An object of class Seurat.} \item{...}{Arguments passed to other methods} \item{labels}{Meta data column with target gene class labels.} \item{assay}{Assay to use for performing Linear Discriminant Analysis (LDA).} \item{verbose}{Print the top genes associated with high/low loadings for the PCs} \item{ndims.print}{Number of LDA dimensions to print.} \item{nfeatures.print}{Number of features to print for each LDA component.} \item{reduction.key}{Reduction key name.} \item{seed}{Value for random seed} \item{features}{Features to compute LDA on} \item{reduction.name}{dimensional reduction name, lda by default} } \description{ Run Linear Discriminant Analysis Function to perform Linear Discriminant Analysis. } \concept{mixscape} Seurat/man/ScoreJackStraw.Rd0000644000176200001440000000305014005656653015454 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/generics.R, R/dimensional_reduction.R \name{ScoreJackStraw} \alias{ScoreJackStraw} \alias{ScoreJackStraw.JackStrawData} \alias{ScoreJackStraw.DimReduc} \alias{ScoreJackStraw.Seurat} \title{Compute Jackstraw scores significance.} \usage{ ScoreJackStraw(object, ...) \method{ScoreJackStraw}{JackStrawData}(object, dims = 1:5, score.thresh = 1e-05, ...) \method{ScoreJackStraw}{DimReduc}(object, dims = 1:5, score.thresh = 1e-05, ...) \method{ScoreJackStraw}{Seurat}( object, reduction = "pca", dims = 1:5, score.thresh = 1e-05, do.plot = FALSE, ... ) } \arguments{ \item{object}{An object} \item{...}{Arguments passed to other methods} \item{dims}{Which dimensions to examine} \item{score.thresh}{Threshold to use for the proportion test of PC significance (see Details)} \item{reduction}{Reduction associated with JackStraw to score} \item{do.plot}{Show plot. 
To return ggplot object, use \code{JackStrawPlot} after running ScoreJackStraw.} } \value{ Returns a Seurat object } \description{ Significant PCs should show a p-value distribution that is strongly skewed to the left compared to the null distribution. The p-value for each PC is based on a proportion test comparing the number of features with a p-value below a particular threshold (score.thresh), compared with the proportion of features expected under a uniform distribution of p-values. } \seealso{ \code{\link{JackStrawPlot}} \code{\link{JackStrawPlot}} } \author{ Omri Wurtzel } \concept{dimensional_reduction} Seurat/man/ReadSlideSeq.Rd0000644000176200001440000000077414005656653015106 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/preprocessing.R \name{ReadSlideSeq} \alias{ReadSlideSeq} \title{Load Slide-seq spatial data} \usage{ ReadSlideSeq(coord.file, assay = "Spatial") } \arguments{ \item{coord.file}{Path to csv file containing bead coordinate positions} \item{assay}{Name of assay to associate image to} } \value{ A \code{\link{SlideSeq}} object } \description{ Load Slide-seq spatial data } \seealso{ \code{\link{SlideSeq}} } \concept{preprocessing} Seurat/man/SeuratCommand-class.Rd0000644000176200001440000000062214005656653016436 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/reexports.R \docType{class} \name{SeuratCommand-class} \alias{SeuratCommand-class} \title{The SeuratCommand Class} \description{ For more details, please see the documentation in \code{\link[SeuratObject:SeuratCommand]{SeuratObject}} } \seealso{ \code{\link[SeuratObject:SeuratCommand]{SeuratObject::SeuratCommand-class}} } Seurat/man/TransferData.Rd0000644000176200001440000001465414152476164015161 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/integration.R \name{TransferData} \alias{TransferData} \title{Transfer data} \usage{ TransferData( anchorset, refdata, reference = NULL, query = NULL, weight.reduction = "pcaproject", l2.norm = FALSE, dims = NULL, k.weight = 50, sd.weight = 1, eps = 0, n.trees = 50, verbose = TRUE, slot = "data", prediction.assay = FALSE, store.weights = TRUE ) } \arguments{ \item{anchorset}{An \code{\link{AnchorSet}} object generated by \code{\link{FindTransferAnchors}}} \item{refdata}{Data to transfer. This can be specified in one of two ways: \itemize{ \item{The reference data itself as either a vector where the names correspond to the reference cells, or a matrix, where the column names correspond to the reference cells.} \item{The name of the metadata field or assay from the reference object provided. This requires the reference parameter to be specified. If pulling assay data in this manner, it will pull the data from the data slot. To transfer data from other slots, please pull the data explicitly with \code{\link{GetAssayData}} and provide that matrix here.} }} \item{reference}{Reference object from which to pull data to transfer} \item{query}{Query object into which the data will be transferred.} \item{weight.reduction}{Dimensional reduction to use for the weighting anchors. 
Options are: \itemize{ \item{pcaproject: Use the projected PCA used for anchor building} \item{lsiproject: Use the projected LSI used for anchor building} \item{pca: Use an internal PCA on the query only} \item{cca: Use the CCA used for anchor building} \item{custom DimReduc: User provided \code{\link{DimReduc}} object computed on the query cells} }} \item{l2.norm}{Perform L2 normalization on the cell embeddings after dimensional reduction} \item{dims}{Set of dimensions to use in the anchor weighting procedure. If NULL, the same dimensions that were used to find anchors will be used for weighting.} \item{k.weight}{Number of neighbors to consider when weighting anchors} \item{sd.weight}{Controls the bandwidth of the Gaussian kernel for weighting} \item{eps}{Error bound on the neighbor finding algorithm (from \code{\link{RANN}})} \item{n.trees}{More trees give higher precision when using annoy approximate nearest neighbor search} \item{verbose}{Print progress bars and output} \item{slot}{Slot to store the imputed data. Must be either "data" (default) or "counts"} \item{prediction.assay}{Return an \code{Assay} object with the prediction scores for each class stored in the \code{data} slot.} \item{store.weights}{Optionally store the weights matrix used for predictions in the returned query object.} } \value{ If \code{query} is not provided, for the categorical data in \code{refdata}, returns a data.frame with label predictions. If \code{refdata} is a matrix, returns an Assay object where the imputed data has been stored in the provided slot. If \code{query} is provided, a modified query object is returned. For the categorical data in refdata, prediction scores are stored as Assays (prediction.score.NAME) and two additional metadata fields: predicted.NAME and predicted.NAME.score which contain the class prediction and the score for that predicted class. For continuous data, an Assay called NAME is returned. NAME here corresponds to the name of the element in the refdata list. } \description{ Transfer categorical or continuous data across single-cell datasets. For transferring categorical information, pass a vector from the reference dataset (e.g. \code{refdata = reference$celltype}). For transferring continuous information, pass a matrix from the reference dataset (e.g. \code{refdata = GetAssayData(reference[['RNA']])}). } \details{ The main steps of this procedure are outlined below. For a more detailed description of the methodology, please see Stuart, Butler, et al Cell 2019. \doi{10.1016/j.cell.2019.05.031}; \doi{10.1101/460147} For both transferring discrete labels and also feature imputation, we first compute the weights matrix. \itemize{ \item{Construct a weights matrix that defines the association between each query cell and each anchor. These weights are computed as 1 - the distance between the query cell and the anchor divided by the distance of the query cell to the \code{k.weight}th anchor multiplied by the anchor score computed in \code{\link{FindIntegrationAnchors}}. We then apply a Gaussian kernel with a bandwidth defined by \code{sd.weight} and normalize across all \code{k.weight} anchors.} } The main difference between label transfer (classification) and feature imputation is what gets multiplied by the weights matrix. For label transfer, we perform the following steps: \itemize{ \item{Create a binary classification matrix, the rows corresponding to each possible class and the columns corresponding to the anchors.
If the reference cell in the anchor pair is a member of a certain class, that matrix entry is filled with a 1, otherwise 0.} \item{Multiply this classification matrix by the transpose of the weights matrix to compute a prediction score for each class for each cell in the query dataset.} } For feature imputation, we perform the following step: \itemize{ \item{Multiply the expression matrix for the reference anchor cells by the weights matrix. This returns a predicted expression matrix for the specified features for each cell in the query dataset.} } } \examples{ \dontrun{ # to install the SeuratData package see https://github.com/satijalab/seurat-data library(SeuratData) data("pbmc3k") # for demonstration, split the object into reference and query pbmc.reference <- pbmc3k[, 1:1350] pbmc.query <- pbmc3k[, 1351:2700] # perform standard preprocessing on each object pbmc.reference <- NormalizeData(pbmc.reference) pbmc.reference <- FindVariableFeatures(pbmc.reference) pbmc.reference <- ScaleData(pbmc.reference) pbmc.query <- NormalizeData(pbmc.query) pbmc.query <- FindVariableFeatures(pbmc.query) pbmc.query <- ScaleData(pbmc.query) # find anchors anchors <- FindTransferAnchors(reference = pbmc.reference, query = pbmc.query) # transfer labels predictions <- TransferData(anchorset = anchors, refdata = pbmc.reference$seurat_annotations) pbmc.query <- AddMetaData(object = pbmc.query, metadata = predictions) } } \references{ Stuart T, Butler A, et al. Comprehensive Integration of Single-Cell Data. Cell. 2019;177:1888-1902 \doi{10.1016/j.cell.2019.05.031} } \concept{integration} Seurat/man/ReadMtx.Rd0000644000176200001440000000361114152476164014136 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/preprocessing.R \name{ReadMtx} \alias{ReadMtx} \title{Load in data from remote or local mtx files} \usage{ ReadMtx( mtx, cells, features, cell.column = 1, feature.column = 2, cell.sep = "\\t", feature.sep = "\\t", skip.cell = 0, skip.feature = 0, mtx.transpose = FALSE, unique.features = TRUE, strip.suffix = FALSE ) } \arguments{ \item{mtx}{Name or remote URL of the mtx file} \item{cells}{Name or remote URL of the cells/barcodes file} \item{features}{Name or remote URL of the features/genes file} \item{cell.column}{Specify which column of the cells file to use for cell names; default is 1} \item{feature.column}{Specify which column of the features file to use for feature/gene names; default is 2} \item{cell.sep}{Specify the delimiter in the cell name file} \item{feature.sep}{Specify the delimiter in the feature name file} \item{skip.cell}{Number of lines to skip in the cells file before beginning to read cell names} \item{skip.feature}{Number of lines to skip in the features file before beginning to read gene names} \item{mtx.transpose}{Transpose the matrix after reading in} \item{unique.features}{Make feature names unique (default TRUE)} \item{strip.suffix}{Remove trailing "-1" if present in all cell barcodes.} } \value{ A sparse matrix containing the expression data.
} \description{ Enables easy loading of sparse data matrices } \examples{ \dontrun{ # For local files: expression_matrix <- ReadMtx( mtx = "count_matrix.mtx.gz", features = "features.tsv.gz", cells = "barcodes.tsv.gz" ) seurat_object <- CreateSeuratObject(counts = expression_matrix) # For remote files: expression_matrix <- ReadMtx(mtx = "http://localhost/matrix.mtx", cells = "http://localhost/barcodes.tsv", features = "http://localhost/genes.tsv") seurat_object <- CreateSeuratObject(counts = expression_matrix) } } \concept{preprocessing} Seurat/man/AddModuleScore.Rd0000644000176200001440000000454214005656653015430 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/utilities.R \name{AddModuleScore} \alias{AddModuleScore} \title{Calculate module scores for feature expression programs in single cells} \usage{ AddModuleScore( object, features, pool = NULL, nbin = 24, ctrl = 100, k = FALSE, assay = NULL, name = "Cluster", seed = 1, search = FALSE, ... ) } \arguments{ \item{object}{Seurat object} \item{features}{A list of vectors of features for expression programs; each entry should be a vector of feature names} \item{pool}{List of features to check expression levels against, defaults to \code{rownames(x = object)}} \item{nbin}{Number of bins of aggregate expression levels for all analyzed features} \item{ctrl}{Number of control features selected from the same bin per analyzed feature} \item{k}{Use feature clusters returned from DoKMeans} \item{assay}{Name of assay to use} \item{name}{Name for the expression programs; will append a number to the end for each entry in \code{features} (e.g. if \code{features} has three programs, the results will be stored as \code{name1}, \code{name2}, \code{name3}, respectively)} \item{seed}{Set a random seed. If NULL, seed is not set.} \item{search}{Search for symbol synonyms for features in \code{features} that don't match features in \code{object}? Searches the HGNC's gene names database; see \code{\link{UpdateSymbolList}} for more details} \item{...}{Extra parameters passed to \code{\link{UpdateSymbolList}}} } \value{ Returns a Seurat object with module scores added to object meta data; each module is stored as \code{name#} for each module program present in \code{features} } \description{ Calculate the average expression levels of each program (cluster) at the single-cell level, subtracted by the aggregated expression of control feature sets. All analyzed features are binned based on averaged expression, and the control features are randomly selected from each bin.
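In essence, each program's score is a per-cell difference of means: the average expression of the program features minus the average expression of a control feature set. The sketch below illustrates only that idea; it draws the controls at random rather than matching them by expression bin, and the matrix expr and helper name score_module are hypothetical, not part of the package.

# illustration only: simplified module score without expression binning
score_module <- function(expr, program, n.ctrl = 100, seed = 1) {
  set.seed(seed)
  program <- intersect(program, rownames(expr))
  # random control features drawn from everything outside the program
  ctrl <- sample(setdiff(rownames(expr), program), size = n.ctrl)
  # per-cell mean of program features minus per-cell mean of control features
  colMeans(expr[program, , drop = FALSE]) - colMeans(expr[ctrl, , drop = FALSE])
}
# e.g. scores <- score_module(as.matrix(GetAssayData(pbmc_small, slot = "data")), c("CD3E", "CD3D"))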
} \examples{ \dontrun{ data("pbmc_small") cd_features <- list(c( 'CD79B', 'CD79A', 'CD19', 'CD180', 'CD200', 'CD3D', 'CD2', 'CD3E', 'CD7', 'CD8A', 'CD14', 'CD1C', 'CD68', 'CD9', 'CD247' )) pbmc_small <- AddModuleScore( object = pbmc_small, features = cd_features, ctrl = 5, name = 'CD_Features' ) head(x = pbmc_small[]) } } \references{ Tirosh et al, Science (2016) } \concept{utilities} Seurat/man/Graph-class.Rd0000644000176200001440000000054214005656653014736 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/reexports.R \docType{class} \name{Graph-class} \alias{Graph-class} \title{The Graph Class} \description{ For more details, please see the documentation in \code{\link[SeuratObject:Graph]{SeuratObject}} } \seealso{ \code{\link[SeuratObject:Graph]{SeuratObject::Graph-class}} } Seurat/man/MixingMetric.Rd0000644000176200001440000000227514005656653015176 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/integration.R \name{MixingMetric} \alias{MixingMetric} \title{Calculates a mixing metric} \usage{ MixingMetric( object, grouping.var, reduction = "pca", dims = 1:2, k = 5, max.k = 300, eps = 0, verbose = TRUE ) } \arguments{ \item{object}{Seurat object} \item{grouping.var}{Grouping variable for dataset} \item{reduction}{Which dimensionally reduced space to use} \item{dims}{Dimensions to use} \item{k}{Neighbor number to examine per group} \item{max.k}{Maximum size of local neighborhood to compute} \item{eps}{Error bound on the neighbor finding algorithm (from RANN)} \item{verbose}{Displays progress bar} } \value{ Returns a vector of values of the mixing metric for each cell } \description{ Here we compute a measure of how well mixed a composite dataset is. To compute, we first examine the local neighborhood for each cell (looking at max.k neighbors) and determine for each group (could be the dataset after integration) the k nearest neighbor and what rank that neighbor was in the overall neighborhood. We then take the median across all groups as the mixing metric per cell. } \concept{integration} Seurat/man/FeatureScatter.Rd0000644000176200001440000000460514165416216015513 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/visualization.R \name{FeatureScatter} \alias{FeatureScatter} \alias{GenePlot} \title{Scatter plot of single cell data} \usage{ FeatureScatter( object, feature1, feature2, cells = NULL, shuffle = FALSE, seed = 1, group.by = NULL, cols = NULL, pt.size = 1, shape.by = NULL, span = NULL, smooth = FALSE, combine = TRUE, slot = "data", plot.cor = TRUE, raster = NULL, raster.dpi = c(512, 512), jitter = TRUE ) } \arguments{ \item{object}{Seurat object} \item{feature1}{First feature to plot. Typically feature expression but can also be metrics, PC scores, etc. - anything that can be retreived with FetchData} \item{feature2}{Second feature to plot.} \item{cells}{Cells to include on the scatter plot.} \item{shuffle}{Whether to randomly shuffle the order of points. This can be useful for crowded plots if points of interest are being buried. 
(default is FALSE)} \item{seed}{Sets the seed if randomly shuffling the order of points.} \item{group.by}{Name of one or more metadata columns to group (color) cells by (for example, orig.ident); pass 'ident' to group by identity class} \item{cols}{Colors to use for identity class plotting.} \item{pt.size}{Size of the points on the plot} \item{shape.by}{Ignored for now} \item{span}{Spline span in loess function call, if \code{NULL}, no spline added} \item{smooth}{Smooth the graph (similar to smoothScatter)} \item{combine}{Combine plots into a single \code{\link[patchwork]{patchwork}ed} plot} \item{slot}{Slot to pull data from, should be one of 'counts', 'data', or 'scale.data'} \item{plot.cor}{Display correlation in plot title} \item{raster}{Convert points to raster format, default is \code{NULL} which will automatically use raster if the number of points plotted is greater than 100,000} \item{raster.dpi}{Pixel resolution for rasterized plots, passed to geom_scattermore(). Default is c(512, 512).} \item{jitter}{Jitter for easier visualization of crowded points} } \value{ A ggplot object } \description{ Creates a scatter plot of two features (typically feature expression), across a set of single cells. Cells are colored by their identity class. Pearson correlation between the two features is displayed above the plot. } \examples{ data("pbmc_small") FeatureScatter(object = pbmc_small, feature1 = 'CD9', feature2 = 'CD3E') } \concept{visualization} Seurat/man/CalcPerturbSig.Rd0000644000176200001440000000361114005656653015443 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/mixscape.R \name{CalcPerturbSig} \alias{CalcPerturbSig} \title{Calculate a perturbation signature} \usage{ CalcPerturbSig( object, assay = NULL, features = NULL, slot = "data", gd.class = "guide_ID", nt.cell.class = "NT", split.by = NULL, num.neighbors = NULL, reduction = "pca", ndims = 15, new.assay.name = "PRTB", verbose = TRUE ) } \arguments{ \item{object}{An object of class Seurat.} \item{assay}{Name of Assay PRTB signature is being calculated on.} \item{features}{Features to compute PRTB signature for. Defaults to the variable features set in the assay specified.} \item{slot}{Data slot to use for PRTB signature calculation.} \item{gd.class}{Metadata column containing target gene classification.} \item{nt.cell.class}{Non-targeting gRNA cell classification identity.} \item{split.by}{Provide metadata column if multiple biological replicates exist to calculate PRTB signature for every replicate separately.} \item{num.neighbors}{Number of nearest neighbors to consider.} \item{reduction}{Reduction method used to calculate nearest neighbors.} \item{ndims}{Number of dimensions to use from dimensionality reduction method.} \item{new.assay.name}{Name for the new assay.} \item{verbose}{Display progress + messages} } \value{ Returns a Seurat object with a new assay added containing the perturbation signature for all cells in the data slot. } \description{ Function to calculate perturbation signature for pooled CRISPR screen datasets. For each target cell (expressing one target gRNA), we identify 20 cells from the control pool (non-targeting cells) with the most similar mRNA expression profiles. The perturbation signature is calculated by subtracting the averaged mRNA expression profile of the non-targeting neighbors from the mRNA expression profile of the target cell.
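The neighbor-and-subtract step can be pictured with a small sketch; everything here (the matrices expr and emb, the vector nt.cells, and the helper name perturb_sig) is hypothetical, and it uses a plain Euclidean nearest-neighbor search rather than the approximate search used internally.

# illustration only: perturbation signature for a single target cell
perturb_sig <- function(expr, emb, target.cell, nt.cells, k = 20) {
  # squared Euclidean distance from the target cell to every non-targeting cell in PCA space
  d <- colSums((t(emb[nt.cells, , drop = FALSE]) - emb[target.cell, ])^2)
  neighbors <- names(sort(d))[seq_len(k)]
  # subtract the averaged non-targeting neighbor profile from the target cell's profile
  expr[, target.cell] - rowMeans(expr[, neighbors, drop = FALSE])
}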
} \concept{mixscape} Seurat/man/as.SingleCellExperiment.Rd0000644000176200001440000000114214152476164017253 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/generics.R, R/objects.R \name{as.SingleCellExperiment} \alias{as.SingleCellExperiment} \alias{as.SingleCellExperiment.Seurat} \title{Convert objects to SingleCellExperiment objects} \usage{ as.SingleCellExperiment(x, ...) \method{as.SingleCellExperiment}{Seurat}(x, assay = NULL, ...) } \arguments{ \item{x}{An object to convert to class \code{SingleCellExperiment}} \item{...}{Arguments passed to other methods} \item{assay}{Assays to convert} } \description{ Convert objects to SingleCellExperiment objects } \concept{objects} Seurat/man/RunTSNE.Rd0000644000176200001440000000542514005656653014035 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/generics.R, R/dimensional_reduction.R \name{RunTSNE} \alias{RunTSNE} \alias{RunTSNE.matrix} \alias{RunTSNE.DimReduc} \alias{RunTSNE.dist} \alias{RunTSNE.Seurat} \title{Run t-distributed Stochastic Neighbor Embedding} \usage{ RunTSNE(object, ...) \method{RunTSNE}{matrix}( object, assay = NULL, seed.use = 1, tsne.method = "Rtsne", dim.embed = 2, reduction.key = "tSNE_", ... ) \method{RunTSNE}{DimReduc}( object, cells = NULL, dims = 1:5, seed.use = 1, tsne.method = "Rtsne", dim.embed = 2, reduction.key = "tSNE_", ... ) \method{RunTSNE}{dist}( object, assay = NULL, seed.use = 1, tsne.method = "Rtsne", dim.embed = 2, reduction.key = "tSNE_", ... ) \method{RunTSNE}{Seurat}( object, reduction = "pca", cells = NULL, dims = 1:5, features = NULL, seed.use = 1, tsne.method = "Rtsne", dim.embed = 2, distance.matrix = NULL, reduction.name = "tsne", reduction.key = "tSNE_", ... ) } \arguments{ \item{object}{Seurat object} \item{...}{Arguments passed to other methods and to t-SNE call (most commonly used is perplexity)} \item{assay}{Name of assay that t-SNE is being run on} \item{seed.use}{Random seed for the t-SNE. If NULL, does not set the seed} \item{tsne.method}{Select the method to use to compute the tSNE. Available methods are: \itemize{ \item{Rtsne: }{Use the Rtsne package Barnes-Hut implementation of tSNE (default)} \item{FIt-SNE: }{Use the FFT-accelerated Interpolation-based t-SNE. Based on Kluger Lab code found here: https://github.com/KlugerLab/FIt-SNE} }} \item{dim.embed}{The dimensional space of the resulting tSNE embedding (default is 2). For example, set to 3 for a 3d tSNE} \item{reduction.key}{dimensional reduction key, specifies the string before the number for the dimension names. tSNE_ by default} \item{cells}{Which cells to analyze (default, all cells)} \item{dims}{Which dimensions to use as input features} \item{reduction}{Which dimensional reduction (e.g. PCA, ICA) to use for the tSNE. Default is PCA} \item{features}{If set, run the tSNE on this subset of features (instead of running on a set of reduced dimensions). Not set (NULL) by default; \code{dims} must be NULL to run on features} \item{distance.matrix}{If set, runs tSNE on the given distance matrix instead of data matrix (experimental)} \item{reduction.name}{dimensional reduction name, specifies the position in the object$dr list. tsne by default} } \description{ Run t-SNE dimensionality reduction on selected features. Has the option of running in a reduced dimensional space (i.e. spectral tSNE, recommended), or running based on a set of genes. For details about stored TSNE calculation parameters, see \code{PrintTSNEParams}.
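As a usage sketch on the bundled \code{pbmc_small} object (this assumes its precomputed PCA and variable features; perplexity is lowered only because the toy dataset has 80 cells, so treat the parameter values as illustrative rather than recommended):

data("pbmc_small")
# recommended: run t-SNE on an existing reduced dimensional space
pbmc_small <- RunTSNE(pbmc_small, reduction = "pca", dims = 1:5, perplexity = 10)
# alternative: run directly on a set of features; dims must be NULL in this mode
pbmc_small <- RunTSNE(pbmc_small, dims = NULL, features = VariableFeatures(pbmc_small), perplexity = 10)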
} \concept{dimensional_reduction} Seurat/man/PrepLDA.Rd0000644000176200001440000000230214005656653014015 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/mixscape.R \name{PrepLDA} \alias{PrepLDA} \title{Function to prepare data for Linear Discriminant Analysis.} \usage{ PrepLDA( object, de.assay = "RNA", pc.assay = "PRTB", labels = "gene", nt.label = "NT", npcs = 10, verbose = TRUE, logfc.threshold = 0.25 ) } \arguments{ \item{object}{An object of class Seurat.} \item{de.assay}{Assay to use for selection of DE genes.} \item{pc.assay}{Assay to use for running Principal Components Analysis.} \item{labels}{Meta data column with target gene class labels.} \item{nt.label}{Name of non-targeting cell class.} \item{npcs}{Number of principal components to use.} \item{verbose}{Print progress bar.} \item{logfc.threshold}{Limit testing to genes which show, on average, at least X-fold difference (log-scale) between the two groups of cells. Default is 0.25. Increasing logfc.threshold speeds up the function, but can miss weaker signals.} } \value{ Returns a list of the first 10 PCs from each projection. } \description{ This function performs unsupervised PCA on each mixscape class separately and projects each subspace onto all cells in the data. } \concept{mixscape} Seurat/man/MetaFeature.Rd0000644000176200001440000000176014005656653014777 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/utilities.R \name{MetaFeature} \alias{MetaFeature} \title{Aggregate expression of multiple features into a single feature} \usage{ MetaFeature( object, features, meta.name = "metafeature", cells = NULL, assay = NULL, slot = "data" ) } \arguments{ \item{object}{A Seurat object} \item{features}{List of features to aggregate} \item{meta.name}{Name of column in metadata to store metafeature} \item{cells}{List of cells to use (default all cells)} \item{assay}{Which assay to use} \item{slot}{Which slot to take data from (default data)} } \value{ Returns a \code{Seurat} object with metafeature stored in object metadata } \description{ Calculates relative contribution of each feature to each cell for given set of features. } \examples{ data("pbmc_small") pbmc_small <- MetaFeature( object = pbmc_small, features = c("LTB", "EAF2"), meta.name = 'var.aggregate' ) head(pbmc_small[[]]) } \concept{utilities} Seurat/man/VariableFeaturePlot.Rd0000644000176200001440000000305014165416216016463 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/visualization.R \name{VariableFeaturePlot} \alias{VariableFeaturePlot} \alias{VariableGenePlot} \alias{MeanVarPlot} \title{View variable features} \usage{ VariableFeaturePlot( object, cols = c("black", "red"), pt.size = 1, log = NULL, selection.method = NULL, assay = NULL, raster = NULL, raster.dpi = c(512, 512) ) } \arguments{ \item{object}{Seurat object} \item{cols}{Colors to specify non-variable/variable status} \item{pt.size}{Size of the points on the plot} \item{log}{Plot the x-axis in log scale} \item{selection.method}{Which method to pull.
For \code{HVFInfo} and \code{VariableFeatures}, choose one of the following: \itemize{ \item \dQuote{vst} \item \dQuote{sctransform} or \dQuote{sct} \item \dQuote{mean.var.plot}, \dQuote{dispersion}, \dQuote{mvp}, or \dQuote{disp} } For \code{SVFInfo} and \code{SpatiallyVariableFeatures}, choose from: \itemize{ \item \dQuote{markvariogram} \item \dQuote{moransi} }} \item{assay}{Assay to pull variable features from} \item{raster}{Convert points to raster format, default is \code{NULL} which will automatically use raster if the number of points plotted is greater than 100,000} \item{raster.dpi}{Pixel resolution for rasterized plots, passed to geom_scattermore(). Default is c(512, 512).} } \value{ A ggplot object } \description{ View variable features } \examples{ data("pbmc_small") VariableFeaturePlot(object = pbmc_small) } \seealso{ \code{\link{FindVariableFeatures}} } \concept{visualization} Seurat/man/CombinePlots.Rd0000644000176200001440000000212314005656653015165 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/visualization.R \name{CombinePlots} \alias{CombinePlots} \title{Combine ggplot2-based plots into a single plot} \usage{ CombinePlots(plots, ncol = NULL, legend = NULL, ...) } \arguments{ \item{plots}{A list of gg objects} \item{ncol}{Number of columns} \item{legend}{Combine legends into a single legend; choose from 'right' or 'bottom'; pass 'none' to remove legends, or \code{NULL} to leave legends as they are} \item{...}{Extra parameters passed to plot_grid} } \value{ A combined plot } \description{ Combine ggplot2-based plots into a single plot } \examples{ data("pbmc_small") pbmc_small[['group']] <- sample( x = c('g1', 'g2'), size = ncol(x = pbmc_small), replace = TRUE ) plot1 <- FeaturePlot( object = pbmc_small, features = 'MS4A1', split.by = 'group' ) plot2 <- FeaturePlot( object = pbmc_small, features = 'FCN1', split.by = 'group' ) CombinePlots( plots = list(plot1, plot2), legend = 'none', nrow = length(x = unique(x = pbmc_small[['group', drop = TRUE]])) ) } \concept{visualization} Seurat/man/LabelPoints.Rd0000644000176200001440000000245014005656653015006 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/visualization.R \name{LabelPoints} \alias{LabelPoints} \alias{Labeler} \title{Add text labels to a ggplot2 plot} \usage{ LabelPoints( plot, points, labels = NULL, repel = FALSE, xnudge = 0.3, ynudge = 0.05, ... ) } \arguments{ \item{plot}{A ggplot2 plot with a GeomPoint layer} \item{points}{A vector of points to label; if \code{NULL}, will use all points in the plot} \item{labels}{A vector of labels for the points; if \code{NULL}, will use rownames of the data provided to the plot at the points selected} \item{repel}{Use \code{geom_text_repel} to create nicely-repelled labels; this is slow when a lot of points are being plotted.
If using \code{repel}, set \code{xnudge} and \code{ynudge} to 0} \item{xnudge, ynudge}{Amount to nudge X and Y coordinates of labels by} \item{...}{Extra parameters passed to \code{geom_text}} } \value{ A ggplot object } \description{ Add text labels to a ggplot2 plot } \examples{ data("pbmc_small") ff <- TopFeatures(object = pbmc_small[['pca']]) cc <- TopCells(object = pbmc_small[['pca']]) plot <- FeatureScatter(object = pbmc_small, feature1 = ff[1], feature2 = ff[2]) LabelPoints(plot = plot, points = cc) } \seealso{ \code{\link[ggplot2]{geom_text}} } \concept{visualization} Seurat/man/BarcodeInflectionsPlot.Rd0000644000176200001440000000203314005656653017163 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/visualization.R \name{BarcodeInflectionsPlot} \alias{BarcodeInflectionsPlot} \title{Plot the Barcode Distribution and Calculated Inflection Points} \usage{ BarcodeInflectionsPlot(object) } \arguments{ \item{object}{Seurat object} } \value{ Returns a `ggplot2` object showing the by-group inflection points and provided (or default) rank threshold values in grey. } \description{ This function plots the calculated inflection points derived from the barcode-rank distribution. } \details{ See [CalculateBarcodeInflections()] to calculate inflection points and [SubsetByBarcodeInflections()] to subsequently subset the Seurat object. } \examples{ data("pbmc_small") pbmc_small <- CalculateBarcodeInflections(pbmc_small, group.column = 'groups') BarcodeInflectionsPlot(pbmc_small) } \seealso{ \code{\link{CalculateBarcodeInflections}} \code{\link{SubsetByBarcodeInflections}} } \author{ Robert A. Amezquita, \email{robert.amezquita@fredhutch.org} } \concept{visualization} Seurat/man/TransferAnchorSet-class.Rd0000644000176200001440000000061214005656653017266 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/objects.R \docType{class} \name{TransferAnchorSet-class} \alias{TransferAnchorSet-class} \alias{TransferAnchorSet} \title{The TransferAnchorSet Class} \description{ Inherits from the Anchorset class. Implemented mainly for method dispatch purposes. See \code{\link{AnchorSet}} for slot details. } \concept{objects} Seurat/man/SlideSeq-class.Rd0000644000176200001440000000116714024674706015412 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/objects.R \docType{class} \name{SlideSeq-class} \alias{SlideSeq-class} \alias{SlideSeq} \title{The SlideSeq class} \description{ The SlideSeq class represents spatial information from the Slide-seq platform } \section{Slots}{ \describe{ \item{\code{coordinates}}{...} }} \section{Slots}{ \describe{ \item{\code{assay}}{Name of assay to associate image data with; will give this image priority for visualization when the assay is set as the active/default assay in a \code{Seurat} object} \item{\code{key}}{Key for the image} } } \concept{spatial} Seurat/man/JackStraw.Rd0000644000176200001440000000330214005656653014460 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/dimensional_reduction.R \name{JackStraw} \alias{JackStraw} \title{Determine statistical significance of PCA scores.} \usage{ JackStraw( object, reduction = "pca", assay = NULL, dims = 20, num.replicate = 100, prop.freq = 0.01, verbose = TRUE, maxit = 1000 ) } \arguments{ \item{object}{Seurat object} \item{reduction}{DimReduc to use. 
ONLY PCA CURRENTLY SUPPORTED.} \item{assay}{Assay used to calculate reduction.} \item{dims}{Number of PCs to compute significance for} \item{num.replicate}{Number of replicate samplings to perform} \item{prop.freq}{Proportion of the data to randomly permute for each replicate} \item{verbose}{Print progress bar showing the number of replicates that have been processed.} \item{maxit}{Maximum number of iterations to be performed by the irlba function of RunPCA} } \value{ Returns a Seurat object where JS(object = object[['pca']], slot = 'empirical') represents p-values for each gene in the PCA analysis. If ProjectPCA is subsequently run, JS(object = object[['pca']], slot = 'full') then represents p-values for all genes. } \description{ Randomly permutes a subset of data, and calculates projected PCA scores for these 'random' genes. Then compares the PCA scores for the 'random' genes with the observed PCA scores to determine statistical significance. End result is a p-value for each gene's association with each principal component. } \examples{ \dontrun{ data("pbmc_small") pbmc_small = suppressWarnings(JackStraw(pbmc_small)) head(JS(object = pbmc_small[['pca']], slot = 'empirical')) } } \references{ Inspired by Chung et al, Bioinformatics (2014) } \concept{dimensional_reduction} Seurat/man/LoadAnnoyIndex.Rd0000644000176200001440000000064314005656653015450 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/utilities.R \name{LoadAnnoyIndex} \alias{LoadAnnoyIndex} \title{Load the Annoy index file} \usage{ LoadAnnoyIndex(object, file) } \arguments{ \item{object}{Neighbor object} \item{file}{Path to file with annoy index} } \value{ Returns the Neighbor object with the index stored } \description{ Load the Annoy index file } \concept{utilities} Seurat/man/SplitObject.Rd0000644000176200001440000000215714005656653015020 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/objects.R \name{SplitObject} \alias{SplitObject} \title{Splits object into a list of subsetted objects.} \usage{ SplitObject(object, split.by = "ident") } \arguments{ \item{object}{Seurat object} \item{split.by}{Attribute for splitting. Default is "ident". Currently only supported for class-level (i.e. non-quantitative) attributes.} } \value{ A named list of Seurat objects, each containing a subset of cells from the original object. } \description{ Splits object based on a single attribute into a list of subsetted objects, one for each level of the attribute. For example, useful for taking an object that contains cells from many patients, and subdividing it into patient-specific objects. } \examples{ data("pbmc_small") # Assign the test object a three level attribute groups <- sample(c("group1", "group2", "group3"), size = 80, replace = TRUE) names(groups) <- colnames(pbmc_small) pbmc_small <- AddMetaData(object = pbmc_small, metadata = groups, col.name = "group") obj.list <- SplitObject(pbmc_small, split.by = "group") } \concept{objects} Seurat/man/LabelClusters.Rd0000644000176200001440000000305014005656653015333 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/visualization.R \name{LabelClusters} \alias{LabelClusters} \title{Label clusters on a ggplot2-based scatter plot} \usage{ LabelClusters( plot, id, clusters = NULL, labels = NULL, split.by = NULL, repel = TRUE, box = FALSE, geom = "GeomPoint", position = "median", ...
) } \arguments{ \item{plot}{A ggplot2-based scatter plot} \item{id}{Name of variable used for coloring scatter plot} \item{clusters}{Vector of cluster ids to label} \item{labels}{Custom labels for the clusters} \item{split.by}{Split labels by some grouping label, useful when using \code{\link[ggplot2]{facet_wrap}} or \code{\link[ggplot2]{facet_grid}}} \item{repel}{Use \code{geom_text_repel} to create nicely-repelled labels} \item{box}{Use geom_label/geom_label_repel (includes a box around the text labels)} \item{geom}{Name of geom to get X/Y aesthetic names for} \item{position}{How to place the label if repel = FALSE. If "median", place the label at the median position. If "nearest" place the label at the position of the nearest data point to the median.} \item{...}{Extra parameters to \code{\link[ggrepel]{geom_text_repel}}, such as \code{size}} } \value{ A ggplot2-based scatter plot with cluster labels } \description{ Label clusters on a ggplot2-based scatter plot } \examples{ data("pbmc_small") plot <- DimPlot(object = pbmc_small) LabelClusters(plot = plot, id = 'ident') } \seealso{ \code{\link[ggrepel]{geom_text_repel}} \code{\link[ggplot2]{geom_text}} } \concept{visualization} Seurat/man/FindClusters.Rd0000644000176200001440000000622014005656653015176 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/generics.R, R/clustering.R \name{FindClusters} \alias{FindClusters} \alias{FindClusters.default} \alias{FindClusters.Seurat} \title{Cluster Determination} \usage{ FindClusters(object, ...) \method{FindClusters}{default}( object, modularity.fxn = 1, initial.membership = NULL, node.sizes = NULL, resolution = 0.8, method = "matrix", algorithm = 1, n.start = 10, n.iter = 10, random.seed = 0, group.singletons = TRUE, temp.file.location = NULL, edge.file.name = NULL, verbose = TRUE, ... ) \method{FindClusters}{Seurat}( object, graph.name = NULL, modularity.fxn = 1, initial.membership = NULL, node.sizes = NULL, resolution = 0.8, method = "matrix", algorithm = 1, n.start = 10, n.iter = 10, random.seed = 0, group.singletons = TRUE, temp.file.location = NULL, edge.file.name = NULL, verbose = TRUE, ... ) } \arguments{ \item{object}{An object} \item{...}{Arguments passed to other methods} \item{modularity.fxn}{Modularity function (1 = standard; 2 = alternative).} \item{initial.membership, node.sizes}{Parameters to pass to the Python leidenalg function.} \item{resolution}{Value of the resolution parameter, use a value above (below) 1.0 if you want to obtain a larger (smaller) number of communities.} \item{method}{Method for running leiden (defaults to matrix which is fast for small datasets). Enable method = "igraph" to avoid casting large data to a dense matrix.} \item{algorithm}{Algorithm for modularity optimization (1 = original Louvain algorithm; 2 = Louvain algorithm with multilevel refinement; 3 = SLM algorithm; 4 = Leiden algorithm). Leiden requires the leidenalg python package.} \item{n.start}{Number of random starts.} \item{n.iter}{Maximal number of iterations per random start.} \item{random.seed}{Seed of the random number generator.} \item{group.singletons}{Group singletons into nearest cluster. If FALSE, assign all singletons to a "singleton" group} \item{temp.file.location}{Directory where intermediate files will be written.
Specify the ABSOLUTE path.} \item{edge.file.name}{Edge file to use as input for modularity optimizer jar.} \item{verbose}{Print output} \item{graph.name}{Name of graph to use for the clustering algorithm} } \value{ Returns a Seurat object where the idents have been updated with new cluster info; latest clustering results will be stored in object metadata under 'seurat_clusters'. Note that 'seurat_clusters' will be overwritten every time FindClusters is run } \description{ Identify clusters of cells by a shared nearest neighbor (SNN) modularity optimization based clustering algorithm. First calculate k-nearest neighbors and construct the SNN graph. Then optimize the modularity function to determine clusters. For a full description of the algorithms, see Waltman and van Eck (2013) \emph{The European Physical Journal B}. Thanks to Nigel Delaney (evolvedmicrobe@github) for the rewrite of the Java modularity optimizer code in Rcpp! } \details{ To run the Leiden algorithm, you must first install the leidenalg python package (e.g. via pip install leidenalg), see Traag et al (2018). } \concept{clustering} Seurat/man/PrepSCTFindMarkers.Rd0000644000176200001440000000310614170333512016164 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/differential_expression.R \name{PrepSCTFindMarkers} \alias{PrepSCTFindMarkers} \title{Prepare object to run differential expression on SCT assay with multiple models} \usage{ PrepSCTFindMarkers(object, assay = "SCT", verbose = TRUE) } \arguments{ \item{object}{Seurat object with SCT assays} \item{assay}{Assay name where SCT objects are stored; default is 'SCT'} \item{verbose}{Print messages and progress} } \value{ Returns a Seurat object with recorrected counts and data in the SCT assay. } \description{ Given a merged object with multiple SCT models, this function uses the minimum of the median UMI (calculated using the raw UMI counts) of the individual objects to reverse the individual SCT regression models, using this minimum of median UMI as the sequencing depth covariate. The counts slot of the SCT assay is replaced with recorrected counts and the data slot is replaced with log1p of recorrected counts.
} \examples{ data("pbmc_small") pbmc_small1 <- SCTransform(object = pbmc_small, variable.features.n = 20) pbmc_small2 <- SCTransform(object = pbmc_small, variable.features.n = 20) pbmc_merged <- merge(x = pbmc_small1, y = pbmc_small2) pbmc_merged <- PrepSCTFindMarkers(object = pbmc_merged) markers <- FindMarkers( object = pbmc_merged, ident.1 = "0", ident.2 = "1", assay = "SCT" ) pbmc_subset <- subset(pbmc_merged, idents = c("0", "1")) markers_subset <- FindMarkers( object = pbmc_subset, ident.1 = "0", ident.2 = "1", assay = "SCT", recorrect_umi = FALSE ) } \concept{differential_expression} Seurat/man/reexports.Rd0000644000176200001440000000763414005656653014636 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/reexports.R \docType{import} \name{reexports} \alias{reexports} \alias{AddMetaData} \alias{as.Graph} \alias{as.Neighbor} \alias{as.Seurat} \alias{as.sparse} \alias{Assays} \alias{Cells} \alias{CellsByIdentities} \alias{Command} \alias{CreateAssayObject} \alias{CreateDimReducObject} \alias{CreateSeuratObject} \alias{DefaultAssay} \alias{DefaultAssay<-} \alias{Distances} \alias{Embeddings} \alias{FetchData} \alias{GetAssayData} \alias{GetImage} \alias{GetTissueCoordinates} \alias{HVFInfo} \alias{Idents} \alias{Idents<-} \alias{Images} \alias{Index} \alias{Index<-} \alias{Indices} \alias{IsGlobal} \alias{JS} \alias{JS<-} \alias{Key} \alias{Key<-} \alias{Loadings} \alias{Loadings<-} \alias{LogSeuratCommand} \alias{Misc} \alias{Misc<-} \alias{Neighbors} \alias{Project} \alias{Project<-} \alias{Radius} \alias{Reductions} \alias{RenameCells} \alias{RenameIdents} \alias{ReorderIdent} \alias{RowMergeSparseMatrices} \alias{SetAssayData} \alias{SetIdent} \alias{SpatiallyVariableFeatures} \alias{StashIdent} \alias{Stdev} \alias{SVFInfo} \alias{Tool} \alias{Tool<-} \alias{UpdateSeuratObject} \alias{VariableFeatures} \alias{VariableFeatures<-} \alias{WhichCells} \title{Objects exported from other packages} \keyword{internal} \description{ These objects are imported from other packages. Follow the links below to see their documentation. 
\describe{ \item{SeuratObject}{\code{\link[SeuratObject]{AddMetaData}}, \code{\link[SeuratObject:ObjectAccess]{Assays}}, \code{\link[SeuratObject]{Cells}}, \code{\link[SeuratObject]{CellsByIdentities}}, \code{\link[SeuratObject]{Command}}, \code{\link[SeuratObject]{CreateAssayObject}}, \code{\link[SeuratObject]{CreateDimReducObject}}, \code{\link[SeuratObject]{CreateSeuratObject}}, \code{\link[SeuratObject]{DefaultAssay}}, \code{\link[SeuratObject:DefaultAssay]{DefaultAssay<-}}, \code{\link[SeuratObject]{Distances}}, \code{\link[SeuratObject]{Embeddings}}, \code{\link[SeuratObject]{FetchData}}, \code{\link[SeuratObject:AssayData]{GetAssayData}}, \code{\link[SeuratObject]{GetImage}}, \code{\link[SeuratObject]{GetTissueCoordinates}}, \code{\link[SeuratObject:VariableFeatures]{HVFInfo}}, \code{\link[SeuratObject]{Idents}}, \code{\link[SeuratObject:Idents]{Idents<-}}, \code{\link[SeuratObject]{Images}}, \code{\link[SeuratObject]{Index}}, \code{\link[SeuratObject:Index]{Index<-}}, \code{\link[SeuratObject]{Indices}}, \code{\link[SeuratObject]{IsGlobal}}, \code{\link[SeuratObject]{JS}}, \code{\link[SeuratObject:JS]{JS<-}}, \code{\link[SeuratObject]{Key}}, \code{\link[SeuratObject:Key]{Key<-}}, \code{\link[SeuratObject]{Loadings}}, \code{\link[SeuratObject:Loadings]{Loadings<-}}, \code{\link[SeuratObject]{LogSeuratCommand}}, \code{\link[SeuratObject]{Misc}}, \code{\link[SeuratObject:Misc]{Misc<-}}, \code{\link[SeuratObject:ObjectAccess]{Neighbors}}, \code{\link[SeuratObject]{Project}}, \code{\link[SeuratObject:Project]{Project<-}}, \code{\link[SeuratObject]{Radius}}, \code{\link[SeuratObject:ObjectAccess]{Reductions}}, \code{\link[SeuratObject]{RenameCells}}, \code{\link[SeuratObject:Idents]{RenameIdents}}, \code{\link[SeuratObject:Idents]{ReorderIdent}}, \code{\link[SeuratObject]{RowMergeSparseMatrices}}, \code{\link[SeuratObject:VariableFeatures]{SVFInfo}}, \code{\link[SeuratObject:AssayData]{SetAssayData}}, \code{\link[SeuratObject:Idents]{SetIdent}}, \code{\link[SeuratObject:VariableFeatures]{SpatiallyVariableFeatures}}, \code{\link[SeuratObject:Idents]{StashIdent}}, \code{\link[SeuratObject]{Stdev}}, \code{\link[SeuratObject]{Tool}}, \code{\link[SeuratObject:Tool]{Tool<-}}, \code{\link[SeuratObject]{UpdateSeuratObject}}, \code{\link[SeuratObject]{VariableFeatures}}, \code{\link[SeuratObject:VariableFeatures]{VariableFeatures<-}}, \code{\link[SeuratObject]{WhichCells}}, \code{\link[SeuratObject]{as.Graph}}, \code{\link[SeuratObject]{as.Neighbor}}, \code{\link[SeuratObject]{as.Seurat}}, \code{\link[SeuratObject]{as.sparse}}} }} Seurat/man/RunICA.Rd0000644000176200001440000000426314005656653013657 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/generics.R, R/dimensional_reduction.R \name{RunICA} \alias{RunICA} \alias{RunICA.default} \alias{RunICA.Assay} \alias{RunICA.Seurat} \title{Run Independent Component Analysis on gene expression} \usage{ RunICA(object, ...) \method{RunICA}{default}( object, assay = NULL, nics = 50, rev.ica = FALSE, ica.function = "icafast", verbose = TRUE, ndims.print = 1:5, nfeatures.print = 30, reduction.name = "ica", reduction.key = "ica_", seed.use = 42, ... ) \method{RunICA}{Assay}( object, assay = NULL, features = NULL, nics = 50, rev.ica = FALSE, ica.function = "icafast", verbose = TRUE, ndims.print = 1:5, nfeatures.print = 30, reduction.name = "ica", reduction.key = "ica_", seed.use = 42, ... 
) \method{RunICA}{Seurat}( object, assay = NULL, features = NULL, nics = 50, rev.ica = FALSE, ica.function = "icafast", verbose = TRUE, ndims.print = 1:5, nfeatures.print = 30, reduction.name = "ica", reduction.key = "IC_", seed.use = 42, ... ) } \arguments{ \item{object}{Seurat object} \item{\dots}{Additional arguments to be passed to fastica} \item{assay}{Name of Assay ICA is being run on} \item{nics}{Number of ICs to compute} \item{rev.ica}{By default, computes the dimensional reduction on the cell x feature matrix. Setting to true will compute it on the transpose (feature x cell matrix).} \item{ica.function}{ICA function from ica package to run (options: icafast, icaimax, icajade)} \item{verbose}{Print the top genes associated with high/low loadings for the ICs} \item{ndims.print}{ICs to print genes for} \item{nfeatures.print}{Number of genes to print for each IC} \item{reduction.name}{dimensional reduction name} \item{reduction.key}{dimensional reduction key, specifies the string before the number for the dimension names.} \item{seed.use}{Set a random seed. Setting NULL will not set a seed.} \item{features}{Features to compute ICA on} } \description{ Run fastica algorithm from the ica package for ICA dimensionality reduction. For details about stored ICA calculation parameters, see \code{PrintICAParams}. } \concept{dimensional_reduction} Seurat/man/DietSeurat.Rd0000644000176200001440000000205014005656653014637 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/objects.R \name{DietSeurat} \alias{DietSeurat} \title{Slim down a Seurat object} \usage{ DietSeurat( object, counts = TRUE, data = TRUE, scale.data = FALSE, features = NULL, assays = NULL, dimreducs = NULL, graphs = NULL ) } \arguments{ \item{object}{Seurat object} \item{counts}{Preserve the count matrices for the assays specified} \item{data}{Preserve the data slot for the assays specified} \item{scale.data}{Preserve the scale.data slot for the assays specified} \item{features}{Only keep a subset of features, defaults to all features} \item{assays}{Only keep a subset of assays specified here} \item{dimreducs}{Only keep a subset of DimReducs specified here (if NULL, remove all DimReducs)} \item{graphs}{Only keep a subset of Graphs specified here (if NULL, remove all Graphs)} } \description{ Keep only certain aspects of the Seurat object. Can be useful in functions that utilize merge as it reduces the amount of data in the merge. } \concept{objects} Seurat/man/PlotPerturbScore.Rd0000644000176200001440000000272714152476164016057 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/mixscape.R \name{PlotPerturbScore} \alias{PlotPerturbScore} \title{Function to plot perturbation score distributions.} \usage{ PlotPerturbScore( object, target.gene.class = "gene", target.gene.ident = NULL, mixscape.class = "mixscape_class", col = "orange2", split.by = NULL, before.mixscape = FALSE, prtb.type = "KO" ) } \arguments{ \item{object}{An object of class Seurat.} \item{target.gene.class}{meta data column specifying all target gene names in the experiment.} \item{target.gene.ident}{Target gene name to visualize perturbation scores for.} \item{mixscape.class}{meta data column specifying mixscape classifications.} \item{col}{Specify color of target gene class or knockout cell class. For control non-targeting and non-perturbed cells, colors are set to different shades of grey.} \item{split.by}{For datasets with more than one cell type. 
Set equal TRUE to visualize perturbation scores for each cell type separately.} \item{before.mixscape}{Option to split densities based on mixscape classification (default) or original target gene classification. Default is set to NULL and plots cells by original class ID.} \item{prtb.type}{specify type of CRISPR perturbation expected for labeling mixscape classifications. Default is KO.} } \value{ A ggplot object. } \description{ Density plots to visualize perturbation scores calculated from RunMixscape function. } \concept{mixscape} Seurat/man/FastRowScale.Rd0000644000176200001440000000130014005656653015120 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/utilities.R \name{FastRowScale} \alias{FastRowScale} \title{Scale and/or center matrix rowwise} \usage{ FastRowScale(mat, center = TRUE, scale = TRUE, scale_max = 10) } \arguments{ \item{mat}{A matrix} \item{center}{a logical value indicating whether to center the rows} \item{scale}{a logical value indicating whether to scale the rows} \item{scale_max}{clip all values greater than scale_max to scale_max. Don't clip if Inf.} } \value{ Returns the center/scaled matrix } \description{ Performs row scaling and/or centering. Equivalent to using t(scale(t(mat))) in R except in the case of NA values. } \concept{utilities} Seurat/man/ModalityWeights-class.Rd0000644000176200001440000000176414005656653017021 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/objects.R \docType{class} \name{ModalityWeights-class} \alias{ModalityWeights-class} \alias{ModalityWeights} \title{The ModalityWeights Class} \description{ The ModalityWeights class is an intermediate data storage class that stores the modality weight and other related information needed for performing downstream analyses - namely data integration (\code{FindModalityWeights}) and data transfer (\code{\link{FindMultiModalNeighbors}}). } \section{Slots}{ \describe{ \item{\code{modality.weight.list}}{A list of modality weights value from all modalities} \item{\code{modality.assay}}{Names of assays for the list of dimensional reductions} \item{\code{params}}{A list of parameters used in the FindModalityWeights} \item{\code{score.matrix}}{a list of score matrices representing cross and within-modality prediction score, and kernel value} \item{\code{command}}{Store log of parameters that were used} }} \concept{objects} Seurat/man/MixscapeHeatmap.Rd0000644000176200001440000001150114152476164015640 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/mixscape.R \name{MixscapeHeatmap} \alias{MixscapeHeatmap} \title{Differential expression heatmap for mixscape} \usage{ MixscapeHeatmap( object, ident.1 = NULL, ident.2 = NULL, balanced = TRUE, logfc.threshold = 0.25, assay = "RNA", max.genes = 100, test.use = "wilcox", max.cells.group = NULL, order.by.prob = TRUE, group.by = NULL, mixscape.class = "mixscape_class", prtb.type = "KO", fc.name = "avg_log2FC", pval.cutoff = 0.05, ... 
) } \arguments{ \item{object}{An object} \item{ident.1}{Identity class to define markers for; pass an object of class \code{phylo} or 'clustertree' to find markers for a node in a cluster tree; passing 'clustertree' requires \code{\link{BuildClusterTree}} to have been run} \item{ident.2}{A second identity class for comparison; if \code{NULL}, use all other cells for comparison; if an object of class \code{phylo} or 'clustertree' is passed to \code{ident.1}, must pass a node to find markers for} \item{balanced}{Plot an equal number of genes with both groups of cells.} \item{logfc.threshold}{Limit testing to genes which show, on average, at least X-fold difference (log-scale) between the two groups of cells. Default is 0.25 Increasing logfc.threshold speeds up the function, but can miss weaker signals.} \item{assay}{Assay to use in differential expression testing} \item{max.genes}{Total number of DE genes to plot.} \item{test.use}{Denotes which test to use. Available options are: \itemize{ \item{"wilcox"} : Identifies differentially expressed genes between two groups of cells using a Wilcoxon Rank Sum test (default) \item{"bimod"} : Likelihood-ratio test for single cell gene expression, (McDavid et al., Bioinformatics, 2013) \item{"roc"} : Identifies 'markers' of gene expression using ROC analysis. For each gene, evaluates (using AUC) a classifier built on that gene alone, to classify between two groups of cells. An AUC value of 1 means that expression values for this gene alone can perfectly classify the two groupings (i.e. Each of the cells in cells.1 exhibit a higher level than each of the cells in cells.2). An AUC value of 0 also means there is perfect classification, but in the other direction. A value of 0.5 implies that the gene has no predictive power to classify the two groups. Returns a 'predictive power' (abs(AUC-0.5) * 2) ranked matrix of putative differentially expressed genes. \item{"t"} : Identify differentially expressed genes between two groups of cells using the Student's t-test. \item{"negbinom"} : Identifies differentially expressed genes between two groups of cells using a negative binomial generalized linear model. Use only for UMI-based datasets \item{"poisson"} : Identifies differentially expressed genes between two groups of cells using a poisson generalized linear model. Use only for UMI-based datasets \item{"LR"} : Uses a logistic regression framework to determine differentially expressed genes. Constructs a logistic regression model predicting group membership based on each feature individually and compares this to a null model with a likelihood ratio test. \item{"MAST"} : Identifies differentially expressed genes between two groups of cells using a hurdle model tailored to scRNA-seq data. Utilizes the MAST package to run the DE testing. \item{"DESeq2"} : Identifies differentially expressed genes between two groups of cells based on a model using DESeq2 which uses a negative binomial distribution (Love et al, Genome Biology, 2014).This test does not support pre-filtering of genes based on average difference (or percent detection rate) between cell groups. However, genes may be pre-filtered based on their minimum detection rate (min.pct) across both cell groups. 
To use this method, please install DESeq2, using the instructions at https://bioconductor.org/packages/release/bioc/html/DESeq2.html }} \item{max.cells.group}{Number of cells per identity to plot.} \item{order.by.prob}{Order cells on heatmap based on their mixscape knockout probability from highest to lowest score.} \item{group.by}{(Deprecated) Option to split densities based on mixscape classification. Please use mixscape.class instead} \item{mixscape.class}{metadata column with mixscape classifications.} \item{prtb.type}{specify type of CRISPR perturbation expected for labeling mixscape classifications. Default is KO.} \item{fc.name}{Name of the fold change, average difference, or custom function column in the output data.frame. Default is avg_log2FC} \item{pval.cutoff}{P-value cut-off for selection of significantly DE genes.} \item{...}{Arguments passed to other methods and to specific DE methods} } \value{ A ggplot object. } \description{ Draws a heatmap of single cell feature expression with cells ordered by their mixscape ko probabilities. } \concept{mixscape} Seurat/man/DotPlot.Rd0000644000176200001440000000527714005656653014171 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/visualization.R \name{DotPlot} \alias{DotPlot} \alias{SplitDotPlotGG} \title{Dot plot visualization} \usage{ DotPlot( object, assay = NULL, features, cols = c("lightgrey", "blue"), col.min = -2.5, col.max = 2.5, dot.min = 0, dot.scale = 6, idents = NULL, group.by = NULL, split.by = NULL, cluster.idents = FALSE, scale = TRUE, scale.by = "radius", scale.min = NA, scale.max = NA ) } \arguments{ \item{object}{Seurat object} \item{assay}{Name of assay to use, defaults to the active assay} \item{features}{Input vector of features, or named list of feature vectors if feature-grouped panels are desired (replicates the functionality of the old SplitDotPlotGG)} \item{cols}{Colors to plot: the name of a palette from \code{RColorBrewer::brewer.pal.info}, a pair of colors defining a gradient, or 3+ colors defining multiple gradients (if split.by is set)} \item{col.min}{Minimum scaled average expression threshold (everything smaller will be set to this)} \item{col.max}{Maximum scaled average expression threshold (everything larger will be set to this)} \item{dot.min}{The fraction of cells at which to draw the smallest dot (default is 0). All cell groups with less than this expressing the given gene will have no dot drawn.} \item{dot.scale}{Scale the size of the points, similar to cex} \item{idents}{Identity classes to include in plot (default is all)} \item{group.by}{Factor to group the cells by} \item{split.by}{Factor to split the groups by (replicates the functionality of the old SplitDotPlotGG); see \code{\link{FetchData}} for more details} \item{cluster.idents}{Whether to order identities by hierarchical clusters based on given features, default is FALSE} \item{scale}{Determine whether the data is scaled, TRUE for default} \item{scale.by}{Scale the size of the points by 'size' or by 'radius'} \item{scale.min}{Set lower limit for scaling, use NA for default} \item{scale.max}{Set upper limit for scaling, use NA for default} } \value{ A ggplot object } \description{ Intuitive way of visualizing how feature expression changes across different identity classes (clusters). The size of the dot encodes the percentage of cells within a class, while the color encodes the AverageExpression level across all cells within a class (blue is high). 
} \examples{ data("pbmc_small") cd_genes <- c("CD247", "CD3E", "CD9") DotPlot(object = pbmc_small, features = cd_genes) pbmc_small[['groups']] <- sample(x = c('g1', 'g2'), size = ncol(x = pbmc_small), replace = TRUE) DotPlot(object = pbmc_small, features = cd_genes, split.by = 'groups') } \seealso{ \code{RColorBrewer::brewer.pal.info} } \concept{visualization} Seurat/man/PlotClusterTree.Rd0000644000176200001440000000160014152507372015662 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/visualization.R \name{PlotClusterTree} \alias{PlotClusterTree} \title{Plot clusters as a tree} \usage{ PlotClusterTree(object, direction = "downwards", ...) } \arguments{ \item{object}{Seurat object} \item{direction}{A character string specifying the direction of the tree (default is downwards) Possible options: "rightwards", "leftwards", "upwards", and "downwards".} \item{\dots}{Additional arguments to \code{\link[ape:plot.phylo]{ape::plot.phylo}}} } \value{ Plots dendogram (must be precomputed using BuildClusterTree), returns no value } \description{ Plots previously computed tree (from BuildClusterTree) } \examples{ if (requireNamespace("ape", quietly = TRUE)) { data("pbmc_small") pbmc_small <- BuildClusterTree(object = pbmc_small) PlotClusterTree(object = pbmc_small) } } \concept{visualization} Seurat/man/RunSPCA.Rd0000644000176200001440000000454114156670503014005 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/generics.R, R/dimensional_reduction.R \name{RunSPCA} \alias{RunSPCA} \alias{RunSPCA.default} \alias{RunSPCA.Assay} \alias{RunSPCA.Seurat} \title{Run Supervised Principal Component Analysis} \usage{ RunSPCA(object, ...) \method{RunSPCA}{default}( object, assay = NULL, npcs = 50, reduction.key = "SPC_", graph = NULL, verbose = FALSE, seed.use = 42, ... ) \method{RunSPCA}{Assay}( object, assay = NULL, features = NULL, npcs = 50, reduction.key = "SPC_", graph = NULL, verbose = TRUE, seed.use = 42, ... ) \method{RunSPCA}{Seurat}( object, assay = NULL, features = NULL, npcs = 50, reduction.name = "spca", reduction.key = "SPC_", graph = NULL, verbose = TRUE, seed.use = 42, ... ) } \arguments{ \item{object}{An object} \item{...}{Arguments passed to other methods and IRLBA} \item{assay}{Name of Assay SPCA is being run on} \item{npcs}{Total Number of SPCs to compute and store (50 by default)} \item{reduction.key}{dimensional reduction key, specifies the string before the number for the dimension names. SPC by default} \item{graph}{Graph used supervised by SPCA} \item{verbose}{Print the top genes associated with high/low loadings for the SPCs} \item{seed.use}{Set a random seed. By default, sets the seed to 42. Setting NULL will not set a seed.} \item{features}{Features to compute SPCA on. If features=NULL, SPCA will be run using the variable features for the Assay.} \item{reduction.name}{dimensional reduction name, spca by default} } \value{ Returns Seurat object with the SPCA calculation stored in the reductions slot } \description{ Run a supervised PCA (SPCA) dimensionality reduction supervised by a cell-cell kernel. SPCA is used to capture a linear transformation which maximizes its dependency to the given cell-cell kernel. We use SNN graph as the kernel to supervise the linear matrix factorization. } \references{ Barshan E, Ghodsi A, Azimifar Z, Jahromi MZ. Supervised principal component analysis: Visualization, classification and regression on subspaces and submanifolds. Pattern Recognition. 
2011 Jul 1;44(7):1357-71. \url{https://www.sciencedirect.com/science/article/pii/S0031320310005819?casa_token=AZMFg5OtPnAAAAAA:_Udu7GJ7G2ed1-XSmr-3IGSISUwcHfMpNtCj-qacXH5SBC4nwzVid36GXI3r8XG8dK5WOQui}; } \concept{dimensional_reduction} Seurat/man/FindMarkers.Rd0000644000176200001440000002675714170106500014777 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/generics.R, R/differential_expression.R \name{FindMarkers} \alias{FindMarkers} \alias{FindMarkersNode} \alias{FindMarkers.default} \alias{FindMarkers.Assay} \alias{FindMarkers.SCTAssay} \alias{FindMarkers.DimReduc} \alias{FindMarkers.Seurat} \title{Gene expression markers of identity classes} \usage{ FindMarkers(object, ...) \method{FindMarkers}{default}( object, slot = "data", counts = numeric(), cells.1 = NULL, cells.2 = NULL, features = NULL, logfc.threshold = 0.25, test.use = "wilcox", min.pct = 0.1, min.diff.pct = -Inf, verbose = TRUE, only.pos = FALSE, max.cells.per.ident = Inf, random.seed = 1, latent.vars = NULL, min.cells.feature = 3, min.cells.group = 3, pseudocount.use = 1, fc.results = NULL, densify = FALSE, ... ) \method{FindMarkers}{Assay}( object, slot = "data", cells.1 = NULL, cells.2 = NULL, features = NULL, logfc.threshold = 0.25, test.use = "wilcox", min.pct = 0.1, min.diff.pct = -Inf, verbose = TRUE, only.pos = FALSE, max.cells.per.ident = Inf, random.seed = 1, latent.vars = NULL, min.cells.feature = 3, min.cells.group = 3, pseudocount.use = 1, mean.fxn = NULL, fc.name = NULL, base = 2, densify = FALSE, ... ) \method{FindMarkers}{SCTAssay}( object, slot = "data", cells.1 = NULL, cells.2 = NULL, features = NULL, logfc.threshold = 0.25, test.use = "wilcox", min.pct = 0.1, min.diff.pct = -Inf, verbose = TRUE, only.pos = FALSE, max.cells.per.ident = Inf, random.seed = 1, latent.vars = NULL, min.cells.feature = 3, min.cells.group = 3, pseudocount.use = 1, mean.fxn = NULL, fc.name = NULL, base = 2, densify = FALSE, recorrect_umi = TRUE, ... ) \method{FindMarkers}{DimReduc}( object, cells.1 = NULL, cells.2 = NULL, features = NULL, logfc.threshold = 0.25, test.use = "wilcox", min.pct = 0.1, min.diff.pct = -Inf, verbose = TRUE, only.pos = FALSE, max.cells.per.ident = Inf, random.seed = 1, latent.vars = NULL, min.cells.feature = 3, min.cells.group = 3, pseudocount.use = 1, mean.fxn = rowMeans, fc.name = NULL, densify = FALSE, ... ) \method{FindMarkers}{Seurat}( object, ident.1 = NULL, ident.2 = NULL, group.by = NULL, subset.ident = NULL, assay = NULL, slot = "data", reduction = NULL, features = NULL, logfc.threshold = 0.25, test.use = "wilcox", min.pct = 0.1, min.diff.pct = -Inf, verbose = TRUE, only.pos = FALSE, max.cells.per.ident = Inf, random.seed = 1, latent.vars = NULL, min.cells.feature = 3, min.cells.group = 3, pseudocount.use = 1, mean.fxn = NULL, fc.name = NULL, base = 2, densify = FALSE, ... ) } \arguments{ \item{object}{An object} \item{...}{Arguments passed to other methods and to specific DE methods} \item{slot}{Slot to pull data from; note that if \code{test.use} is "negbinom", "poisson", or "DESeq2", \code{slot} will be set to "counts"} \item{counts}{Count matrix if using scale.data for DE tests. This is used for computing pct.1 and pct.2 and for filtering features based on fraction expressing} \item{cells.1}{Vector of cell names belonging to group 1} \item{cells.2}{Vector of cell names belonging to group 2} \item{features}{Genes to test. 
Default is to use all genes} \item{logfc.threshold}{Limit testing to genes which show, on average, at least X-fold difference (log-scale) between the two groups of cells. Default is 0.25 Increasing logfc.threshold speeds up the function, but can miss weaker signals.} \item{test.use}{Denotes which test to use. Available options are: \itemize{ \item{"wilcox"} : Identifies differentially expressed genes between two groups of cells using a Wilcoxon Rank Sum test (default) \item{"bimod"} : Likelihood-ratio test for single cell gene expression, (McDavid et al., Bioinformatics, 2013) \item{"roc"} : Identifies 'markers' of gene expression using ROC analysis. For each gene, evaluates (using AUC) a classifier built on that gene alone, to classify between two groups of cells. An AUC value of 1 means that expression values for this gene alone can perfectly classify the two groupings (i.e. Each of the cells in cells.1 exhibit a higher level than each of the cells in cells.2). An AUC value of 0 also means there is perfect classification, but in the other direction. A value of 0.5 implies that the gene has no predictive power to classify the two groups. Returns a 'predictive power' (abs(AUC-0.5) * 2) ranked matrix of putative differentially expressed genes. \item{"t"} : Identify differentially expressed genes between two groups of cells using the Student's t-test. \item{"negbinom"} : Identifies differentially expressed genes between two groups of cells using a negative binomial generalized linear model. Use only for UMI-based datasets \item{"poisson"} : Identifies differentially expressed genes between two groups of cells using a poisson generalized linear model. Use only for UMI-based datasets \item{"LR"} : Uses a logistic regression framework to determine differentially expressed genes. Constructs a logistic regression model predicting group membership based on each feature individually and compares this to a null model with a likelihood ratio test. \item{"MAST"} : Identifies differentially expressed genes between two groups of cells using a hurdle model tailored to scRNA-seq data. Utilizes the MAST package to run the DE testing. \item{"DESeq2"} : Identifies differentially expressed genes between two groups of cells based on a model using DESeq2 which uses a negative binomial distribution (Love et al, Genome Biology, 2014).This test does not support pre-filtering of genes based on average difference (or percent detection rate) between cell groups. However, genes may be pre-filtered based on their minimum detection rate (min.pct) across both cell groups. To use this method, please install DESeq2, using the instructions at https://bioconductor.org/packages/release/bioc/html/DESeq2.html }} \item{min.pct}{only test genes that are detected in a minimum fraction of min.pct cells in either of the two populations. Meant to speed up the function by not testing genes that are very infrequently expressed. Default is 0.1} \item{min.diff.pct}{only test genes that show a minimum difference in the fraction of detection between the two groups. Set to -Inf by default} \item{verbose}{Print a progress bar once expression testing begins} \item{only.pos}{Only return positive markers (FALSE by default)} \item{max.cells.per.ident}{Down sample each identity class to a max number. Default is no downsampling. 
Not activated by default (set to Inf)} \item{random.seed}{Random seed for downsampling} \item{latent.vars}{Variables to test, used only when \code{test.use} is one of 'LR', 'negbinom', 'poisson', or 'MAST'} \item{min.cells.feature}{Minimum number of cells expressing the feature in at least one of the two groups, currently only used for poisson and negative binomial tests} \item{min.cells.group}{Minimum number of cells in one of the groups} \item{pseudocount.use}{Pseudocount to add to averaged expression values when calculating logFC. 1 by default.} \item{fc.results}{data.frame from FoldChange} \item{densify}{Convert the sparse matrix to a dense form before running the DE test. This can provide speedups but might require higher memory; default is FALSE} \item{mean.fxn}{Function to use for fold change or average difference calculation. If NULL, the appropriate function will be chosen according to the slot used} \item{fc.name}{Name of the fold change, average difference, or custom function column in the output data.frame. If NULL, the fold change column will be named according to the logarithm base (eg, "avg_log2FC"), or if using the scale.data slot "avg_diff".} \item{base}{The base with respect to which logarithms are computed.} \item{recorrect_umi}{Recalculate corrected UMI counts using minimum of the median UMIs when performing DE using multiple SCT objects; default is TRUE} \item{ident.1}{Identity class to define markers for; pass an object of class \code{phylo} or 'clustertree' to find markers for a node in a cluster tree; passing 'clustertree' requires \code{\link{BuildClusterTree}} to have been run} \item{ident.2}{A second identity class for comparison; if \code{NULL}, use all other cells for comparison; if an object of class \code{phylo} or 'clustertree' is passed to \code{ident.1}, must pass a node to find markers for} \item{group.by}{Regroup cells into a different identity class prior to performing differential expression (see example)} \item{subset.ident}{Subset a particular identity class prior to regrouping. Only relevant if group.by is set (see example)} \item{assay}{Assay to use in differential expression testing} \item{reduction}{Reduction to use in differential expression testing - will test for DE on cell embeddings} } \value{ data.frame with a ranked list of putative markers as rows, and associated statistics as columns (p-values, ROC score, etc., depending on the test used (\code{test.use})). The following columns are always present: \itemize{ \item \code{avg_logFC}: log fold-change of the average expression between the two groups. Positive values indicate that the gene is more highly expressed in the first group \item \code{pct.1}: The percentage of cells where the gene is detected in the first group \item \code{pct.2}: The percentage of cells where the gene is detected in the second group \item \code{p_val_adj}: Adjusted p-value, based on bonferroni correction using all genes in the dataset } } \description{ Finds markers (differentially expressed genes) for identity classes } \details{ p-value adjustment is performed using bonferroni correction based on the total number of genes in the dataset. Other correction methods are not recommended, as Seurat pre-filters genes using the arguments above, reducing the number of tests performed. Lastly, as Aaron Lun has pointed out, p-values should be interpreted cautiously, as the genes used for clustering are the same genes tested for differential expression.
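As a rough sketch (assuming an assay with \code{nrow(object)} features), the reported adjusted p-value is equivalent to:\preformatted{
# bonferroni adjustment over all features in the assay, capped at 1
p_val_adj <- p.adjust(p = p_val, method = "bonferroni", n = nrow(object))
# i.e. pmin(1, p_val * nrow(object))
}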
} \examples{ data("pbmc_small") # Find markers for cluster 2 markers <- FindMarkers(object = pbmc_small, ident.1 = 2) head(x = markers) # Take all cells in cluster 2, and find markers that separate cells in the 'g1' group (metadata # variable 'groups') markers <- FindMarkers(pbmc_small, ident.1 = "g1", group.by = 'groups', subset.ident = "2") head(x = markers) # Pass 'clustertree' or an object of class phylo to ident.1 and # a node to ident.2 as a replacement for FindMarkersNode if (requireNamespace("ape", quietly = TRUE)) { pbmc_small <- BuildClusterTree(object = pbmc_small) markers <- FindMarkers(object = pbmc_small, ident.1 = 'clustertree', ident.2 = 5) head(x = markers) } } \references{ McDavid A, Finak G, Chattopadhyay PK, et al. Data exploration, quality control and testing in single-cell qPCR-based gene expression experiments. Bioinformatics. 2013;29(4):461-467. doi:10.1093/bioinformatics/bts714 Trapnell C, et al. The dynamics and regulators of cell fate decisions are revealed by pseudotemporal ordering of single cells. Nature Biotechnology volume 32, pages 381-386 (2014) Andrew McDavid, Greg Finak and Masanao Yajima (2017). MAST: Model-based Analysis of Single Cell Transcriptomics. R package version 1.2.1. https://github.com/RGLab/MAST/ Love MI, Huber W and Anders S (2014). "Moderated estimation of fold change and dispersion for RNA-seq data with DESeq2." Genome Biology. https://bioconductor.org/packages/release/bioc/html/DESeq2.html } \seealso{ \code{FoldChange} } \concept{differential_expression} Seurat/man/SingleExIPlot.Rd0000644000176200001440000000241714156670503015260 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/visualization.R \name{SingleExIPlot} \alias{SingleExIPlot} \title{Plot a single expression by identity on a plot} \usage{ SingleExIPlot( data, idents, split = NULL, type = "violin", sort = FALSE, y.max = NULL, adjust = 1, pt.size = 0, cols = NULL, seed.use = 42, log = FALSE, raster = NULL ) } \arguments{ \item{data}{Data to plot} \item{idents}{Idents to use} \item{split}{Use a split violin plot} \item{type}{Make either a \dQuote{ridge} or \dQuote{violin} plot} \item{sort}{Sort identity classes (on the x-axis) by the average expression of the attribute being plotted} \item{y.max}{Maximum Y value to plot} \item{adjust}{Adjust parameter for geom_violin} \item{pt.size}{Size of points for violin plots} \item{cols}{Colors to use for plotting} \item{seed.use}{Random seed to use. If NULL, don't set a seed} \item{log}{Plot Y axis on log scale} \item{raster}{Convert points to raster format. Requires 'ggrastr' to be installed. Default is \code{NULL} which automatically rasterizes if ggrastr is installed and the number of points exceeds 100,000.} } \value{ A ggplot-based Expression-by-Identity plot } \description{ Plot a single expression by identity on a plot } \keyword{internal} Seurat/man/CellSelector.Rd0000644000176200001440000000234014005656653015150 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/visualization.R \name{CellSelector} \alias{CellSelector} \alias{FeatureLocator} \title{Cell Selector} \usage{ CellSelector(plot, object = NULL, ident = "SelectedCells", ...) FeatureLocator(plot, ...)
} \arguments{ \item{plot}{A ggplot2 plot} \item{object}{An optional Seurat object; if passed, will return an object with the identities of selected cells set to \code{ident}} \item{ident}{An optional new identity class to assign the selected cells} \item{...}{Ignored} } \value{ If \code{object} is \code{NULL}, the names of the points selected; otherwise, a Seurat object with the selected cells' identity classes set to \code{ident} } \description{ Select points on a scatterplot and get information about them } \examples{ \dontrun{ data("pbmc_small") plot <- DimPlot(object = pbmc_small) # Follow instructions in the terminal to select points cells.located <- CellSelector(plot = plot) cells.located # Automatically set the identity class of selected cells and return a new Seurat object pbmc_small <- CellSelector(plot = plot, object = pbmc_small, ident = 'SelectedCells') } } \seealso{ \code{\link{DimPlot}} \code{\link{FeaturePlot}} } \concept{visualization} Seurat/man/PercentageFeatureSet.Rd0000644000176200001440000000322514005656653016640 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/utilities.R \name{PercentageFeatureSet} \alias{PercentageFeatureSet} \title{Calculate the percentage of all counts that belong to a given set of features} \usage{ PercentageFeatureSet( object, pattern = NULL, features = NULL, col.name = NULL, assay = NULL ) } \arguments{ \item{object}{A Seurat object} \item{pattern}{A regex pattern to match features against} \item{features}{A defined feature set. If features are provided, the pattern matching is ignored} \item{col.name}{Name in meta.data column to assign. If this is not null, returns a Seurat object with the proportion of the feature set stored in metadata.} \item{assay}{Assay to use} } \value{ Returns a vector with the proportion of the feature set or, if col.name is set, returns a Seurat object with the proportion of the feature set stored in metadata. } \description{ This function enables you to easily calculate the percentage of all the counts belonging to a subset of the possible features for each cell. This is useful when trying to compute the percentage of transcripts that map to mitochondrial genes for example. The calculation here is simply the column sum of the matrix present in the counts slot for features belonging to the set divided by the column sum for all features times 100. } \examples{ data("pbmc_small") # Calculate the proportion of transcripts mapping to mitochondrial genes # NOTE: The pattern provided works for human gene names.
You may need to adjust depending on your # system of interest pbmc_small[["percent.mt"]] <- PercentageFeatureSet(object = pbmc_small, pattern = "^MT-") } \concept{utilities} Seurat/man/DimReduc-class.Rd0000644000176200001440000000070614005656653015373 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/reexports.R \docType{class} \name{DimReduc-class} \alias{DimReduc-class} \title{The DimReduc Class} \description{ The \code{DimReduc} object stores a dimensionality reduction taken out in Seurat; for more details, please see the documentation in \code{\link[SeuratObject:DimReduc]{SeuratObject}} } \seealso{ \code{\link[SeuratObject:DimReduc]{SeuratObject::DimReduc-class}} } Seurat/man/PolyFeaturePlot.Rd0000644000176200001440000000307614024674706015675 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/visualization.R \name{PolyFeaturePlot} \alias{PolyFeaturePlot} \title{Polygon FeaturePlot} \usage{ PolyFeaturePlot( object, features, cells = NULL, poly.data = "spatial", ncol = ceiling(x = length(x = features)/2), min.cutoff = 0, max.cutoff = NA, common.scale = TRUE, flip.coords = FALSE ) } \arguments{ \item{object}{Seurat object} \item{features}{Vector of features to plot. Features can come from: \itemize{ \item An \code{Assay} feature (e.g. a gene name - "MS4A1") \item A column name from meta.data (e.g. mitochondrial percentage - "percent.mito") \item A column name from a \code{DimReduc} object corresponding to the cell embedding values (e.g. the PC 1 scores - "PC_1") }} \item{cells}{Vector of cells to plot (default is all cells)} \item{poly.data}{Name of the polygon dataframe in the misc slot} \item{ncol}{Number of columns to split the plot into} \item{min.cutoff}{Vector of minimum and maximum cutoff values for each feature, may specify quantile in the form of 'q##' where '##' is the quantile (eg, 'q1', 'q10')} \item{max.cutoff}{Vector of minimum and maximum cutoff values for each feature, may specify quantile in the form of 'q##' where '##' is the quantile (eg, 'q1', 'q10')} \item{common.scale}{...} \item{flip.coords}{Flip x and y coordinates} } \value{ Returns a ggplot object } \description{ Plot cells as polygons, rather than single points. Color cells by any value accessible by \code{\link{FetchData}}. } \concept{spatial} \concept{visualization} Seurat/man/FilterSlideSeq.Rd0000644000176200001440000000313514024674706015452 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/objects.R \name{FilterSlideSeq} \alias{FilterSlideSeq} \title{Filter stray beads from Slide-seq puck} \usage{ FilterSlideSeq( object, image = "image", center = NULL, radius = NULL, do.plot = TRUE ) } \arguments{ \item{object}{Seurat object with slide-seq data} \item{image}{Name of the image where the coordinates are stored} \item{center}{Vector specifying the x and y coordinates for the center of the inclusion circle} \item{radius}{Radius of the circle of inclusion} \item{do.plot}{Display a \code{\link{SpatialDimPlot}} with the cells being removed labeled.} } \value{ Returns a Seurat object with only the subset of cells that pass the circular filter } \description{ This function is useful for removing stray beads that fall outside the main Slide-seq puck area. Essentially, it's a circular filter where you set a center and radius defining a circle of beads to keep. 
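For example (a sketch assuming a Slide-seq object such as the ssHippo dataset from SeuratData, with a hypothetical center and radius given in puck coordinates), the circle can be supplied explicitly:\preformatted{
# Keep only beads that fall within 2000 units of the supplied center
ssHippo.filtered <- FilterSlideSeq(object = ssHippo, center = c(3000, 3000), radius = 2000)
}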
If the center is not set, it will be estimated from the bead coordinates (removing the 1st and 99th quantile to avoid skewing the center by the stray beads). By default, this function will display a \code{\link{SpatialDimPlot}} showing which cells were removed for easy adjustment of the center and/or radius. } \examples{ \dontrun{ # This example uses the ssHippo dataset which you can download # using the SeuratData package. library(SeuratData) data('ssHippo') # perform filtering of beads ssHippo.filtered <- FilterSlideSeq(ssHippo, radius = 2300) # This radius looks too small, so increase it and repeat until satisfied } } \concept{objects} \concept{spatial} Seurat/man/ISpatialDimPlot.Rd0000644000176200001440000000152514152476164015573 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/visualization.R \name{ISpatialDimPlot} \alias{ISpatialDimPlot} \title{Visualize clusters spatially and interactively} \usage{ ISpatialDimPlot(object, image = NULL, group.by = NULL, alpha = c(0.3, 1)) } \arguments{ \item{object}{Seurat object} \item{image}{Name of the image to use in the plot} \item{group.by}{Name of one or more metadata columns to group (color) cells by (for example, orig.ident); pass 'ident' to group by identity class} \item{alpha}{Controls opacity of spots. Provide as a vector specifying the min and max for SpatialFeaturePlot. For SpatialDimPlot, provide a single alpha value for each plot.} } \value{ Returns final plot as a ggplot object } \description{ Visualize clusters spatially and interactively } \concept{spatial} \concept{visualization} Seurat/man/FindSpatiallyVariableFeatures.Rd0000644000176200001440000000462414024674706020507 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/generics.R, R/preprocessing.R \name{FindSpatiallyVariableFeatures} \alias{FindSpatiallyVariableFeatures} \alias{FindSpatiallyVariableFeatures.default} \alias{FindSpatiallyVariableFeatures.Assay} \alias{FindSpatiallyVariableFeatures.Seurat} \title{Find spatially variable features} \usage{ FindSpatiallyVariableFeatures(object, ...) \method{FindSpatiallyVariableFeatures}{default}( object, spatial.location, selection.method = c("markvariogram", "moransi"), r.metric = 5, x.cuts = NULL, y.cuts = NULL, verbose = TRUE, ... ) \method{FindSpatiallyVariableFeatures}{Assay}( object, slot = "scale.data", spatial.location, selection.method = c("markvariogram", "moransi"), features = NULL, r.metric = 5, x.cuts = NULL, y.cuts = NULL, nfeatures = nfeatures, verbose = TRUE, ... ) \method{FindSpatiallyVariableFeatures}{Seurat}( object, assay = NULL, slot = "scale.data", features = NULL, image = NULL, selection.method = c("markvariogram", "moransi"), r.metric = 5, x.cuts = NULL, y.cuts = NULL, nfeatures = 2000, verbose = TRUE, ... ) } \arguments{ \item{object}{A Seurat object, assay, or expression matrix} \item{...}{Arguments passed to other methods} \item{spatial.location}{Coordinates for each cell/spot/bead} \item{selection.method}{Method for selecting spatially variable features. \itemize{ \item \code{markvariogram}: See \code{\link{RunMarkVario}} for details \item \code{moransi}: See \code{\link{RunMoransI}} for details.
}} \item{r.metric}{r value at which to report the "trans" value of the mark variogram} \item{x.cuts}{Number of divisions to make in the x direction, helps define the grid over which binning is performed} \item{y.cuts}{Number of divisions to make in the y direction, helps define the grid over which binning is performed} \item{verbose}{Print messages and progress} \item{slot}{Slot in the Assay to pull data from} \item{features}{If provided, only compute on given features. Otherwise, compute for all features.} \item{nfeatures}{Number of features to mark as the top spatially variable.} \item{assay}{Assay to pull the features (marks) from} \item{image}{Name of image to pull the coordinates from} } \description{ Identify features whose variability in expression can be explained to some degree by spatial location. } \concept{preprocessing} \concept{spatial} Seurat/man/IntegrationAnchorSet-class.Rd0000644000176200001440000000062614005656653017772 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/objects.R \docType{class} \name{IntegrationAnchorSet-class} \alias{IntegrationAnchorSet-class} \alias{IntegrationAnchorSet} \title{The IntegrationAnchorSet Class} \description{ Inherits from the Anchorset class. Implemented mainly for method dispatch purposes. See \code{\link{AnchorSet}} for slot details. } \concept{objects} Seurat/man/IntegrateEmbeddings.Rd0000644000176200001440000001004614152507372016472 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/generics.R, R/integration.R \name{IntegrateEmbeddings} \alias{IntegrateEmbeddings} \alias{IntegrateEmbeddings.IntegrationAnchorSet} \alias{IntegrateEmbeddings.TransferAnchorSet} \title{Integrate low dimensional embeddings} \usage{ IntegrateEmbeddings(anchorset, ...) \method{IntegrateEmbeddings}{IntegrationAnchorSet}( anchorset, new.reduction.name = "integrated_dr", reductions = NULL, dims.to.integrate = NULL, k.weight = 100, weight.reduction = NULL, sd.weight = 1, sample.tree = NULL, preserve.order = FALSE, verbose = TRUE, ... ) \method{IntegrateEmbeddings}{TransferAnchorSet}( anchorset, reference, query, new.reduction.name = "integrated_dr", reductions = "pcaproject", dims.to.integrate = NULL, k.weight = 100, weight.reduction = NULL, reuse.weights.matrix = TRUE, sd.weight = 1, preserve.order = FALSE, verbose = TRUE, ... ) } \arguments{ \item{anchorset}{An AnchorSet object} \item{...}{Reserved for internal use} \item{new.reduction.name}{Name for new integrated dimensional reduction.} \item{reductions}{Name of reductions to be integrated. For a TransferAnchorSet, this should be the name of a reduction present in the anchorset object (for example, "pcaproject"). For an IntegrationAnchorSet, this should be a \code{\link{DimReduc}} object containing all cells present in the anchorset object.} \item{dims.to.integrate}{Number of dimensions to return integrated values for} \item{k.weight}{Number of neighbors to consider when weighting anchors} \item{weight.reduction}{Dimension reduction to use when calculating anchor weights. 
This can be one of: \itemize{ \item{A string, specifying the name of a dimension reduction present in all objects to be integrated} \item{A vector of strings, specifying the name of a dimension reduction to use for each object to be integrated} \item{A vector of \code{\link{DimReduc}} objects, specifying the object to use for each object in the integration} \item{NULL, in which case the full corrected space is used for computing anchor weights.} }} \item{sd.weight}{Controls the bandwidth of the Gaussian kernel for weighting} \item{sample.tree}{Specify the order of integration. Order of integration should be encoded in a matrix, where each row represents one of the pairwise integration steps. Negative numbers specify a dataset, positive numbers specify the integration results from a given row (the format of the merge matrix included in the \code{\link{hclust}} function output). For example: \code{matrix(c(-2, 1, -3, -1), ncol = 2)} gives:\preformatted{ [,1] [,2] [1,] -2 -3 [2,] 1 -1 } Which would cause dataset 2 and 3 to be integrated first, then the resulting object integrated with dataset 1. If NULL, the sample tree will be computed automatically.} \item{preserve.order}{Do not reorder objects based on size for each pairwise integration.} \item{verbose}{Print progress bars and output} \item{reference}{Reference object used in anchorset construction} \item{query}{Query object used in anchorset construction} \item{reuse.weights.matrix}{Can be used in conjunction with the store.weights parameter in TransferData to reuse a precomputed weights matrix.} } \value{ When called on a TransferAnchorSet (from FindTransferAnchors), this will return the query object with the integrated embeddings stored in a new reduction. When called on an IntegrationAnchorSet (from IntegrateData), this will return a merged object with the integrated reduction stored. } \description{ Perform dataset integration using a pre-computed Anchorset of specified low dimensional representations. } \details{ The main steps of this procedure are identical to \code{\link{IntegrateData}} with one key distinction. When computing the weights matrix, the distance calculations are performed in the full space of integrated embeddings when integrating more than two datasets, as opposed to a reduced PCA space which is the default behavior in \code{\link{IntegrateData}}. } \concept{integration} Seurat/man/SCTResults.Rd0000644000176200001440000000263614024674706014613 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/generics.R, R/objects.R \name{SCTResults} \alias{SCTResults} \alias{SCTResults<-} \alias{SCTResults.SCTModel} \alias{SCTResults<-.SCTModel} \alias{SCTResults.SCTAssay} \alias{SCTResults<-.SCTAssay} \alias{SCTResults.Seurat} \title{Get SCT results from an Assay} \usage{ SCTResults(object, ...) SCTResults(object, ...) <- value \method{SCTResults}{SCTModel}(object, slot, ...) \method{SCTResults}{SCTModel}(object, slot, ...) <- value \method{SCTResults}{SCTAssay}(object, slot, model = NULL, ...) \method{SCTResults}{SCTAssay}(object, slot, model = NULL, ...) <- value \method{SCTResults}{Seurat}(object, assay = "SCT", slot, model = NULL, ...) } \arguments{ \item{object}{An object} \item{...}{Arguments passed to other methods (not used)} \item{value}{new data to set} \item{slot}{Which slot to pull the SCT results from} \item{model}{Name of SCModel to pull result from. 
Available names can be retrieved with \code{levels}.} \item{assay}{Assay in the Seurat object to pull from} } \value{ Returns the value present in the requested slot for the requested group. If group is not specified, returns a list of slot results for each group unless there is only one group present (in which case it just returns the slot directly). } \description{ Pull the \code{\link{SCTResults}} information from an \code{\link{SCTAssay}} object. } \concept{objects} Seurat/man/RegroupIdents.Rd0000644000176200001440000000113314005656653015361 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/utilities.R \name{RegroupIdents} \alias{RegroupIdents} \title{Regroup idents based on meta.data info} \usage{ RegroupIdents(object, metadata) } \arguments{ \item{object}{Seurat object} \item{metadata}{Name of metadata column} } \value{ A Seurat object with the active idents regrouped } \description{ For cells in each ident, set a new identity based on the most common value of a specified metadata column. } \examples{ data("pbmc_small") pbmc_small <- RegroupIdents(pbmc_small, metadata = "groups") } \concept{utilities} Seurat/man/VizDimLoadings.Rd0000644000176200001440000000246514005656653015463 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/visualization.R \name{VizDimLoadings} \alias{VizDimLoadings} \title{Visualize Dimensional Reduction genes} \usage{ VizDimLoadings( object, dims = 1:5, nfeatures = 30, col = "blue", reduction = "pca", projected = FALSE, balanced = FALSE, ncol = NULL, combine = TRUE ) } \arguments{ \item{object}{Seurat object} \item{dims}{Number of dimensions to display} \item{nfeatures}{Number of genes to display} \item{col}{Color of points to use} \item{reduction}{Reduction technique to visualize results for} \item{projected}{Use reduction values for full dataset (i.e. projected dimensional reduction values)} \item{balanced}{Return an equal number of genes with + and - scores. If FALSE (default), returns the top genes ranked by the scores absolute values} \item{ncol}{Number of columns to display} \item{combine}{Combine plots into a single \code{\link[patchwork]{patchwork}ed} ggplot object. If \code{FALSE}, return a list of ggplot objects} } \value{ A \code{\link[patchwork]{patchwork}ed} ggplot object if \code{combine = TRUE}; otherwise, a list of ggplot objects } \description{ Visualize top genes associated with reduction components } \examples{ data("pbmc_small") VizDimLoadings(object = pbmc_small) } \concept{visualization} Seurat/man/AugmentPlot.Rd0000644000176200001440000000151614005656653015033 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/visualization.R \name{AugmentPlot} \alias{AugmentPlot} \title{Augments ggplot2-based plot with a PNG image.} \usage{ AugmentPlot(plot, width = 10, height = 10, dpi = 100) } \arguments{ \item{plot}{A ggplot object} \item{width, height}{Width and height of PNG version of plot} \item{dpi}{Plot resolution} } \value{ A ggplot object } \description{ Creates "vector-friendly" plots. Does this by saving a copy of the plot as a PNG file, then adding the PNG image with \code{\link[ggplot2]{annotation_raster}} to a blank plot of the same dimensions as \code{plot}. Please note: original legends and axes will be lost during augmentation. 
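A typical use (a sketch assuming the \code{ggplot2} package is installed and the bundled \code{pbmc_small} dataset) is to rasterize a point-heavy plot before saving it to a vector format:\preformatted{
data("pbmc_small")
plot <- DimPlot(object = pbmc_small)
# Points are replaced by a single PNG layer, keeping the saved PDF small
ggplot2::ggsave(filename = "umap.pdf", plot = AugmentPlot(plot = plot, dpi = 300))
}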
} \examples{ \dontrun{ data("pbmc_small") plot <- DimPlot(object = pbmc_small) AugmentPlot(plot = plot) } } \concept{visualization} Seurat/man/DEenrichRPlot.Rd0000644000176200001440000001013514005656653015233 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/mixscape.R \name{DEenrichRPlot} \alias{DEenrichRPlot} \title{DE and EnrichR pathway visualization barplot} \usage{ DEenrichRPlot( object, ident.1 = NULL, ident.2 = NULL, balanced = TRUE, logfc.threshold = 0.25, assay = NULL, max.genes, test.use = "wilcox", p.val.cutoff = 0.05, cols = NULL, enrich.database = NULL, num.pathway = 10, return.gene.list = FALSE, ... ) } \arguments{ \item{object}{Name of object class Seurat.} \item{ident.1}{Cell class identity 1.} \item{ident.2}{Cell class identity 2.} \item{balanced}{Option to display pathway enrichments for both negative and positive DE genes.If false, only positive DE gene will be displayed.} \item{logfc.threshold}{Limit testing to genes which show, on average, at least X-fold difference (log-scale) between the two groups of cells. Default is 0.25 Increasing logfc.threshold speeds up the function, but can miss weaker signals.} \item{assay}{Assay to use in differential expression testing} \item{max.genes}{Maximum number of genes to use as input to enrichR.} \item{test.use}{Denotes which test to use. Available options are: \itemize{ \item{"wilcox"} : Identifies differentially expressed genes between two groups of cells using a Wilcoxon Rank Sum test (default) \item{"bimod"} : Likelihood-ratio test for single cell gene expression, (McDavid et al., Bioinformatics, 2013) \item{"roc"} : Identifies 'markers' of gene expression using ROC analysis. For each gene, evaluates (using AUC) a classifier built on that gene alone, to classify between two groups of cells. An AUC value of 1 means that expression values for this gene alone can perfectly classify the two groupings (i.e. Each of the cells in cells.1 exhibit a higher level than each of the cells in cells.2). An AUC value of 0 also means there is perfect classification, but in the other direction. A value of 0.5 implies that the gene has no predictive power to classify the two groups. Returns a 'predictive power' (abs(AUC-0.5) * 2) ranked matrix of putative differentially expressed genes. \item{"t"} : Identify differentially expressed genes between two groups of cells using the Student's t-test. \item{"negbinom"} : Identifies differentially expressed genes between two groups of cells using a negative binomial generalized linear model. Use only for UMI-based datasets \item{"poisson"} : Identifies differentially expressed genes between two groups of cells using a poisson generalized linear model. Use only for UMI-based datasets \item{"LR"} : Uses a logistic regression framework to determine differentially expressed genes. Constructs a logistic regression model predicting group membership based on each feature individually and compares this to a null model with a likelihood ratio test. \item{"MAST"} : Identifies differentially expressed genes between two groups of cells using a hurdle model tailored to scRNA-seq data. Utilizes the MAST package to run the DE testing. \item{"DESeq2"} : Identifies differentially expressed genes between two groups of cells based on a model using DESeq2 which uses a negative binomial distribution (Love et al, Genome Biology, 2014).This test does not support pre-filtering of genes based on average difference (or percent detection rate) between cell groups. 
However, genes may be pre-filtered based on their minimum detection rate (min.pct) across both cell groups. To use this method, please install DESeq2, using the instructions at https://bioconductor.org/packages/release/bioc/html/DESeq2.html }} \item{p.val.cutoff}{Cutoff to select DE genes.} \item{cols}{A list of colors to use for barplots.} \item{enrich.database}{Database to use from enrichR.} \item{num.pathway}{Number of pathways to display in barplot.} \item{return.gene.list}{Return list of DE genes} \item{...}{Arguments passed to other methods and to specific DE methods} } \value{ Returns one (only enriched) or two (both enriched and depleted) barplots with the top enriched/depleted GO terms from EnrichR. } \description{ DE and EnrichR pathway visualization barplot } \concept{mixscape} Seurat/man/ScaleData.Rd0000644000176200001440000000717614005656653014425 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/generics.R, R/preprocessing.R \name{ScaleData} \alias{ScaleData} \alias{ScaleData.default} \alias{ScaleData.Assay} \alias{ScaleData.Seurat} \title{Scale and center the data.} \usage{ ScaleData(object, ...) \method{ScaleData}{default}( object, features = NULL, vars.to.regress = NULL, latent.data = NULL, split.by = NULL, model.use = "linear", use.umi = FALSE, do.scale = TRUE, do.center = TRUE, scale.max = 10, block.size = 1000, min.cells.to.block = 3000, verbose = TRUE, ... ) \method{ScaleData}{Assay}( object, features = NULL, vars.to.regress = NULL, latent.data = NULL, split.by = NULL, model.use = "linear", use.umi = FALSE, do.scale = TRUE, do.center = TRUE, scale.max = 10, block.size = 1000, min.cells.to.block = 3000, verbose = TRUE, ... ) \method{ScaleData}{Seurat}( object, features = NULL, assay = NULL, vars.to.regress = NULL, split.by = NULL, model.use = "linear", use.umi = FALSE, do.scale = TRUE, do.center = TRUE, scale.max = 10, block.size = 1000, min.cells.to.block = 3000, verbose = TRUE, ... ) } \arguments{ \item{object}{An object} \item{...}{Arguments passed to other methods} \item{features}{Vector of features names to scale/center. Default is variable features.} \item{vars.to.regress}{Variables to regress out (previously latent.vars in RegressOut). For example, nUMI, or percent.mito.} \item{latent.data}{Extra data to regress out, should be cells x latent data} \item{split.by}{Name of variable in object metadata or a vector or factor defining grouping of cells. See argument \code{f} in \code{\link[base]{split}} for more details} \item{model.use}{Use a linear model or generalized linear model (poisson, negative binomial) for the regression. Options are 'linear' (default), 'poisson', and 'negbinom'} \item{use.umi}{Regress on UMI count data. Default is FALSE for linear modeling, but automatically set to TRUE if model.use is 'negbinom' or 'poisson'} \item{do.scale}{Whether to scale the data.} \item{do.center}{Whether to center the data.} \item{scale.max}{Max value to return for scaled data. The default is 10. Setting this can help reduce the effects of features that are only expressed in a very small number of cells. If regressing out latent variables and using a non-linear model, the default is 50.} \item{block.size}{Default size for number of features to scale at in a single computation. 
Increasing block.size may speed up calculations but at an additional memory cost.} \item{min.cells.to.block}{If object contains fewer than this number of cells, don't block for scaling calculations.} \item{verbose}{Displays a progress bar for scaling procedure} \item{assay}{Name of Assay to scale} } \description{ Scales and centers features in the dataset. If variables are provided in vars.to.regress, they are individually regressed against each feature, and the resulting residuals are then scaled and centered. } \details{ ScaleData now incorporates the functionality of the function formerly known as RegressOut (which regressed out given the effects of provided variables and then scaled the residuals). To make use of the regression functionality, simply pass the variables you want to remove to the vars.to.regress parameter. Setting center to TRUE will center the expression for each feature by subtracting the average expression for that feature. Setting scale to TRUE will scale the expression level for each feature by dividing the centered feature expression levels by their standard deviations if center is TRUE and by their root mean square otherwise. } \concept{preprocessing} Seurat/man/Neighbor-class.Rd0000644000176200001440000000056414005656653015436 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/reexports.R \docType{class} \name{Neighbor-class} \alias{Neighbor-class} \title{The Neighbor Class} \description{ For more details, please see the documentation in \code{\link[SeuratObject:Neighbor]{SeuratObject}} } \seealso{ \code{\link[SeuratObject:Neighbor]{SeuratObject::Neighbor-class}} } Seurat/man/ReadSTARsolo.Rd0000644000176200001440000000061614152476164015036 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/convenience.R \name{ReadSTARsolo} \alias{ReadSTARsolo} \title{Read output from STARsolo} \usage{ ReadSTARsolo(data.dir, ...) 
} \arguments{ \item{data.dir}{Directory containing the data files} \item{...}{Extra parameters passed to \code{\link{ReadMtx}}} } \description{ Read output from STARsolo } \concept{convenience} Seurat/man/SpatialPlot.Rd0000644000176200001440000001200214152476164015020 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/visualization.R, R/convenience.R \name{SpatialPlot} \alias{SpatialPlot} \alias{SpatialDimPlot} \alias{SpatialFeaturePlot} \title{Visualize spatial clustering and expression data.} \usage{ SpatialPlot( object, group.by = NULL, features = NULL, images = NULL, cols = NULL, image.alpha = 1, crop = TRUE, slot = "data", min.cutoff = NA, max.cutoff = NA, cells.highlight = NULL, cols.highlight = c("#DE2D26", "grey50"), facet.highlight = FALSE, label = FALSE, label.size = 5, label.color = "white", label.box = TRUE, repel = FALSE, ncol = NULL, combine = TRUE, pt.size.factor = 1.6, alpha = c(1, 1), stroke = 0.25, interactive = FALSE, do.identify = FALSE, identify.ident = NULL, do.hover = FALSE, information = NULL ) SpatialDimPlot( object, group.by = NULL, images = NULL, cols = NULL, crop = TRUE, cells.highlight = NULL, cols.highlight = c("#DE2D26", "grey50"), facet.highlight = FALSE, label = FALSE, label.size = 7, label.color = "white", repel = FALSE, ncol = NULL, combine = TRUE, pt.size.factor = 1.6, alpha = c(1, 1), image.alpha = 1, stroke = 0.25, label.box = TRUE, interactive = FALSE, information = NULL ) SpatialFeaturePlot( object, features, images = NULL, crop = TRUE, slot = "data", min.cutoff = NA, max.cutoff = NA, ncol = NULL, combine = TRUE, pt.size.factor = 1.6, alpha = c(1, 1), image.alpha = 1, stroke = 0.25, interactive = FALSE, information = NULL ) } \arguments{ \item{object}{A Seurat object} \item{group.by}{Name of meta.data column to group the data by} \item{features}{Name of the feature to visualize. Provide either group.by OR features, not both.} \item{images}{Name of the images to use in the plot(s)} \item{cols}{Vector of colors, each color corresponds to an identity class. This may also be a single character or numeric value corresponding to a palette as specified by \code{\link[RColorBrewer]{brewer.pal.info}}. By default, ggplot2 assigns colors} \item{image.alpha}{Adjust the opacity of the background images. Set to 0 to remove.} \item{crop}{Crop the plot in to focus on points plotted. Set to FALSE to show entire background image.} \item{slot}{If plotting a feature, which data slot to pull from (counts, data, or scale.data)} \item{min.cutoff, max.cutoff}{Vector of minimum and maximum cutoff values for each feature, may specify quantile in the form of 'q##' where '##' is the quantile (eg, 'q1', 'q10')} \item{cells.highlight}{A list of character or numeric vectors of cells to highlight. If only one group of cells desired, can simply pass a vector instead of a list. 
If set, colors selected cells to the color(s) in cols.highlight} \item{cols.highlight}{A vector of colors to highlight the cells as; ordered the same as the groups in cells.highlight; last color corresponds to unselected cells.} \item{facet.highlight}{When highlighting certain groups of cells, split each group into its own plot} \item{label}{Whether to label the clusters} \item{label.size}{Sets the size of the labels} \item{label.color}{Sets the color of the label text} \item{label.box}{Whether to put a box around the label text (geom_text vs geom_label)} \item{repel}{Repels the labels to prevent overlap} \item{ncol}{Number of columns if plotting multiple plots} \item{combine}{Combine plots into a single gg object; note that if TRUE, theming will not work when plotting multiple features/groupings} \item{pt.size.factor}{Scale the size of the spots.} \item{alpha}{Controls opacity of spots. Provide as a vector specifying the min and max for SpatialFeaturePlot. For SpatialDimPlot, provide a single alpha value for each plot.} \item{stroke}{Control the width of the border around the spots} \item{interactive}{Launch an interactive SpatialDimPlot or SpatialFeaturePlot session, see \code{\link{ISpatialDimPlot}} or \code{\link{ISpatialFeaturePlot}} for more details} \item{do.identify, do.hover}{DEPRECATED in favor of \code{interactive}} \item{identify.ident}{DEPRECATED} \item{information}{An optional dataframe or matrix of extra information to be displayed on hover} } \value{ If \code{do.identify}, either a vector of cells selected or the object with selected cells set to the value of \code{identify.ident} (if set). Else, if \code{do.hover}, a plotly object with interactive graphics. Else, a ggplot object } \description{ SpatialPlot plots a feature or discrete grouping (e.g. cluster assignments) as spots over the image that was collected. We also provide SpatialFeaturePlot and SpatialDimPlot as wrapper functions around SpatialPlot for a consistent naming framework. } \examples{ \dontrun{ # For functionality analogous to FeaturePlot SpatialPlot(seurat.object, features = "MS4A1") SpatialFeaturePlot(seurat.object, features = "MS4A1") # For functionality analogous to DimPlot SpatialPlot(seurat.object, group.by = "clusters") SpatialDimPlot(seurat.object, group.by = "clusters") } } \concept{convenience} \concept{spatial} \concept{visualization} Seurat/man/BuildClusterTree.Rd0000644000176200001440000000413114152507372016005 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/tree.R \name{BuildClusterTree} \alias{BuildClusterTree} \title{Phylogenetic Analysis of Identity Classes} \usage{ BuildClusterTree( object, assay = NULL, features = NULL, dims = NULL, reduction = "pca", graph = NULL, slot = "data", reorder = FALSE, reorder.numeric = FALSE, verbose = TRUE ) } \arguments{ \item{object}{Seurat object} \item{assay}{Assay to use for the analysis.} \item{features}{Genes to use for the analysis. Default is the set of variable genes (\code{VariableFeatures(object = object)})} \item{dims}{If set, tree is calculated in dimension reduction space; overrides \code{features}} \item{reduction}{Name of dimension reduction to use.
Only used if \code{dims} is not NULL.} \item{graph}{If graph is passed, build tree based on graph connectivity between clusters; overrides \code{dims} and \code{features}} \item{slot}{Slot(s) to use; if multiple slots are given, assumed to follow the order of 'assays' (if specified) or object's assays} \item{reorder}{Re-order identity classes (factor ordering), according to position on the tree. This groups similar classes together which can be helpful, for example, when drawing violin plots.} \item{reorder.numeric}{Re-order identity classes according to position on the tree, assigning a numeric value ('1' is the leftmost node)} \item{verbose}{Show progress updates} } \value{ A Seurat object where the cluster tree can be accessed with \code{\link{Tool}} } \description{ Constructs a phylogenetic tree relating the 'average' cell from each identity class. Tree is estimated based on a distance matrix constructed in either gene expression space or PCA space. } \details{ Note that the tree is calculated for an 'average' cell, so gene expression or PC scores are averaged across all cells in an identity class before the tree is constructed. } \examples{ if (requireNamespace("ape", quietly = TRUE)) { data("pbmc_small") pbmc_small pbmc_small <- BuildClusterTree(object = pbmc_small) Tool(object = pbmc_small, slot = 'BuildClusterTree') } } \concept{tree} Seurat/man/TopCells.Rd0000644000176200001440000000157214005656653014323 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/objects.R \name{TopCells} \alias{TopCells} \title{Find cells with highest scores for a given dimensional reduction technique} \usage{ TopCells(object, dim = 1, ncells = 20, balanced = FALSE, ...) } \arguments{ \item{object}{DimReduc object} \item{dim}{Dimension to use} \item{ncells}{Number of cells to return} \item{balanced}{Return an equal number of cells with both + and - scores.} \item{...}{Extra parameters passed to \code{\link{Embeddings}}} } \value{ Returns a vector of cells } \description{ Return a list of genes with the strongest contribution to a set of components } \examples{ data("pbmc_small") pbmc_small head(TopCells(object = pbmc_small[["pca"]])) # Can specify which dimension and how many cells to return TopCells(object = pbmc_small[["pca"]], dim = 2, ncells = 5) } \concept{objects} Seurat/man/FindAllMarkers.Rd0000644000176200001440000001462214152476164015434 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/differential_expression.R \name{FindAllMarkers} \alias{FindAllMarkers} \alias{FindAllMarkersNode} \title{Gene expression markers for all identity classes} \usage{ FindAllMarkers( object, assay = NULL, features = NULL, logfc.threshold = 0.25, test.use = "wilcox", slot = "data", min.pct = 0.1, min.diff.pct = -Inf, node = NULL, verbose = TRUE, only.pos = FALSE, max.cells.per.ident = Inf, random.seed = 1, latent.vars = NULL, min.cells.feature = 3, min.cells.group = 3, pseudocount.use = 1, mean.fxn = NULL, fc.name = NULL, base = 2, return.thresh = 0.01, densify = FALSE, ... ) } \arguments{ \item{object}{An object} \item{assay}{Assay to use in differential expression testing} \item{features}{Genes to test. Default is to use all genes} \item{logfc.threshold}{Limit testing to genes which show, on average, at least X-fold difference (log-scale) between the two groups of cells. Default is 0.25 Increasing logfc.threshold speeds up the function, but can miss weaker signals.} \item{test.use}{Denotes which test to use. 
Available options are: \itemize{ \item{"wilcox"} : Identifies differentially expressed genes between two groups of cells using a Wilcoxon Rank Sum test (default) \item{"bimod"} : Likelihood-ratio test for single cell gene expression, (McDavid et al., Bioinformatics, 2013) \item{"roc"} : Identifies 'markers' of gene expression using ROC analysis. For each gene, evaluates (using AUC) a classifier built on that gene alone, to classify between two groups of cells. An AUC value of 1 means that expression values for this gene alone can perfectly classify the two groupings (i.e. Each of the cells in cells.1 exhibit a higher level than each of the cells in cells.2). An AUC value of 0 also means there is perfect classification, but in the other direction. A value of 0.5 implies that the gene has no predictive power to classify the two groups. Returns a 'predictive power' (abs(AUC-0.5) * 2) ranked matrix of putative differentially expressed genes. \item{"t"} : Identify differentially expressed genes between two groups of cells using the Student's t-test. \item{"negbinom"} : Identifies differentially expressed genes between two groups of cells using a negative binomial generalized linear model. Use only for UMI-based datasets \item{"poisson"} : Identifies differentially expressed genes between two groups of cells using a poisson generalized linear model. Use only for UMI-based datasets \item{"LR"} : Uses a logistic regression framework to determine differentially expressed genes. Constructs a logistic regression model predicting group membership based on each feature individually and compares this to a null model with a likelihood ratio test. \item{"MAST"} : Identifies differentially expressed genes between two groups of cells using a hurdle model tailored to scRNA-seq data. Utilizes the MAST package to run the DE testing. \item{"DESeq2"} : Identifies differentially expressed genes between two groups of cells based on a model using DESeq2 which uses a negative binomial distribution (Love et al, Genome Biology, 2014).This test does not support pre-filtering of genes based on average difference (or percent detection rate) between cell groups. However, genes may be pre-filtered based on their minimum detection rate (min.pct) across both cell groups. To use this method, please install DESeq2, using the instructions at https://bioconductor.org/packages/release/bioc/html/DESeq2.html }} \item{slot}{Slot to pull data from; note that if \code{test.use} is "negbinom", "poisson", or "DESeq2", \code{slot} will be set to "counts"} \item{min.pct}{only test genes that are detected in a minimum fraction of min.pct cells in either of the two populations. Meant to speed up the function by not testing genes that are very infrequently expressed. Default is 0.1} \item{min.diff.pct}{only test genes that show a minimum difference in the fraction of detection between the two groups. Set to -Inf by default} \item{node}{A node to find markers for and all its children; requires \code{\link{BuildClusterTree}} to have been run previously; replaces \code{FindAllMarkersNode}} \item{verbose}{Print a progress bar once expression testing begins} \item{only.pos}{Only return positive markers (FALSE by default)} \item{max.cells.per.ident}{Down sample each identity class to a max number. Default is no downsampling. 
Not activated by default (set to Inf)} \item{random.seed}{Random seed for downsampling} \item{latent.vars}{Variables to test, used only when \code{test.use} is one of 'LR', 'negbinom', 'poisson', or 'MAST'} \item{min.cells.feature}{Minimum number of cells expressing the feature in at least one of the two groups, currently only used for poisson and negative binomial tests} \item{min.cells.group}{Minimum number of cells in one of the groups} \item{pseudocount.use}{Pseudocount to add to averaged expression values when calculating logFC. 1 by default.} \item{mean.fxn}{Function to use for fold change or average difference calculation. If NULL, the appropriate function will be chose according to the slot used} \item{fc.name}{Name of the fold change, average difference, or custom function column in the output data.frame. If NULL, the fold change column will be named according to the logarithm base (eg, "avg_log2FC"), or if using the scale.data slot "avg_diff".} \item{base}{The base with respect to which logarithms are computed.} \item{return.thresh}{Only return markers that have a p-value < return.thresh, or a power > return.thresh (if the test is ROC)} \item{densify}{Convert the sparse matrix to a dense form before running the DE test. This can provide speedups but might require higher memory; default is FALSE} \item{...}{Arguments passed to other methods and to specific DE methods} } \value{ Matrix containing a ranked list of putative markers, and associated statistics (p-values, ROC score, etc.) } \description{ Finds markers (differentially expressed genes) for each of the identity classes in a dataset } \examples{ data("pbmc_small") # Find markers for all clusters all.markers <- FindAllMarkers(object = pbmc_small) head(x = all.markers) \dontrun{ # Pass a value to node as a replacement for FindAllMarkersNode pbmc_small <- BuildClusterTree(object = pbmc_small) all.markers <- FindAllMarkers(object = pbmc_small, node = 4) head(x = all.markers) } } \concept{differential_expression} Seurat/man/GroupCorrelation.Rd0000644000176200001440000000212514005656653016067 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/utilities.R \name{GroupCorrelation} \alias{GroupCorrelation} \title{Compute the correlation of features broken down by groups with another covariate} \usage{ GroupCorrelation( object, assay = NULL, slot = "scale.data", var = NULL, group.assay = NULL, min.cells = 5, ngroups = 6, do.plot = TRUE ) } \arguments{ \item{object}{Seurat object} \item{assay}{Assay to pull the data from} \item{slot}{Slot in the assay to pull feature expression data from (counts, data, or scale.data)} \item{var}{Variable with which to correlate the features} \item{group.assay}{Compute the gene groups based off the data in this assay.} \item{min.cells}{Only compute for genes in at least this many cells} \item{ngroups}{Number of groups to split into} \item{do.plot}{Display the group correlation boxplot (via \code{GroupCorrelationPlot})} } \value{ A Seurat object with the correlation stored in metafeatures } \description{ Compute the correlation of features broken down by groups with another covariate } \concept{utilities} Seurat/man/GetTransferPredictions.Rd0000644000176200001440000000207714005656653017227 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/integration.R \name{GetTransferPredictions} \alias{GetTransferPredictions} \title{Get the predicted identity} \usage{ GetTransferPredictions( object, assay = "predictions", slot = 
"data", score.filter = 0.75 ) } \arguments{ \item{object}{Seurat object} \item{assay}{Name of the assay holding the predictions} \item{slot}{Slot of the assay in which the prediction scores are stored} \item{score.filter}{Return "Unassigned" for any cell with a score less than this value} } \value{ Returns a vector of predicted class names } \description{ Utility function to easily pull out the name of the class with the maximum prediction. This is useful if you've set \code{prediction.assay = TRUE} in \code{\link{TransferData}} and want to have a vector with the predicted class. } \examples{ \dontrun{ prediction.assay <- TransferData(anchorset = anchors, refdata = reference$class) query[["predictions"]] <- prediction.assay query$predicted.id <- GetTransferPredictions(query) } } \concept{integration} Seurat/man/as.CellDataSet.Rd0000644000176200001440000000116014005656653015316 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/generics.R, R/objects.R \name{as.CellDataSet} \alias{as.CellDataSet} \alias{as.CellDataSet.Seurat} \title{Convert objects to CellDataSet objects} \usage{ as.CellDataSet(x, ...) \method{as.CellDataSet}{Seurat}(x, assay = NULL, reduction = NULL, ...) } \arguments{ \item{x}{An object to convert to class \code{CellDataSet}} \item{...}{Arguments passed to other methods} \item{assay}{Assay to convert} \item{reduction}{Name of DimReduc to set to main reducedDim in cds} } \description{ Convert objects to CellDataSet objects } \concept{objects} Seurat/man/FindVariableFeatures.Rd0000644000176200001440000001111514005656653016615 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/generics.R, R/preprocessing.R \name{FindVariableFeatures} \alias{FindVariableFeatures} \alias{FindVariableGenes} \alias{FindVariableFeatures.default} \alias{FindVariableFeatures.Assay} \alias{FindVariableFeatures.SCTAssay} \alias{FindVariableFeatures.Seurat} \title{Find variable features} \usage{ FindVariableFeatures(object, ...) \method{FindVariableFeatures}{default}( object, selection.method = "vst", loess.span = 0.3, clip.max = "auto", mean.function = FastExpMean, dispersion.function = FastLogVMR, num.bin = 20, binning.method = "equal_width", verbose = TRUE, ... ) \method{FindVariableFeatures}{Assay}( object, selection.method = "vst", loess.span = 0.3, clip.max = "auto", mean.function = FastExpMean, dispersion.function = FastLogVMR, num.bin = 20, binning.method = "equal_width", nfeatures = 2000, mean.cutoff = c(0.1, 8), dispersion.cutoff = c(1, Inf), verbose = TRUE, ... ) \method{FindVariableFeatures}{SCTAssay}(object, nfeatures = 2000, ...) \method{FindVariableFeatures}{Seurat}( object, assay = NULL, selection.method = "vst", loess.span = 0.3, clip.max = "auto", mean.function = FastExpMean, dispersion.function = FastLogVMR, num.bin = 20, binning.method = "equal_width", nfeatures = 2000, mean.cutoff = c(0.1, 8), dispersion.cutoff = c(1, Inf), verbose = TRUE, ... ) } \arguments{ \item{object}{An object} \item{...}{Arguments passed to other methods} \item{selection.method}{How to choose top variable features. Choose one of : \itemize{ \item{vst:}{ First, fits a line to the relationship of log(variance) and log(mean) using local polynomial regression (loess). Then standardizes the feature values using the observed mean and expected variance (given by the fitted line). 
Feature variance is then calculated on the standardized values after clipping to a maximum (see clip.max parameter).} \item{mean.var.plot (mvp):}{ First, uses a function to calculate average expression (mean.function) and dispersion (dispersion.function) for each feature. Next, divides features into num.bin (default 20) bins based on their average expression, and calculates z-scores for dispersion within each bin. The purpose of this is to identify variable features while controlling for the strong relationship between variability and average expression.} \item{dispersion (disp):}{ selects the genes with the highest dispersion values} }} \item{loess.span}{(vst method) Loess span parameter used when fitting the variance-mean relationship} \item{clip.max}{(vst method) After standardization values larger than clip.max will be set to clip.max; default is 'auto' which sets this value to the square root of the number of cells} \item{mean.function}{Function to compute x-axis value (average expression). Default is to take the mean of the detected (i.e. non-zero) values} \item{dispersion.function}{Function to compute y-axis value (dispersion). Default is to take the standard deviation of all values} \item{num.bin}{Total number of bins to use in the scaled analysis (default is 20)} \item{binning.method}{Specifies how the bins should be computed. Available methods are: \itemize{ \item{equal_width:}{ each bin is of equal width along the x-axis [default]} \item{equal_frequency:}{ each bin contains an equal number of features (can increase statistical power to detect overdispersed features at high expression values, at the cost of reduced resolution along the x-axis)} }} \item{verbose}{show progress bar for calculations} \item{nfeatures}{Number of features to select as top variable features; only used when \code{selection.method} is set to \code{'dispersion'} or \code{'vst'}} \item{mean.cutoff}{A two-length numeric vector with low- and high-cutoffs for feature means} \item{dispersion.cutoff}{A two-length numeric vector with low- and high-cutoffs for feature dispersions} \item{assay}{Assay to use} } \description{ Identifies features that are outliers on a 'mean variability plot'. } \details{ For the mean.var.plot method: Exact parameter settings may vary empirically from dataset to dataset, and based on visual inspection of the plot. Setting the y.cutoff parameter to 2 identifies features that are more than two standard deviations away from the average dispersion within a bin. The default X-axis function is the mean expression level, and for Y-axis it is the log(Variance/mean). Note that mean/variance calculations are not performed in log-space, but the results are reported in log-space - see relevant functions for exact details. } \concept{preprocessing} Seurat/man/L2CCA.Rd0000644000176200001440000000054114005656653013355 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/dimensional_reduction.R \name{L2CCA} \alias{L2CCA} \title{L2-Normalize CCA} \usage{ L2CCA(object, ...)
} \arguments{ \item{object}{Seurat object} \item{\dots}{Additional parameters to L2Dim.} } \description{ Perform l2 normalization on CCs } \concept{dimensional_reduction} Seurat/man/SetQuantile.Rd0000644000176200001440000000135614156670503015031 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/utilities.R \name{SetQuantile} \alias{SetQuantile} \title{Find the Quantile of Data} \usage{ SetQuantile(cutoff, data) } \arguments{ \item{cutoff}{The cutoff to turn into a quantile} \item{data}{The data to find the quantile of} } \value{ The numerical representation of the quantile } \description{ Converts a quantile in character form to a number with respect to some data. String form for a quantile is represented as a number prefixed with \dQuote{q}; for example, 10th quantile is \dQuote{q10} while 2nd quantile is \dQuote{q2}. Will only take a quantile of non-zero data values } \examples{ set.seed(42) SetQuantile('q10', sample(1:100, 10)) } \concept{utilities} Seurat/man/AnnotateAnchors.Rd0000644000176200001440000000256414024674706015663 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/generics.R, R/integration.R \name{AnnotateAnchors} \alias{AnnotateAnchors} \alias{AnnotateAnchors.default} \alias{AnnotateAnchors.IntegrationAnchorSet} \alias{AnnotateAnchors.TransferAnchorSet} \title{Add info to anchor matrix} \usage{ AnnotateAnchors(anchors, vars, slot, ...) \method{AnnotateAnchors}{default}( anchors, vars = NULL, slot = NULL, object.list, assay = NULL, ... ) \method{AnnotateAnchors}{IntegrationAnchorSet}( anchors, vars = NULL, slot = NULL, object.list = NULL, assay = NULL, ... ) \method{AnnotateAnchors}{TransferAnchorSet}( anchors, vars = NULL, slot = NULL, reference = NULL, query = NULL, assay = NULL, ... ) } \arguments{ \item{anchors}{An \code{\link{AnchorSet}} object} \item{vars}{Variables to pull for each object via FetchData} \item{slot}{Slot to pull feature data for} \item{...}{Arguments passed to other methods} \item{object.list}{List of Seurat objects} \item{assay}{Specify the Assay per object if annotating with expression data} \item{reference}{Reference object used in \code{\link{FindTransferAnchors}}} \item{query}{Query object used in \code{\link{FindTransferAnchors}}} } \value{ Returns the anchor dataframe with additional columns for annotation metadata } \description{ Add info to anchor matrix } \concept{integration} Seurat/man/CaseMatch.Rd0000644000176200001440000000113214005656653014416 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/utilities.R \name{CaseMatch} \alias{CaseMatch} \title{Match the case of character vectors} \usage{ CaseMatch(search, match) } \arguments{ \item{search}{A vector of search terms} \item{match}{A vector of characters whose case should be matched} } \value{ Values from search present in match with the case of match } \description{ Match the case of character vectors } \examples{ data("pbmc_small") cd_genes <- c('Cd79b', 'Cd19', 'Cd200') CaseMatch(search = cd_genes, match = rownames(x = pbmc_small)) } \concept{utilities} Seurat/man/ReadParseBio.Rd0000644000176200001440000000064014152476164015071 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/convenience.R \name{ReadParseBio} \alias{ReadParseBio} \title{Read output from Parse Biosciences} \usage{ ReadParseBio(data.dir, ...)
} \arguments{ \item{data.dir}{Directory containing the data files} \item{...}{Extra parameters passed to \code{\link{ReadMtx}}} } \description{ Read output from Parse Biosciences } \concept{convenience} Seurat/man/as.sparse.Rd0000644000176200001440000000253714024674706014477 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/objects.R, R/utilities.R \name{as.sparse.H5Group} \alias{as.sparse.H5Group} \alias{as.data.frame.Matrix} \title{Cast to Sparse} \usage{ \method{as.sparse}{H5Group}(x, ...) \method{as.data.frame}{Matrix}( x, row.names = NULL, optional = FALSE, ..., stringsAsFactors = default.stringsAsFactors() ) } \arguments{ \item{x}{An object} \item{...}{Arguments passed to other methods} \item{row.names}{\code{NULL} or a character vector giving the row names for the data; missing values are not allowed} \item{optional}{logical. If \code{TRUE}, setting row names and converting column names (to syntactic names: see \code{\link[base]{make.names}}) is optional. Note that all of \R's \pkg{base} package \code{as.data.frame()} methods use \code{optional} only for column names treatment, basically with the meaning of \code{\link[base]{data.frame}(*, check.names = !optional)}. See also the \code{make.names} argument of the \code{matrix} method.} \item{stringsAsFactors}{logical: should the character vector be converted to a factor?} } \value{ \code{as.data.frame.Matrix}: A data frame representation of the S4 Matrix } \description{ Cast to Sparse } \seealso{ \code{\link[SeuratObject:as.sparse]{SeuratObject::as.sparse}} } \concept{objects} \concept{utilities} Seurat/man/MinMax.Rd0000644000176200001440000000126214005656653013763 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/utilities.R \name{MinMax} \alias{MinMax} \title{Apply a ceiling and floor to all values in a matrix} \usage{ MinMax(data, min, max) } \arguments{ \item{data}{Matrix or data frame} \item{min}{all values below this min value will be replaced with min} \item{max}{all values above this max value will be replaced with max} } \value{ Returns matrix after performing these floor and ceil operations } \description{ Apply a ceiling and floor to all values in a matrix } \examples{ mat <- matrix(data = rbinom(n = 25, size = 20, prob = 0.2 ), nrow = 5) mat MinMax(data = mat, min = 4, max = 5) } \concept{utilities} Seurat/man/AggregateExpression.Rd0000644000176200001440000000444614005656653016547 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/utilities.R \name{AggregateExpression} \alias{AggregateExpression} \title{Aggregated feature expression by identity class} \usage{ AggregateExpression( object, assays = NULL, features = NULL, return.seurat = FALSE, group.by = "ident", add.ident = NULL, slot = "data", verbose = TRUE, ... ) } \arguments{ \item{object}{Seurat object} \item{assays}{Which assays to use. Default is all assays} \item{features}{Features to analyze. Default is all features in the assay} \item{return.seurat}{Whether to return the data as a Seurat object. 
Default is FALSE} \item{group.by}{Categories for grouping (e.g., ident, replicate, celltype); 'ident' by default} \item{add.ident}{(Deprecated) Place an additional label on each cell prior to pseudobulking (very useful if you want to observe cluster pseudobulk values, separated by replicate, for example)} \item{slot}{Slot(s) to use; if multiple slots are given, assumed to follow the order of 'assays' (if specified) or object's assays} \item{verbose}{Print messages and show progress bar} \item{...}{Arguments to be passed to methods such as \code{\link{CreateSeuratObject}}} } \value{ Returns a matrix with genes as rows, identity classes as columns. If return.seurat is TRUE, returns an object of class \code{\link{Seurat}}. } \description{ Returns aggregated (summed) expression values for each identity class } \details{ If slot is set to 'data', this function assumes that the data has been log normalized and therefore feature values are exponentiated prior to aggregating so that sum is done in non-log space. Otherwise, if slot is set to either 'counts' or 'scale.data', no exponentiation is performed prior to aggregating. If \code{return.seurat = TRUE} and slot is not 'scale.data', aggregated values are placed in the 'counts' slot of the returned object and the log of aggregated values are placed in the 'data' slot. \code{\link{ScaleData}} is then run on the default assay before returning the object. If \code{return.seurat = TRUE} and slot is 'scale.data', the 'counts' slot is left empty, the 'data' slot is filled with NA, and 'scale.data' is set to the aggregated values. } \examples{ data("pbmc_small") head(AggregateExpression(object = pbmc_small)) } \concept{utilities} Seurat/man/MULTIseqDemux.Rd0000644000176200001440000000243014005656653015176 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/preprocessing.R \name{MULTIseqDemux} \alias{MULTIseqDemux} \title{Demultiplex samples based on classification method from MULTI-seq (McGinnis et al., bioRxiv 2018)} \usage{ MULTIseqDemux( object, assay = "HTO", quantile = 0.7, autoThresh = FALSE, maxiter = 5, qrange = seq(from = 0.1, to = 0.9, by = 0.05), verbose = TRUE ) } \arguments{ \item{object}{Seurat object. Assumes that the specified assay data has been added} \item{assay}{Name of the multiplexing assay (HTO by default)} \item{quantile}{The quantile to use for classification} \item{autoThresh}{Whether to perform automated threshold finding to define the best quantile. Default is FALSE} \item{maxiter}{Maximum number of iterations if autoThresh = TRUE. Default is 5} \item{qrange}{A range of possible quantile values to try if autoThresh = TRUE} \item{verbose}{Prints the output} } \value{ A Seurat object with demultiplexing results stored at \code{object$MULTI_ID} } \description{ Identify singlets, doublets and negative cells from multiplexing experiments. Annotate singlets by tags.
} \examples{ \dontrun{ object <- MULTIseqDemux(object) } } \references{ \url{https://www.biorxiv.org/content/10.1101/387241v1} } \concept{preprocessing} Seurat/man/CollapseEmbeddingOutliers.Rd0000644000176200001440000000230414005656653017660 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/visualization.R \name{CollapseEmbeddingOutliers} \alias{CollapseEmbeddingOutliers} \title{Move outliers towards center on dimension reduction plot} \usage{ CollapseEmbeddingOutliers( object, reduction = "umap", dims = 1:2, group.by = "ident", outlier.sd = 2, reduction.key = "UMAP_" ) } \arguments{ \item{object}{Seurat object} \item{reduction}{Name of DimReduc to adjust} \item{dims}{Dimensions to visualize} \item{group.by}{Group (color) cells in different ways (for example, orig.ident)} \item{outlier.sd}{Controls the outlier distance} \item{reduction.key}{Key for DimReduc that is returned} } \value{ Returns a DimReduc object with the modified embeddings } \description{ Move outliers towards center on dimension reduction plot } \examples{ \dontrun{ data("pbmc_small") pbmc_small <- FindClusters(pbmc_small, resolution = 1.1) pbmc_small <- RunUMAP(pbmc_small, dims = 1:5) DimPlot(pbmc_small, reduction = "umap") pbmc_small[["umap_new"]] <- CollapseEmbeddingOutliers(pbmc_small, reduction = "umap", reduction.key = 'umap_', outlier.sd = 0.5) DimPlot(pbmc_small, reduction = "umap_new") } } \concept{visualization} Seurat/man/L2Dim.Rd0000644000176200001440000000114114005656653013475 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/dimensional_reduction.R \name{L2Dim} \alias{L2Dim} \title{L2-normalization} \usage{ L2Dim(object, reduction, new.dr = NULL, new.key = NULL) } \arguments{ \item{object}{Seurat object} \item{reduction}{Dimensional reduction to normalize} \item{new.dr}{name of new dimensional reduction to store (default is olddr.l2)} \item{new.key}{name of key for new dimensional reduction} } \value{ Returns a \code{\link{Seurat}} object } \description{ Perform l2 normalization on given dimensional reduction } \concept{dimensional_reduction} Seurat/man/STARmap-class.Rd0000644000176200001440000000101114024674706015134 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/objects.R \docType{class} \name{STARmap-class} \alias{STARmap-class} \alias{STARmap} \title{The STARmap class} \description{ The STARmap class } \section{Slots}{ \describe{ \item{\code{assay}}{Name of assay to associate image data with; will give this image priority for visualization when the assay is set as the active/default assay in a \code{Seurat} object} \item{\code{key}}{Key for the image} } } \concept{objects} \concept{spatial} Seurat/man/AutoPointSize.Rd0000644000176200001440000000112514156670503015342 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/visualization.R \name{AutoPointSize} \alias{AutoPointSize} \title{Automagically calculate a point size for ggplot2-based scatter plots} \usage{ AutoPointSize(data, raster = NULL) } \arguments{ \item{data}{A data frame being passed to ggplot2} \item{raster}{If TRUE, point size is set to 1} } \value{ The "optimal" point size for visualizing these data } \description{ It happens to look good } \examples{ df <- data.frame(x = rnorm(n = 10000), y = runif(n = 10000)) AutoPointSize(data = df) } \concept{visualization} Seurat/man/CreateSCTAssayObject.Rd0000644000176200001440000000223514024674706016500 0ustar 
liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/objects.R \name{CreateSCTAssayObject} \alias{CreateSCTAssayObject} \title{Create a SCT Assay object} \usage{ CreateSCTAssayObject( counts, data, scale.data = NULL, umi.assay = "RNA", min.cells = 0, min.features = 0, SCTModel.list = NULL ) } \arguments{ \item{counts}{Unnormalized data such as raw counts or TPMs} \item{data}{Prenormalized data; if provided, do not pass \code{counts}} \item{scale.data}{a residual matrix} \item{umi.assay}{The UMI assay name. Default is RNA} \item{min.cells}{Include features detected in at least this many cells. Will subset the counts matrix as well. To reintroduce excluded features, create a new object with a lower cutoff.} \item{min.features}{Include cells where at least this many features are detected.} \item{SCTModel.list}{list of SCTModels} } \description{ Create a SCT object from a feature (e.g. gene) expression matrix and a list of SCTModels. The expected format of the input matrix is features x cells. } \details{ Non-unique cell or feature names are not allowed. Please make unique before calling this function. } \concept{objects} Seurat/man/IntegrateData.Rd0000644000176200001440000001347214152507372015310 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/integration.R \name{IntegrateData} \alias{IntegrateData} \title{Integrate data} \usage{ IntegrateData( anchorset, new.assay.name = "integrated", normalization.method = c("LogNormalize", "SCT"), features = NULL, features.to.integrate = NULL, dims = 1:30, k.weight = 100, weight.reduction = NULL, sd.weight = 1, sample.tree = NULL, preserve.order = FALSE, eps = 0, verbose = TRUE ) } \arguments{ \item{anchorset}{An \code{\link{AnchorSet}} object generated by \code{\link{FindIntegrationAnchors}}} \item{new.assay.name}{Name for the new assay containing the integrated data} \item{normalization.method}{Name of normalization method used: LogNormalize or SCT} \item{features}{Vector of features to use when computing the PCA to determine the weights. Only set if you want a different set from those used in the anchor finding process} \item{features.to.integrate}{Vector of features to integrate. By default, will use the features used in anchor finding.} \item{dims}{Number of dimensions to use in the anchor weighting procedure} \item{k.weight}{Number of neighbors to consider when weighting anchors} \item{weight.reduction}{Dimension reduction to use when calculating anchor weights. This can be one of: \itemize{ \item{A string, specifying the name of a dimension reduction present in all objects to be integrated} \item{A vector of strings, specifying the name of a dimension reduction to use for each object to be integrated} \item{A vector of \code{\link{DimReduc}} objects, specifying the object to use for each object in the integration} \item{NULL, in which case a new PCA will be calculated and used to calculate anchor weights} } Note that, if specified, the requested dimension reduction will only be used for calculating anchor weights in the first merge between reference and query, as the merged object will subsequently contain more cells than was in query, and weights will need to be calculated for all cells in the object.} \item{sd.weight}{Controls the bandwidth of the Gaussian kernel for weighting} \item{sample.tree}{Specify the order of integration. Order of integration should be encoded in a matrix, where each row represents one of the pairwise integration steps. 
Negative numbers specify a dataset, positive numbers specify the integration results from a given row (the format of the merge matrix included in the \code{\link{hclust}} function output). For example: \code{matrix(c(-2, 1, -3, -1), ncol = 2)} gives:\preformatted{     [,1] [,2]
[1,]   -2   -3
[2,]    1   -1
} Which would cause dataset 2 and 3 to be integrated first, then the resulting object integrated with dataset 1. If NULL, the sample tree will be computed automatically.} \item{preserve.order}{Do not reorder objects based on size for each pairwise integration.} \item{eps}{Error bound on the neighbor finding algorithm (from \code{\link{RANN}})} \item{verbose}{Print progress bars and output} } \value{ Returns a \code{\link{Seurat}} object with a new integrated \code{\link{Assay}}. If \code{normalization.method = "LogNormalize"}, the integrated data is returned to the \code{data} slot and can be treated as log-normalized, corrected data. If \code{normalization.method = "SCT"}, the integrated data is returned to the \code{scale.data} slot and can be treated as centered, corrected Pearson residuals. } \description{ Perform dataset integration using a pre-computed \code{\link{AnchorSet}}. } \details{ The main steps of this procedure are outlined below. For a more detailed description of the methodology, please see Stuart, Butler, et al Cell 2019. \doi{10.1016/j.cell.2019.05.031}; \doi{10.1101/460147} For pairwise integration: \itemize{ \item{Construct a weights matrix that defines the association between each query cell and each anchor. These weights are computed as 1 - the distance between the query cell and the anchor divided by the distance of the query cell to the \code{k.weight}th anchor multiplied by the anchor score computed in \code{\link{FindIntegrationAnchors}}. We then apply a Gaussian kernel with a bandwidth defined by \code{sd.weight} and normalize across all \code{k.weight} anchors.} \item{Compute the anchor integration matrix as the difference between the two expression matrices for every pair of anchor cells} \item{Compute the transformation matrix as the product of the integration matrix and the weights matrix.} \item{Subtract the transformation matrix from the original expression matrix.} } For multiple dataset integration, we perform iterative pairwise integration. To determine the order of integration (if not specified via \code{sample.tree}), we \itemize{ \item{Define a distance between datasets as the total number of cells in the smaller dataset divided by the total number of anchors between the two datasets.} \item{Compute all pairwise distances between datasets} \item{Cluster this distance matrix to determine a guide tree} } } \examples{ \dontrun{ # to install the SeuratData package see https://github.com/satijalab/seurat-data library(SeuratData) data("panc8") # panc8 is a merged Seurat object containing 8 separate pancreas datasets # split the object by dataset pancreas.list <- SplitObject(panc8, split.by = "tech") # perform standard preprocessing on each object for (i in 1:length(pancreas.list)) { pancreas.list[[i]] <- NormalizeData(pancreas.list[[i]], verbose = FALSE) pancreas.list[[i]] <- FindVariableFeatures( pancreas.list[[i]], selection.method = "vst", nfeatures = 2000, verbose = FALSE ) } # find anchors anchors <- FindIntegrationAnchors(object.list = pancreas.list) # integrate data integrated <- IntegrateData(anchorset = anchors) } } \references{ Stuart T, Butler A, et al. Comprehensive Integration of Single-Cell Data. Cell.
2019;177:1888-1902 \doi{10.1016/j.cell.2019.05.031} } \concept{integration} Seurat/man/MapQuery.Rd0000644000176200001440000000565014152476164014342 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/integration.R \name{MapQuery} \alias{MapQuery} \title{Map query cells to a reference} \usage{ MapQuery( anchorset, query, reference, refdata = NULL, new.reduction.name = NULL, reference.reduction = NULL, reference.dims = NULL, query.dims = NULL, reduction.model = NULL, transferdata.args = list(), integrateembeddings.args = list(), projectumap.args = list(), verbose = TRUE ) } \arguments{ \item{anchorset}{An AnchorSet object} \item{query}{Query object used in anchorset construction} \item{reference}{Reference object used in anchorset construction} \item{refdata}{Data to transfer. This can be specified in one of two ways: \itemize{ \item{The reference data itself as either a vector where the names correspond to the reference cells, or a matrix, where the column names correspond to the reference cells.} \item{The name of the metadata field or assay from the reference object provided. This requires the reference parameter to be specified. If pulling assay data in this manner, it will pull the data from the data slot. To transfer data from other slots, please pull the data explicitly with \code{\link{GetAssayData}} and provide that matrix here.} }} \item{new.reduction.name}{Name for new integrated dimensional reduction.} \item{reference.reduction}{Name of reduction to use from the reference for neighbor finding} \item{reference.dims}{Dimensions (columns) to use from reference} \item{query.dims}{Dimensions (columns) to use from query} \item{reduction.model}{\code{DimReduc} object that contains the umap model} \item{transferdata.args}{A named list of additional arguments to \code{\link{TransferData}}} \item{integrateembeddings.args}{A named list of additional arguments to \code{\link{IntegrateEmbeddings}}} \item{projectumap.args}{A named list of additional arguments to \code{\link{ProjectUMAP}}} \item{verbose}{Print progress bars and output} } \value{ Returns a modified query Seurat object containing: \itemize{ \item{New Assays corresponding to the features transferred and/or their corresponding prediction scores from \code{\link{TransferData}}} \item{An integrated reduction from \code{\link{IntegrateEmbeddings}}} \item{A projected UMAP reduction of the query cells projected into the reference UMAP using \code{\link{ProjectUMAP}}} } } \description{ This is a convenience wrapper function around the following three functions that are often run together when mapping query data to a reference: \code{\link{TransferData}}, \code{\link{IntegrateEmbeddings}}, \code{\link{ProjectUMAP}}. Note that by default, the \code{weight.reduction} parameter for all functions will be set to the dimension reduction method used in the \code{\link{FindTransferAnchors}} function call used to construct the anchor object, and the \code{dims} parameter will be the same dimensions used to find anchors. } \concept{integration} Seurat/man/GetImage.Rd0000644000176200001440000000150014024674706014247 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/objects.R \name{GetImage.SlideSeq} \alias{GetImage.SlideSeq} \alias{GetImage.STARmap} \alias{GetImage.VisiumV1} \title{Get Image Data} \usage{ \method{GetImage}{SlideSeq}(object, mode = c("grob", "raster", "plotly", "raw"), ...) 
\method{GetImage}{STARmap}(object, mode = c("grob", "raster", "plotly", "raw"), ...) \method{GetImage}{VisiumV1}(object, mode = c("grob", "raster", "plotly", "raw"), ...) } \arguments{ \item{object}{An object} \item{mode}{How to return the image; should accept one of \dQuote{grob}, \dQuote{raster}, \dQuote{plotly}, or \dQuote{raw}} \item{...}{Arguments passed to other methods} } \description{ Get Image Data } \seealso{ \code{\link[SeuratObject:GetImage]{SeuratObject::GetImage}} } \concept{objects} \concept{spatial} Seurat/man/CellCycleScoring.Rd0000644000176200001440000000267614005656653015770 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/utilities.R \name{CellCycleScoring} \alias{CellCycleScoring} \title{Score cell cycle phases} \usage{ CellCycleScoring( object, s.features, g2m.features, ctrl = NULL, set.ident = FALSE, ... ) } \arguments{ \item{object}{A Seurat object} \item{s.features}{A vector of features associated with S phase} \item{g2m.features}{A vector of features associated with G2M phase} \item{ctrl}{Number of control features selected from the same bin per analyzed feature supplied to \code{\link{AddModuleScore}}. Defaults to value equivalent to minimum number of features present in 's.features' and 'g2m.features'.} \item{set.ident}{If true, sets identity to phase assignments Stashes old identities in 'old.ident'} \item{...}{Arguments to be passed to \code{\link{AddModuleScore}}} } \value{ A Seurat object with the following columns added to object meta data: S.Score, G2M.Score, and Phase } \description{ Score cell cycle phases } \examples{ \dontrun{ data("pbmc_small") # pbmc_small doesn't have any cell-cycle genes # To run CellCycleScoring, please use a dataset with cell-cycle genes # An example is available at http://satijalab.org/seurat/cell_cycle_vignette.html pbmc_small <- CellCycleScoring( object = pbmc_small, g2m.features = cc.genes$g2m.genes, s.features = cc.genes$s.genes ) head(x = pbmc_small@meta.data) } } \seealso{ \code{AddModuleScore} } \concept{utilities} Seurat/man/FindTransferAnchors.Rd0000644000176200001440000002012214152476164016471 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/integration.R \name{FindTransferAnchors} \alias{FindTransferAnchors} \title{Find transfer anchors} \usage{ FindTransferAnchors( reference, query, normalization.method = "LogNormalize", recompute.residuals = TRUE, reference.assay = NULL, reference.neighbors = NULL, query.assay = NULL, reduction = "pcaproject", reference.reduction = NULL, project.query = FALSE, features = NULL, scale = TRUE, npcs = 30, l2.norm = TRUE, dims = 1:30, k.anchor = 5, k.filter = 200, k.score = 30, max.features = 200, nn.method = "annoy", n.trees = 50, eps = 0, approx.pca = TRUE, mapping.score.k = NULL, verbose = TRUE ) } \arguments{ \item{reference}{\code{\link{Seurat}} object to use as the reference} \item{query}{\code{\link{Seurat}} object to use as the query} \item{normalization.method}{Name of normalization method used: LogNormalize or SCT.} \item{recompute.residuals}{If using SCT as a normalization method, compute query Pearson residuals using the reference SCT model parameters.} \item{reference.assay}{Name of the Assay to use from reference} \item{reference.neighbors}{Name of the Neighbor to use from the reference. Optionally enables reuse of precomputed neighbors.} \item{query.assay}{Name of the Assay to use from query} \item{reduction}{Dimensional reduction to perform when finding anchors. 
Options are: \itemize{ \item{pcaproject: Project the PCA from the reference onto the query. We recommend using PCA when reference and query datasets are from scRNA-seq} \item{lsiproject: Project the LSI from the reference onto the query. We recommend using LSI when reference and query datasets are from scATAC-seq. This requires that LSI has been computed for the reference dataset, and the same features (eg, peaks or genome bins) are present in both the reference and query. See \code{\link[Signac]{RunTFIDF}} and \code{\link[Signac]{RunSVD}}} \item{rpca: Project the PCA from the reference onto the query, and the PCA from the query onto the reference (reciprocal PCA projection).} \item{cca: Run a CCA on the reference and query } }} \item{reference.reduction}{Name of dimensional reduction to use from the reference if running the pcaproject workflow. Optionally enables reuse of precomputed reference dimensional reduction. If NULL (default), use a PCA computed on the reference object.} \item{project.query}{Project the PCA from the query dataset onto the reference. Use only in rare cases where the query dataset has a much larger cell number, but the reference dataset has a unique assay for transfer. In this case, the default features will be set to the variable features of the query object that are also present in the reference.} \item{features}{Features to use for dimensional reduction. If not specified, set as variable features of the reference object which are also present in the query.} \item{scale}{Scale query data.} \item{npcs}{Number of PCs to compute on reference if reference.reduction is not provided.} \item{l2.norm}{Perform L2 normalization on the cell embeddings after dimensional reduction} \item{dims}{Which dimensions to use from the reduction to specify the neighbor search space} \item{k.anchor}{How many neighbors (k) to use when finding anchors} \item{k.filter}{How many neighbors (k) to use when filtering anchors. Set to NA to turn off filtering.} \item{k.score}{How many neighbors (k) to use when scoring anchors} \item{max.features}{The maximum number of features to use when specifying the neighborhood search space in the anchor filtering} \item{nn.method}{Method for nearest neighbor finding. Options include: rann, annoy} \item{n.trees}{More trees give higher precision when using annoy approximate nearest neighbor search} \item{eps}{Error bound on the neighbor finding algorithm (from \code{\link{RANN}} or \code{\link{RcppAnnoy}})} \item{approx.pca}{Use truncated singular value decomposition to approximate PCA} \item{mapping.score.k}{Compute and store nearest k query neighbors in the AnchorSet object that is returned. You can optionally set this if you plan on computing the mapping score and want to enable reuse of some downstream neighbor calculations to make the mapping score function more efficient.} \item{verbose}{Print progress bars and output} } \value{ Returns an \code{AnchorSet} object that can be used as input to \code{\link{TransferData}}, \code{\link{IntegrateEmbeddings}} and \code{\link{MapQuery}}. The dimension reduction used for finding anchors is stored in the \code{AnchorSet} object and can be used for computing anchor weights in downstream functions. Note that only the requested dimensions are stored in the dimension reduction object in the \code{AnchorSet}. This means that if \code{dims=2:20} is used, for example, the dimension of the stored reduction is \code{1:19}. } \description{ Find a set of anchors between a reference and query object.
These anchors can later be used to transfer data from the reference to query object using the \code{\link{TransferData}} object. } \details{ The main steps of this procedure are outlined below. For a more detailed description of the methodology, please see Stuart, Butler, et al Cell 2019. \doi{10.1016/j.cell.2019.05.031}; \doi{10.1101/460147} \itemize{ \item{Perform dimensional reduction. Exactly what is done here depends on the values set for the \code{reduction} and \code{project.query} parameters. If \code{reduction = "pcaproject"}, a PCA is performed on either the reference (if \code{project.query = FALSE}) or the query (if \code{project.query = TRUE}), using the \code{features} specified. The data from the other dataset is then projected onto this learned PCA structure. If \code{reduction = "cca"}, then CCA is performed on the reference and query for this dimensional reduction step. If \code{reduction = "lsiproject"}, the stored LSI dimension reduction in the reference object is used to project the query dataset onto the reference. If \code{l2.norm} is set to \code{TRUE}, perform L2 normalization of the embedding vectors.} \item{Identify anchors between the reference and query - pairs of cells from each dataset that are contained within each other's neighborhoods (also known as mutual nearest neighbors).} \item{Filter low confidence anchors to ensure anchors in the low dimension space are in broad agreement with the high dimensional measurements. This is done by looking at the neighbors of each query cell in the reference dataset using \code{max.features} to define this space. If the reference cell isn't found within the first \code{k.filter} neighbors, remove the anchor.} \item{Assign each remaining anchor a score. For each anchor cell, determine the nearest \code{k.score} anchors within its own dataset and within its pair's dataset. Based on these neighborhoods, construct an overall neighbor graph and then compute the shared neighbor overlap between anchor and query cells (analogous to an SNN graph). We use the 0.01 and 0.90 quantiles on these scores to dampen outlier effects and rescale to range between 0-1.} } } \examples{ \dontrun{ # to install the SeuratData package see https://github.com/satijalab/seurat-data library(SeuratData) data("pbmc3k") # for demonstration, split the object into reference and query pbmc.reference <- pbmc3k[, 1:1350] pbmc.query <- pbmc3k[, 1351:2700] # perform standard preprocessing on each object pbmc.reference <- NormalizeData(pbmc.reference) pbmc.reference <- FindVariableFeatures(pbmc.reference) pbmc.reference <- ScaleData(pbmc.reference) pbmc.query <- NormalizeData(pbmc.query) pbmc.query <- FindVariableFeatures(pbmc.query) pbmc.query <- ScaleData(pbmc.query) # find anchors anchors <- FindTransferAnchors(reference = pbmc.reference, query = pbmc.query) # transfer labels predictions <- TransferData( anchorset = anchors, refdata = pbmc.reference$seurat_annotations ) pbmc.query <- AddMetaData(object = pbmc.query, metadata = predictions) } } \references{ Stuart T, Butler A, et al. Comprehensive Integration of Single-Cell Data. Cell. 
2019;177:1888-1902 \doi{10.1016/j.cell.2019.05.031}; } \concept{integration} Seurat/man/contrast-theory.Rd0000644000176200001440000000131614005656653015737 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/visualization.R \name{contrast-theory} \alias{contrast-theory} \alias{Intensity} \alias{Luminance} \title{Get the intensity and/or luminance of a color} \source{ \url{https://stackoverflow.com/questions/3942878/how-to-decide-font-color-in-white-or-black-depending-on-background-color} } \usage{ Intensity(color) Luminance(color) } \arguments{ \item{color}{A vector of colors} } \value{ A vector of intensities/luminances for each color } \description{ Get the intensity and/or luminance of a color } \examples{ Intensity(color = c('black', 'white', '#E76BF3')) Luminance(color = c('black', 'white', '#E76BF3')) } \concept{visualization} Seurat/man/DimPlot.Rd0000644000176200001440000001124014165416216014133 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/visualization.R, R/convenience.R \name{DimPlot} \alias{DimPlot} \alias{TSNEPlot} \alias{PCAPlot} \alias{ICAPlot} \alias{UMAPPlot} \title{Dimensional reduction plot} \usage{ DimPlot( object, dims = c(1, 2), cells = NULL, cols = NULL, pt.size = NULL, reduction = NULL, group.by = NULL, split.by = NULL, shape.by = NULL, order = NULL, shuffle = FALSE, seed = 1, label = FALSE, label.size = 4, label.color = "black", label.box = FALSE, repel = FALSE, cells.highlight = NULL, cols.highlight = "#DE2D26", sizes.highlight = 1, na.value = "grey50", ncol = NULL, combine = TRUE, raster = NULL, raster.dpi = c(512, 512) ) PCAPlot(object, ...) TSNEPlot(object, ...) UMAPPlot(object, ...) } \arguments{ \item{object}{Seurat object} \item{dims}{Dimensions to plot, must be a two-length numeric vector specifying x- and y-dimensions} \item{cells}{Vector of cells to plot (default is all cells)} \item{cols}{Vector of colors, each color corresponds to an identity class. This may also be a single character or numeric value corresponding to a palette as specified by \code{\link[RColorBrewer]{brewer.pal.info}}. By default, ggplot2 assigns colors. We also include a number of palettes from the pals package. See \code{\link{DiscretePalette}} for details.} \item{pt.size}{Adjust point size for plotting} \item{reduction}{Which dimensionality reduction to use. If not specified, first searches for umap, then tsne, then pca} \item{group.by}{Name of one or more metadata columns to group (color) cells by (for example, orig.ident); pass 'ident' to group by identity class} \item{split.by}{Name of a metadata column to split plot by; see \code{\link{FetchData}} for more details} \item{shape.by}{If NULL, all points are circles (default). You can specify any cell attribute (that can be pulled with FetchData) allowing for both different colors and different shapes on cells. Only applicable if \code{raster = FALSE}.} \item{order}{Specify the order of plotting for the idents. This can be useful for crowded plots if points of interest are being buried. Provide either a full list of valid idents or a subset to be plotted last (on top)} \item{shuffle}{Whether to randomly shuffle the order of points. This can be useful for crowded plots if points of interest are being buried. 
(default is FALSE)} \item{seed}{Sets the seed if randomly shuffling the order of points.} \item{label}{Whether to label the clusters} \item{label.size}{Sets size of labels} \item{label.color}{Sets the color of the label text} \item{label.box}{Whether to put a box around the label text (geom_text vs geom_label)} \item{repel}{Repel labels} \item{cells.highlight}{A list of character or numeric vectors of cells to highlight. If only one group of cells desired, can simply pass a vector instead of a list. If set, colors selected cells to the color(s) in \code{cols.highlight} and other cells black (white if dark.theme = TRUE); will also resize to the size(s) passed to \code{sizes.highlight}} \item{cols.highlight}{A vector of colors to highlight the cells as; will repeat to the length groups in cells.highlight} \item{sizes.highlight}{Size of highlighted cells; will repeat to the length groups in cells.highlight} \item{na.value}{Color value for NA points when using custom scale} \item{ncol}{Number of columns for display when combining plots} \item{combine}{Combine plots into a single \code{\link[patchwork]{patchwork}ed} ggplot object. If \code{FALSE}, return a list of ggplot objects} \item{raster}{Convert points to raster format, default is \code{NULL} which automatically rasterizes if plotting more than 100,000 cells} \item{raster.dpi}{Pixel resolution for rasterized plots, passed to geom_scattermore(). Default is c(512, 512).} \item{...}{Extra parameters passed to \code{DimPlot}} } \value{ A \code{\link[patchwork]{patchwork}ed} ggplot object if \code{combine = TRUE}; otherwise, a list of ggplot objects } \description{ Graphs the output of a dimensional reduction technique on a 2D scatter plot where each point is a cell and it's positioned based on the cell embeddings determined by the reduction technique. By default, cells are colored by their identity class (can be changed with the group.by parameter). } \note{ For the old \code{do.hover} and \code{do.identify} functionality, please see \code{HoverLocator} and \code{CellSelector}, respectively. } \examples{ data("pbmc_small") DimPlot(object = pbmc_small) DimPlot(object = pbmc_small, split.by = 'ident') } \seealso{ \code{\link{FeaturePlot}} \code{\link{HoverLocator}} \code{\link{CellSelector}} \code{\link{FetchData}} } \concept{convenience} \concept{visualization} Seurat/man/merge.SCTAssay.Rd0000644000176200001440000000203114024674706015315 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/objects.R \name{merge.SCTAssay} \alias{merge.SCTAssay} \title{Merge SCTAssay objects} \usage{ \method{merge}{SCTAssay}( x = NULL, y = NULL, add.cell.ids = NULL, merge.data = TRUE, na.rm = TRUE, ... ) } \arguments{ \item{x}{A \code{\link[SeuratObject]{Seurat}} object} \item{y}{A single \code{Seurat} object or a list of \code{Seurat} objects} \item{add.cell.ids}{A character vector of \code{length(x = c(x, y))}; appends the corresponding values to the start of each objects' cell names} \item{merge.data}{Merge the data slots instead of just merging the counts (which requires renormalization); this is recommended if the same normalization approach was applied to all objects} \item{na.rm}{If na.rm = TRUE, this will only preserve residuals that are present in all SCTAssays being merged. 
Otherwise, missing residuals will be populated with NAs.} \item{...}{Arguments passed to other methods} } \description{ Merge SCTAssay objects } \concept{objects} Seurat/man/JackStrawPlot.Rd0000644000176200001440000000330114024674706015316 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/visualization.R \name{JackStrawPlot} \alias{JackStrawPlot} \title{JackStraw Plot} \usage{ JackStrawPlot( object, dims = 1:5, cols = NULL, reduction = "pca", xmax = 0.1, ymax = 0.3 ) } \arguments{ \item{object}{Seurat object} \item{dims}{Dims to plot} \item{cols}{Vector of colors, each color corresponds to an individual PC. This may also be a single character or numeric value corresponding to a palette as specified by \code{\link[RColorBrewer]{brewer.pal.info}}. By default, ggplot2 assigns colors. We also include a number of palettes from the pals package. See \code{\link{DiscretePalette}} for details.} \item{reduction}{reduction to pull jackstraw info from} \item{xmax}{X-axis maximum on each QQ plot.} \item{ymax}{Y-axis maximum on each QQ plot.} } \value{ A ggplot object } \description{ Plots the results of the JackStraw analysis for PCA significance. For each PC, plots a QQ-plot comparing the distribution of p-values for all genes across each PC, compared with a uniform distribution. Also determines a p-value for the overall significance of each PC (see Details). } \details{ Significant PCs should show a p-value distribution (black curve) that is strongly skewed to the left compared to the null distribution (dashed line) The p-value for each PC is based on a proportion test comparing the number of genes with a p-value below a particular threshold (score.thresh), compared with the proportion of genes expected under a uniform distribution of p-values. } \examples{ data("pbmc_small") JackStrawPlot(object = pbmc_small) } \seealso{ \code{\link{ScoreJackStraw}} } \author{ Omri Wurtzel } \concept{visualization} Seurat/man/LinkedPlots.Rd0000644000176200001440000000354614152476164015031 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/visualization.R \name{LinkedPlots} \alias{LinkedPlots} \alias{LinkedDimPlot} \alias{LinkedPlot} \alias{LinkedFeaturePlot} \title{Visualize spatial and clustering (dimensional reduction) data in a linked, interactive framework} \usage{ LinkedDimPlot( object, dims = 1:2, reduction = NULL, image = NULL, group.by = NULL, alpha = c(0.1, 1), combine = TRUE ) LinkedFeaturePlot( object, feature, dims = 1:2, reduction = NULL, image = NULL, slot = "data", alpha = c(0.1, 1), combine = TRUE ) } \arguments{ \item{object}{Seurat object} \item{dims}{Dimensions to plot, must be a two-length numeric vector specifying x- and y-dimensions} \item{reduction}{Which dimensionality reduction to use. If not specified, first searches for umap, then tsne, then pca} \item{image}{Name of the image to use in the plot} \item{group.by}{Name of one or more metadata columns to group (color) cells by (for example, orig.ident); pass 'ident' to group by identity class} \item{alpha}{Controls opacity of spots. Provide as a vector specifying the min and max for SpatialFeaturePlot. For SpatialDimPlot, provide a single alpha value for each plot.} \item{combine}{Combine plots into a single \code{\link[patchwork]{patchwork}ed} ggplot object. If \code{FALSE}, return a list of ggplot objects} \item{feature}{Feature to visualize} \item{slot}{Which slot to pull expression data from?} } \value{ Returns final plots. 
If \code{combine}, plots are stitched together using \code{\link{CombinePlots}}; otherwise, returns a list of ggplot objects } \description{ Visualize spatial and clustering (dimensional reduction) data in a linked, interactive framework } \examples{ \dontrun{ LinkedDimPlot(seurat.object) LinkedFeaturePlot(seurat.object, feature = 'Hpca') } } \concept{spatial} \concept{visualization} Seurat/man/VlnPlot.Rd0000644000176200001440000000470414156670503014171 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/visualization.R \name{VlnPlot} \alias{VlnPlot} \title{Single cell violin plot} \usage{ VlnPlot( object, features, cols = NULL, pt.size = NULL, idents = NULL, sort = FALSE, assay = NULL, group.by = NULL, split.by = NULL, adjust = 1, y.max = NULL, same.y.lims = FALSE, log = FALSE, ncol = NULL, slot = "data", split.plot = FALSE, stack = FALSE, combine = TRUE, fill.by = "feature", flip = FALSE, raster = NULL ) } \arguments{ \item{object}{Seurat object} \item{features}{Features to plot (gene expression, metrics, PC scores, anything that can be retrieved by FetchData)} \item{cols}{Colors to use for plotting} \item{pt.size}{Point size for geom_violin} \item{idents}{Which classes to include in the plot (default is all)} \item{sort}{Sort identity classes (on the x-axis) by the average expression of the attribute being plotted, can also pass 'increasing' or 'decreasing' to change sort direction} \item{assay}{Name of assay to use, defaults to the active assay} \item{group.by}{Group (color) cells in different ways (for example, orig.ident)} \item{split.by}{A variable to split the violin plots by} \item{adjust}{Adjust parameter for geom_violin} \item{y.max}{Maximum y axis value} \item{same.y.lims}{Set all the y-axis limits to the same values} \item{log}{plot the feature axis on log scale} \item{ncol}{Number of columns if multiple plots are displayed} \item{slot}{Use non-normalized counts data for plotting} \item{split.plot}{plot each group of the split violin plots by multiple or single violin shapes.} \item{stack}{Horizontally stack plots for each feature} \item{combine}{Combine plots into a single \code{\link[patchwork]{patchwork}ed} ggplot object. If \code{FALSE}, return a list of ggplot objects} \item{fill.by}{Color violins/ridges based on either 'feature' or 'ident'} \item{flip}{flip plot orientation (identities on x-axis)} \item{raster}{Convert points to raster format. Requires 'ggrastr' to be installed.} } \value{ A \code{\link[patchwork]{patchwork}ed} ggplot object if \code{combine = TRUE}; otherwise, a list of ggplot objects } \description{ Draws a violin plot of single cell data (gene expression, metrics, PC scores, etc.)
} \examples{ data("pbmc_small") VlnPlot(object = pbmc_small, features = 'PC_1') VlnPlot(object = pbmc_small, features = 'LYZ', split.by = 'groups') } \seealso{ \code{\link{FetchData}} } \concept{visualization} Seurat/man/CellsByImage.Rd0000644000176200001440000000120514024674706015067 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/objects.R \name{CellsByImage} \alias{CellsByImage} \title{Get a vector of cell names associated with an image (or set of images)} \usage{ CellsByImage(object, images = NULL, unlist = FALSE) } \arguments{ \item{object}{Seurat object} \item{images}{Vector of image names} \item{unlist}{Return as a single vector of cell names as opposed to a list, named by image name.} } \value{ A vector of cell names } \description{ Get a vector of cell names associated with an image (or set of images) } \examples{ \dontrun{ CellsByImage(object = object, images = "slice1") } } Seurat/man/FeaturePlot.Rd0000644000176200001440000001306114165416216015020 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/visualization.R \name{FeaturePlot} \alias{FeaturePlot} \alias{FeatureHeatmap} \title{Visualize 'features' on a dimensional reduction plot} \usage{ FeaturePlot( object, features, dims = c(1, 2), cells = NULL, cols = if (blend) { c("lightgrey", "#ff0000", "#00ff00") } else { c("lightgrey", "blue") }, pt.size = NULL, order = FALSE, min.cutoff = NA, max.cutoff = NA, reduction = NULL, split.by = NULL, keep.scale = "feature", shape.by = NULL, slot = "data", blend = FALSE, blend.threshold = 0.5, label = FALSE, label.size = 4, label.color = "black", repel = FALSE, ncol = NULL, coord.fixed = FALSE, by.col = TRUE, sort.cell = NULL, interactive = FALSE, combine = TRUE, raster = NULL, raster.dpi = c(512, 512) ) } \arguments{ \item{object}{Seurat object} \item{features}{Vector of features to plot. Features can come from: \itemize{ \item An \code{Assay} feature (e.g. a gene name - "MS4A1") \item A column name from meta.data (e.g. mitochondrial percentage - "percent.mito") \item A column name from a \code{DimReduc} object corresponding to the cell embedding values (e.g. the PC 1 scores - "PC_1") }} \item{dims}{Dimensions to plot, must be a two-length numeric vector specifying x- and y-dimensions} \item{cells}{Vector of cells to plot (default is all cells)} \item{cols}{The two colors to form the gradient over. Provide as string vector with the first color corresponding to low values, the second to high. Also accepts a Brewer color scale or vector of colors. Note: this will bin the data into number of colors provided. When blend is \code{TRUE}, takes anywhere from 1-3 colors: \describe{ \item{1 color:}{Treated as color for double-negatives, will use default colors 2 and 3 for per-feature expression} \item{2 colors:}{Treated as colors for per-feature expression, will use default color 1 for double-negatives} \item{3+ colors:}{First color used for double-negatives, colors 2 and 3 used for per-feature expression, all others ignored} }} \item{pt.size}{Adjust point size for plotting} \item{order}{Boolean determining whether to plot cells in order of expression. Can be useful if cells expressing given feature are getting buried.} \item{min.cutoff, max.cutoff}{Vector of minimum and maximum cutoff values for each feature, may specify quantile in the form of 'q##' where '##' is the quantile (eg, 'q1', 'q10')} \item{reduction}{Which dimensionality reduction to use. 
If not specified, first searches for umap, then tsne, then pca} \item{split.by}{A factor in object metadata to split the feature plot by, pass 'ident' to split by cell identity'; similar to the old \code{FeatureHeatmap}} \item{keep.scale}{How to handle the color scale across multiple plots. Options are: \itemize{ \item{"feature" (default; by row/feature scaling):}{ The plots for each individual feature are scaled to the maximum expression of the feature across the conditions provided to 'split.by'.} \item{"all" (universal scaling):}{ The plots for all features and conditions are scaled to the maximum expression value for the feature with the highest overall expression.} \item{NULL (no scaling):}{ Each individual plot is scaled to the maximum expression value of the feature in the condition provided to 'split.by'. Be aware setting NULL will result in color scales that are not comparable between plots.} }} \item{shape.by}{If NULL, all points are circles (default). You can specify any cell attribute (that can be pulled with FetchData) allowing for both different colors and different shapes on cells. Only applicable if \code{raster = FALSE}.} \item{slot}{Which slot to pull expression data from?} \item{blend}{Scale and blend expression values to visualize coexpression of two features} \item{blend.threshold}{The color cutoff from weak signal to strong signal; ranges from 0 to 1.} \item{label}{Whether to label the clusters} \item{label.size}{Sets size of labels} \item{label.color}{Sets the color of the label text} \item{repel}{Repel labels} \item{ncol}{Number of columns to combine multiple feature plots to, ignored if \code{split.by} is not \code{NULL}} \item{coord.fixed}{Plot cartesian coordinates with fixed aspect ratio} \item{by.col}{If splitting by a factor, plot the splits per column with the features as rows; ignored if \code{blend = TRUE}} \item{sort.cell}{Redundant with \code{order}. This argument is being deprecated. Please use \code{order} instead.} \item{interactive}{Launch an interactive \code{\link[Seurat:IFeaturePlot]{FeaturePlot}}} \item{combine}{Combine plots into a single \code{\link[patchwork]{patchwork}ed} ggplot object. If \code{FALSE}, return a list of ggplot objects} \item{raster}{Convert points to raster format, default is \code{NULL} which automatically rasterizes if plotting more than 100,000 cells} \item{raster.dpi}{Pixel resolution for rasterized plots, passed to geom_scattermore(). Default is c(512, 512).} } \value{ A \code{\link[patchwork]{patchwork}ed} ggplot object if \code{combine = TRUE}; otherwise, a list of ggplot objects } \description{ Colors single cells on a dimensional reduction plot according to a 'feature' (i.e. gene expression, PC scores, number of genes detected, etc.) } \note{ For the old \code{do.hover} and \code{do.identify} functionality, please see \code{HoverLocator} and \code{CellSelector}, respectively. 
} \examples{ data("pbmc_small") FeaturePlot(object = pbmc_small, features = 'PC_1') } \seealso{ \code{\link{DimPlot}} \code{\link{HoverLocator}} \code{\link{CellSelector}} } \concept{visualization} Seurat/man/TopNeighbors.Rd0000644000176200001440000000073614005656653015202 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/objects.R \name{TopNeighbors} \alias{TopNeighbors} \title{Get nearest neighbors for given cell} \usage{ TopNeighbors(object, cell, n = 5) } \arguments{ \item{object}{\code{\link{Neighbor}} object} \item{cell}{Cell of interest} \item{n}{Number of neighbors to return} } \value{ Returns a vector of cell names } \description{ Return a vector of cell names of the nearest n cells. } \concept{objects} Seurat/man/SaveAnnoyIndex.Rd0000644000176200001440000000057414005656653015472 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/utilities.R \name{SaveAnnoyIndex} \alias{SaveAnnoyIndex} \title{Save the Annoy index} \usage{ SaveAnnoyIndex(object, file) } \arguments{ \item{object}{A Neighbor object with the annoy index stored} \item{file}{Path to file to write index to} } \description{ Save the Annoy index } \concept{utilities} Seurat/man/RunPCA.Rd0000644000176200001440000000510214005656653013657 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/generics.R, R/dimensional_reduction.R \name{RunPCA} \alias{RunPCA} \alias{RunPCA.default} \alias{RunPCA.Assay} \alias{RunPCA.Seurat} \title{Run Principal Component Analysis} \usage{ RunPCA(object, ...) \method{RunPCA}{default}( object, assay = NULL, npcs = 50, rev.pca = FALSE, weight.by.var = TRUE, verbose = TRUE, ndims.print = 1:5, nfeatures.print = 30, reduction.key = "PC_", seed.use = 42, approx = TRUE, ... ) \method{RunPCA}{Assay}( object, assay = NULL, features = NULL, npcs = 50, rev.pca = FALSE, weight.by.var = TRUE, verbose = TRUE, ndims.print = 1:5, nfeatures.print = 30, reduction.key = "PC_", seed.use = 42, ... ) \method{RunPCA}{Seurat}( object, assay = NULL, features = NULL, npcs = 50, rev.pca = FALSE, weight.by.var = TRUE, verbose = TRUE, ndims.print = 1:5, nfeatures.print = 30, reduction.name = "pca", reduction.key = "PC_", seed.use = 42, ... ) } \arguments{ \item{object}{An object} \item{...}{Arguments passed to other methods and IRLBA} \item{assay}{Name of Assay PCA is being run on} \item{npcs}{Total Number of PCs to compute and store (50 by default)} \item{rev.pca}{By default computes the PCA on the cell x gene matrix. Setting to true will compute it on gene x cell matrix.} \item{weight.by.var}{Weight the cell embeddings by the variance of each PC (weights the gene loadings if rev.pca is TRUE)} \item{verbose}{Print the top genes associated with high/low loadings for the PCs} \item{ndims.print}{PCs to print genes for} \item{nfeatures.print}{Number of genes to print for each PC} \item{reduction.key}{dimensional reduction key, specifies the string before the number for the dimension names. PC by default} \item{seed.use}{Set a random seed. By default, sets the seed to 42. Setting NULL will not set a seed.} \item{approx}{Use truncated singular value decomposition to approximate PCA} \item{features}{Features to compute PCA on. If features=NULL, PCA will be run using the variable features for the Assay. Note that the features must be present in the scaled data. 
Any requested features that are not scaled or have 0 variance will be dropped, and the PCA will be run using the remaining features.} \item{reduction.name}{dimensional reduction name, pca by default} } \value{ Returns Seurat object with the PCA calculation stored in the reductions slot } \description{ Run a PCA dimensionality reduction. For details about stored PCA calculation parameters, see \code{PrintPCAParams}. } \concept{dimensional_reduction} Seurat/man/ColorDimSplit.Rd0000644000176200001440000001043514165416216015314 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/visualization.R \name{ColorDimSplit} \alias{ColorDimSplit} \title{Color dimensional reduction plot by tree split} \usage{ ColorDimSplit( object, node, left.color = "red", right.color = "blue", other.color = "grey50", ... ) } \arguments{ \item{object}{Seurat object} \item{node}{Node in cluster tree on which to base the split} \item{left.color}{Color for the left side of the split} \item{right.color}{Color for the right side of the split} \item{other.color}{Color for all other cells} \item{...}{ Arguments passed on to \code{\link[=DimPlot]{DimPlot}} \describe{ \item{\code{dims}}{Dimensions to plot, must be a two-length numeric vector specifying x- and y-dimensions} \item{\code{cells}}{Vector of cells to plot (default is all cells)} \item{\code{cols}}{Vector of colors, each color corresponds to an identity class. This may also be a single character or numeric value corresponding to a palette as specified by \code{\link[RColorBrewer]{brewer.pal.info}}. By default, ggplot2 assigns colors. We also include a number of palettes from the pals package. See \code{\link{DiscretePalette}} for details.} \item{\code{pt.size}}{Adjust point size for plotting} \item{\code{reduction}}{Which dimensionality reduction to use. If not specified, first searches for umap, then tsne, then pca} \item{\code{group.by}}{Name of one or more metadata columns to group (color) cells by (for example, orig.ident); pass 'ident' to group by identity class} \item{\code{split.by}}{Name of a metadata column to split plot by; see \code{\link{FetchData}} for more details} \item{\code{shape.by}}{If NULL, all points are circles (default). You can specify any cell attribute (that can be pulled with FetchData) allowing for both different colors and different shapes on cells. Only applicable if \code{raster = FALSE}.} \item{\code{order}}{Specify the order of plotting for the idents. This can be useful for crowded plots if points of interest are being buried. Provide either a full list of valid idents or a subset to be plotted last (on top)} \item{\code{shuffle}}{Whether to randomly shuffle the order of points. This can be useful for crowded plots if points of interest are being buried. (default is FALSE)} \item{\code{seed}}{Sets the seed if randomly shuffling the order of points.} \item{\code{label}}{Whether to label the clusters} \item{\code{label.size}}{Sets size of labels} \item{\code{label.color}}{Sets the color of the label text} \item{\code{label.box}}{Whether to put a box around the label text (geom_text vs geom_label)} \item{\code{repel}}{Repel labels} \item{\code{cells.highlight}}{A list of character or numeric vectors of cells to highlight. If only one group of cells desired, can simply pass a vector instead of a list. 
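# A minimal end-to-end sketch of the preprocessing chain RunPCA() expects
# (its features must be present in the scaled data), using the bundled
# pbmc_small object; the calls mirror those used in the package's own tests
# further below, so only their combination into one sequence is assumed here.
library(Seurat)
data("pbmc_small")
pbmc_small <- NormalizeData(object = pbmc_small, verbose = FALSE)
pbmc_small <- FindVariableFeatures(object = pbmc_small, verbose = FALSE)
pbmc_small <- ScaleData(object = pbmc_small, verbose = FALSE)
# features = NULL (the default) runs PCA on the variable features
pbmc_small <- RunPCA(object = pbmc_small, npcs = 30, verbose = FALSE)
# Inspect the stored reduction
Embeddings(object = pbmc_small[["pca"]])[1:5, 1:5]
Loadings(object = pbmc_small[["pca"]])[1:5, 1:5]
ElbowPlot(object = pbmc_small, ndims = 30)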
If set, colors selected cells to the color(s) in \code{cols.highlight} and other cells black (white if dark.theme = TRUE); will also resize to the size(s) passed to \code{sizes.highlight}} \item{\code{cols.highlight}}{A vector of colors to highlight the cells as; will repeat to the length groups in cells.highlight} \item{\code{sizes.highlight}}{Size of highlighted cells; will repeat to the length groups in cells.highlight} \item{\code{na.value}}{Color value for NA points when using custom scale} \item{\code{ncol}}{Number of columns for display when combining plots} \item{\code{combine}}{Combine plots into a single \code{\link[patchwork]{patchwork}ed} ggplot object. If \code{FALSE}, return a list of ggplot objects} \item{\code{raster}}{Convert points to raster format, default is \code{NULL} which automatically rasterizes if plotting more than 100,000 cells} \item{\code{raster.dpi}}{Pixel resolution for rasterized plots, passed to geom_scattermore(). Default is c(512, 512).} }} } \value{ Returns a DimPlot } \description{ Returns a DimPlot colored based on whether the cells fall in clusters to the left or to the right of a node split in the cluster tree. } \examples{ if (requireNamespace("ape", quietly = TRUE)) { data("pbmc_small") pbmc_small <- BuildClusterTree(object = pbmc_small, verbose = FALSE) PlotClusterTree(pbmc_small) ColorDimSplit(pbmc_small, node = 5) } } \seealso{ \code{\link{DimPlot}} } \concept{visualization} Seurat/man/HTODemux.Rd0000644000176200001440000000405414005656653014231 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/preprocessing.R \name{HTODemux} \alias{HTODemux} \title{Demultiplex samples based on data from cell 'hashing'} \usage{ HTODemux( object, assay = "HTO", positive.quantile = 0.99, init = NULL, nstarts = 100, kfunc = "clara", nsamples = 100, seed = 42, verbose = TRUE ) } \arguments{ \item{object}{Seurat object. Assumes that the hash tag oligo (HTO) data has been added and normalized.} \item{assay}{Name of the Hashtag assay (HTO by default)} \item{positive.quantile}{The quantile of inferred 'negative' distribution for each hashtag - over which the cell is considered 'positive'. Default is 0.99} \item{init}{Initial number of clusters for hashtags. Default is the # of hashtag oligo names + 1 (to account for negatives)} \item{nstarts}{nstarts value for k-means clustering (for kfunc = "kmeans"). 100 by default} \item{kfunc}{Clustering function for initial hashtag grouping. Default is "clara" for fast k-medoids clustering on large applications, also support "kmeans" for kmeans clustering} \item{nsamples}{Number of samples to be drawn from the dataset used for clustering, for kfunc = "clara"} \item{seed}{Sets the random seed. If NULL, seed is not set} \item{verbose}{Prints the output} } \value{ The Seurat object with the following demultiplexed information stored in the meta data: \describe{ \item{hash.maxID}{Name of hashtag with the highest signal} \item{hash.secondID}{Name of hashtag with the second highest signal} \item{hash.margin}{The difference between signals for hash.maxID and hash.secondID} \item{classification}{Classification result, with doublets/multiplets named by the top two highest hashtags} \item{classification.global}{Global classification result (singlet, doublet or negative)} \item{hash.ID}{Classification result where doublet IDs are collapsed} } } \description{ Assign sample-of-origin for each cell, annotate doublets. 
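# A rough usage sketch (not part of the original documentation): HTODemux()
# assumes a hashtag-oligo assay, here called "HTO", that has already been added
# to a hypothetical object `hashed.obj`; CLR normalization of that assay is the
# assumed preprocessing step.
hashed.obj <- NormalizeData(object = hashed.obj, assay = "HTO", normalization.method = "CLR")
hashed.obj <- HTODemux(object = hashed.obj, assay = "HTO", positive.quantile = 0.99)
# The demultiplexing calls described under \value{} are written to the metadata
head(x = hashed.obj[[]])
HTOHeatmap(object = hashed.obj, assay = "HTO")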
} \examples{ \dontrun{ object <- HTODemux(object) } } \seealso{ \code{\link{HTOHeatmap}} } \concept{preprocessing} Seurat/man/as.Seurat.Rd0000644000176200001440000000241014152476164014433 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/objects.R \name{as.Seurat.CellDataSet} \alias{as.Seurat.CellDataSet} \alias{as.Seurat.SingleCellExperiment} \title{Convert objects to \code{Seurat} objects} \usage{ \method{as.Seurat}{CellDataSet}(x, slot = "counts", assay = "RNA", verbose = TRUE, ...) \method{as.Seurat}{SingleCellExperiment}( x, counts = "counts", data = "logcounts", assay = NULL, project = "SingleCellExperiment", ... ) } \arguments{ \item{x}{An object to convert to class \code{Seurat}} \item{slot}{Slot to store expression data as} \item{assay}{Name of assays to convert; set to \code{NULL} for all assays to be converted} \item{verbose}{Show progress updates} \item{...}{Arguments passed to other methods} \item{counts}{name of the SingleCellExperiment assay to store as \code{counts}; set to \code{NULL} if only normalized data are present} \item{data}{name of the SingleCellExperiment assay to slot as \code{data}. Set to NULL if only counts are present} \item{project}{Project name for new Seurat object} } \value{ A \code{Seurat} object generated from \code{x} } \description{ Convert objects to \code{Seurat} objects } \seealso{ \code{\link[SeuratObject:as.Seurat]{SeuratObject::as.Seurat}} } \concept{objects} Seurat/DESCRIPTION0000644000176200001440000001244314170340713013227 0ustar liggesusersPackage: Seurat Version: 4.1.0 Date: 2022-01-14 Title: Tools for Single Cell Genomics Description: A toolkit for quality control, analysis, and exploration of single cell RNA sequencing data. 'Seurat' aims to enable users to identify and interpret sources of heterogeneity from single cell transcriptomic measurements, and to integrate diverse types of single cell data. See Satija R, Farrell J, Gennert D, et al (2015) , Macosko E, Basu A, Satija R, et al (2015) , Stuart T, Butler A, et al (2019) , and Hao, Hao, et al (2020) for more details. 
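# The as.Seurat() methods documented above carry no runnable example; a minimal
# round-trip sketch with the bundled pbmc_small object, guarded because the
# SingleCellExperiment package is only a suggested dependency.
library(Seurat)
data("pbmc_small")
if (requireNamespace("SingleCellExperiment", quietly = TRUE)) {
  sce <- as.SingleCellExperiment(x = pbmc_small)
  # "counts" and "logcounts" are the default assay names expected on the way back
  obj <- as.Seurat(x = sce, counts = "counts", data = "logcounts")
  obj
}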
Authors@R: c( person(given = "Andrew", family = "Butler", email = "abutler@nygenome.org", role = "ctb", comment = c(ORCID = "0000-0003-3608-0463")), person(given = "Saket", family = "Choudhary", email = "schoudhary@nygenome.org", role = "ctb", comment = c(ORCID = "0000-0001-5202-7633")), person(given = "Charlotte", family = "Darby", email = "cdarby@nygenome.org", role = "ctb", comment = c(ORCID = "0000-0003-2195-5300")), person(given = "Jeff", family = "Farrell", email = "jfarrell@g.harvard.edu", role = "ctb"), person(given = "Christoph", family = "Hafemeister", email = "chafemeister@nygenome.org", role = "ctb", comment = c(ORCID = "0000-0001-6365-8254")), person(given = "Yuhan", family = "Hao", email = "yhao@nygenome.org", role = "ctb", comment = c(ORCID = "0000-0002-1810-0822")), person(given = "Paul", family = "Hoffman", email = "seurat@nygenome.org", role = c("aut", "cre"), comment = c(ORCID = "0000-0002-7693-8957")), person(given = "Jaison", family = "Jain", email = "jjain@nygenome.org", role = "ctb", comment = c(ORCID = "0000-0002-9478-5018")), person(given = "Efthymia", family = "Papalexi", email = "epapalexi@nygenome.org", role = "ctb", comment = c(ORCID = "0000-0001-5898-694X")), person(given = "Patrick", family = "Roelli", email = "proelli@nygenome.org", role = "ctb"), person(given = "Rahul", family = "Satija", email = "rsatija@nygenome.org", role = "ctb", comment = c(ORCID = "0000-0001-9448-8833")), person(given = "Karthik", family = "Shekhar", email = "kshekhar@berkeley.edu", role = "ctb"), person(given = "Avi", family = "Srivastava", email = "asrivastava@nygenome.org", role = "ctb", comment = c(ORCID = "0000-0001-9798-2079")), person(given = "Tim", family = "Stuart", email = "tstuart@nygenome.org", role = "ctb", comment = c(ORCID = "0000-0002-3044-0897")), person(given = "Kristof", family = "Torkenczy", email = "", role = "ctb", comment = c(ORCID = "0000-0002-4869-7957")), person(given = "Shiwei", family = "Zheng", email = "szheng@nygenome.org", role = "ctb", comment = c(ORCID = "0000-0001-6682-6743")), person("Satija Lab and Collaborators", role = "fnd") ) URL: https://satijalab.org/seurat, https://github.com/satijalab/seurat BugReports: https://github.com/satijalab/seurat/issues Depends: R (>= 4.0.0), methods Imports: cluster, cowplot, fitdistrplus, future, future.apply, ggplot2 (>= 3.3.0), ggrepel, ggridges, graphics, grDevices, grid, httr, ica, igraph, irlba, jsonlite, KernSmooth, leiden (>= 0.3.1), lmtest, MASS, Matrix (>= 1.2-14), matrixStats, miniUI, patchwork, pbapply, plotly (>= 4.9.0), png, RANN, RColorBrewer, Rcpp (>= 1.0.7), RcppAnnoy (>= 0.0.18), reticulate, rlang, ROCR, Rtsne, scales, scattermore (>= 0.7), sctransform (>= 0.3.3), SeuratObject (>= 4.0.4), shiny, spatstat.core, spatstat.geom, stats, tibble, tools, utils, uwot (>= 0.1.9) LinkingTo: Rcpp (>= 0.11.0), RcppEigen, RcppProgress License: MIT + file LICENSE LazyData: true Collate: 'RcppExports.R' 'reexports.R' 'generics.R' 'clustering.R' 'visualization.R' 'convenience.R' 'data.R' 'differential_expression.R' 'dimensional_reduction.R' 'integration.R' 'mixscape.R' 'objects.R' 'preprocessing.R' 'tree.R' 'utilities.R' 'zzz.R' RoxygenNote: 7.1.2 Encoding: UTF-8 Suggests: ape, rsvd, testthat, hdf5r, S4Vectors, SummarizedExperiment, SingleCellExperiment, MAST, DESeq2, BiocGenerics, GenomicRanges, GenomeInfoDb, IRanges, rtracklayer, Rfast2, monocle, Biobase, VGAM, limma, metap, enrichR, mixtools, ggrastr NeedsCompilation: yes Packaged: 2022-01-14 17:49:20 UTC; paul Author: Andrew Butler [ctb] (), Saket Choudhary 
[ctb] (), Charlotte Darby [ctb] (), Jeff Farrell [ctb], Christoph Hafemeister [ctb] (), Yuhan Hao [ctb] (), Paul Hoffman [aut, cre] (), Jaison Jain [ctb] (), Efthymia Papalexi [ctb] (), Patrick Roelli [ctb], Rahul Satija [ctb] (), Karthik Shekhar [ctb], Avi Srivastava [ctb] (), Tim Stuart [ctb] (), Kristof Torkenczy [ctb] (), Shiwei Zheng [ctb] (), Satija Lab and Collaborators [fnd] Maintainer: Paul Hoffman Repository: CRAN Date/Publication: 2022-01-14 18:32:42 UTC
Seurat/tests/0000755000176200001440000000000013712563445012671 5ustar liggesusersSeurat/tests/testthat/0000755000176200001440000000000014170340712014516 5ustar liggesusersSeurat/tests/testthat/test_transferdata.R0000644000176200001440000001645614005656653020375 0ustar liggesusers# Tests for integration/transfer related fxns set.seed(42) pbmc_small <- suppressWarnings(UpdateSeuratObject(pbmc_small)) # Setup test objects ref <- pbmc_small query <- CreateSeuratObject( counts = GetAssayData(object = pbmc_small[['RNA']], slot = "counts") + rpois(n = ncol(pbmc_small), lambda = 1) ) query <- NormalizeData(object = query, verbose = FALSE) query <- FindVariableFeatures(object = query, verbose = FALSE, nfeatures = 100) ref <- FindVariableFeatures(object = ref, verbose = FALSE, nfeatures = 100) anchors <- FindTransferAnchors(reference = ref, query = query, k.filter = 50) # Tests for TransferData # ------------------------------------------------------------------------------ context("TransferData") preds.standard <- TransferData(anchorset = anchors, refdata = ref$RNA_snn_res.1, verbose = FALSE) test_that("TransferData default work", { # categorical metadata expect_equal(dim(preds.standard), c(80, 5)) expect_equal(colnames(preds.standard)[c(1, 5)], c("predicted.id", "prediction.score.max")) expect_equal(rownames(preds.standard), Cells(query)) expect_equal(preds.standard[1, 1], "1") expect_equal(preds.standard[1, 5], 0.4280746, tolerance = 1e-6) expect_equal(as.vector(rowSums(as.matrix(preds.standard[, 2:4]))), rep(1, times = ncol(query))) expect_true(inherits(preds.standard, "data.frame")) # continuous assay data pred.assay <- TransferData(anchorset = anchors, refdata = GetAssayData(ref[["RNA"]]), verbose = FALSE) expect_equal(dim(pred.assay), c(230, 80)) expect_equal(GetAssayData(pred.assay, slot = "counts"), new("matrix")) expect_equal(GetAssayData(pred.assay, slot = "scale.data"), new("matrix")) expect_equal(colnames(pred.assay), Cells(query)) expect_equal(rownames(pred.assay), rownames(ref[["RNA"]])) expect_equal(sum(GetAssayData(pred.assay)[1, ]), 64.46388, tolerance = 1e-6) expect_equal(sum(GetAssayData(pred.assay)[, 1]), 281.0306, tolerance = 1e-6) expect_true(inherits(pred.assay, "Assay")) expect_equal(pred.assay@var.features, logical(0)) expect_equal(ncol(pred.assay@meta.features), 0) }) test_that("TransferData can return predictions assay, ", { pred.assay <- TransferData(anchorset = anchors, refdata = ref$RNA_snn_res.1, prediction.assay = TRUE, verbose = FALSE) expect_true(inherits(pred.assay, "Assay")) expect_equal(dim(pred.assay), c(4, 80)) expect_equal(GetAssayData(pred.assay, slot = "counts"), new("matrix")) expect_equal(GetAssayData(pred.assay, slot = "scale.data"), new("matrix")) expect_equal(colnames(pred.assay), Cells(query)) expect_equal(pred.assay@var.features, logical(0)) expect_equal(ncol(pred.assay@meta.features), 0) expect_equal(sum(GetAssayData(pred.assay)[1, ]), 26.59365, tolerance = 1e-6) expect_equal(sum(GetAssayData(pred.assay)[, 1]), 1.428075, tolerance = 1e-6) expect_equal(as.vector(colSums(GetAssayData(pred.assay)[1:3, ])), rep(1, ncol(query))) }) test_that("TransferData handles weight.reduction properly, ", { skip_on_cran() # test for custom dimreduc custom.dr <- anchors@object.list[[1]][["pcaproject"]] custom.dr <- subset(x = custom.dr, cells = anchors@query.cells)
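# Not run -- a rough sketch (for orientation only) of the user-facing pattern
# these TransferData() tests exercise; the AddMetaData() step is an assumption
# about typical downstream use, not something verified by this test:
#   anchors <- FindTransferAnchors(reference = ref, query = query)
#   preds   <- TransferData(anchorset = anchors, refdata = ref$RNA_snn_res.1)
#   query   <- AddMetaData(object = query, metadata = preds)
# Passing `query =` directly to TransferData(), as in the tests further down,
# attaches the predictions to the query object in a single step instead.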
custom.dr <- RenameCells(object = custom.dr, new.names = sapply(X = Cells(custom.dr), FUN = function(x){ x <- gsub(pattern = "_query", replacement = "", x = x) })) expect_error(TransferData(anchorset = anchors, refdata = ref$RNA_snn_res.1, weight.reduction = custom.dr, dims = 1:100)) preds <-TransferData(anchorset = anchors, refdata = ref$RNA_snn_res.1, verbose = FALSE) cdr.preds <- TransferData(anchorset = anchors, refdata = ref$RNA_snn_res.1, weight.reduction = custom.dr, verbose = FALSE, dims = 1:30) expect_equal(preds, cdr.preds) # weight.reduction = "pca pca.preds <- TransferData(anchorset = anchors, refdata = ref$RNA_snn_res.1, query = query, weight.reduction = "pca", verbose = FALSE) expect_true(inherits(pca.preds, "Seurat")) expect_equal(sum(GetAssayData(pca.preds[['prediction.score.id']])[1, ]), 27.83330252, tolerance = 1e-6) # weight.reduction = "cca" anchors.cca <- FindTransferAnchors(reference = ref, query = query, k.filter = 50, reduction = "cca") cca.preds <- TransferData(anchorset = anchors.cca, refdata = ref$RNA_snn_res.1, weight.reduction = "cca", verbose = FALSE) expect_true(inherits(cca.preds, "data.frame")) expect_equal(sum(cca.preds[, 2]), 43.61738383, tolerance = 1e-6) }) test_that("TransferData with l2.norm works", { skip_on_cran() preds <- TransferData(anchorset = anchors, refdata = ref$RNA_snn_res.1, l2.norm = TRUE, verbose = FALSE) expect_equal(dim(preds), c(80, 5)) expect_equal(colnames(preds)[c(1, 5)], c("predicted.id", "prediction.score.max")) expect_equal(rownames(preds), Cells(query)) expect_equal(preds[1, 1], "0") expect_equal(preds[1, 5], 0.3973124793, tolerance = 1e-6) expect_equal(as.vector(rowSums(as.matrix(preds[, 2:4]))), rep(1, times = ncol(query))) expect_true(inherits(preds, "data.frame")) }) test_that("TransferData with other k.weight works", { skip_on_cran() preds <- TransferData(anchorset = anchors, refdata = ref$RNA_snn_res.1, k.weight = 10, verbose = FALSE) expect_equal(dim(preds), c(80, 5)) expect_equal(colnames(preds)[c(1, 5)], c("predicted.id", "prediction.score.max")) expect_equal(rownames(preds), Cells(query)) expect_equal(preds[1, 1], "2") expect_equal(preds[1, 5], 0.6145459065, tolerance = 1e-6) expect_equal(as.vector(rowSums(as.matrix(preds[, 2:4]))), rep(1, times = ncol(query))) expect_true(inherits(preds, "data.frame")) }) test_that("TransferData with reference specified works", { skip_on_cran() pred2 <- TransferData(anchorset = anchors, refdata = "RNA_snn_res.1", reference = ref, verbose = FALSE) expect_equal(preds.standard, pred2) }) test_that("TransferData throws expected errors ", { expect_error(TransferData(anchorset = anchors, refdata = ref$RNA_snn_res.1, weight.reduction = "BAD")) # better message expect_error(TransferData(anchorset = anchors, refdata = ref$RNA_snn_res.1, weight.reduction = "cca")) # better message expect_error(TransferData(anchorset = anchors, refdata = ref$RNA_snn_res.1, dims = 1:100)) expect_error(ransferData(anchorset = anchors, refdata = ref$RNA_snn_res.1, k.weight = 1000)) expect_error(suppressWarnings(TransferData(anchorset = anchors, refdata = "RNA_snn_res.1"))) expect_error(suppressWarnings(TransferData(anchorset = anchors, refdata = ref$RNA_snn_res.1[1:10]))) expect_error(TransferData(anchorset = anchors, refdata = ref$RNA_snn_res.1, query = subset(x = query, cells = Cells(query)[1:10]))) }) test_that("TransferData with multiple items to transfer works ", { skip_on_cran() preds <- TransferData(anchorset = anchors, refdata = list( ids = ref$RNA_snn_res.1, groups = ref$groups, dat = 
GetAssayData(ref[["RNA"]])), verbose = FALSE) expect_equal(length(preds), 3) expect_equal(preds[[1]], preds.standard) }) test_that("TransferData can return a modified query object ", { query <- TransferData(anchorset = anchors, refdata = ref$RNA_snn_res.1, query = query, verbose = FALSE) expect_true("prediction.score.id" %in% Assays(query)) expect_true("predicted.id" %in% colnames(query[[]])) expect_true("predicted.id.score" %in% colnames(query[[]])) query <- TransferData(anchorset = anchors, refdata = ref$RNA_snn_res.1, query = query, store.weights = TRUE, verbose = FALSE) expect_equal(dim(Tool(query, slot = "TransferData")$weights.matrix), c(128, 80)) }) Seurat/tests/testthat/test_differential_expression.R0000644000176200001440000003440114024674706022630 0ustar liggesusers# Tests for functions in differential_expression.R suppressWarnings(RNGversion(vstr = "3.5.3")) set.seed(seed = 42) # Tests for FindMarkers default parameters # -------------------------------------------------------------------------------- context("FindMarkers") markers.0 <- suppressWarnings(FindMarkers(object = pbmc_small, ident.1 = 0, verbose = FALSE, base = exp(1))) markers.01 <- suppressWarnings(FindMarkers(object = pbmc_small, ident.1 = 0, ident.2 = 1, verbose = FALSE, base = exp(1))) test_that("Default settings work as expected", { expect_error(FindMarkers(object = pbmc_small)) expect_error(FindMarkers(object = pbmc_small, ident.1 = "test")) expect_error(FindMarkers(object = pbmc_small, ident.1 = 0, ident.2 = "test")) expect_equal(colnames(x = markers.0), c("p_val", "avg_logFC", "pct.1", "pct.2", "p_val_adj")) expect_equal(markers.0[1, "p_val"], 9.572778e-13) expect_equal(markers.0[1, "avg_logFC"], -4.034691, tolerance = 1e-6) expect_equal(markers.0[1, "pct.1"], 0.083) expect_equal(markers.0[1, "pct.2"], 0.909) expect_equal(markers.0[1, "p_val_adj"], 2.201739e-10) expect_equal(nrow(x = markers.0), 204) expect_equal(rownames(markers.0)[1], "HLA-DPB1") expect_equal(markers.01[1, "p_val"], 1.702818e-11) expect_equal(markers.01[1, "avg_logFC"], -2.539289, tolerance = 1e-6) expect_equal(markers.01[1, "pct.1"], 0.111) expect_equal(markers.01[1, "pct.2"], 1.00) expect_equal(markers.01[1, "p_val_adj"], 3.916481e-09) expect_equal(nrow(x = markers.01), 201) expect_equal(rownames(x = markers.01)[1], "TYMP") }) tymp.results <- suppressWarnings(FindMarkers(object = pbmc_small, ident.1 = 0, features = "TYMP", verbose = FALSE, base = exp(1))) vargenes.results <- suppressWarnings(FindMarkers(object = pbmc_small, ident.1 = 0, features = VariableFeatures(object = pbmc_small), verbose = FALSE, base = exp(1))) test_that("features parameter behaves correctly ", { expect_equal(nrow(x = tymp.results), 1) expect_equal(tymp.results[1, "p_val"], 3.227445e-07) expect_equal(tymp.results[1, "avg_logFC"], -2.093928, tolerance = 1e-6) expect_equal(tymp.results[1, "pct.1"], 0.111) expect_equal(tymp.results[1, "pct.2"], 0.682) expect_equal(tymp.results[1, "p_val_adj"], 7.423123e-05) expect_equal(rownames(x = tymp.results)[1], "TYMP") expect_equal(nrow(x = vargenes.results), 19) expect_equal(vargenes.results[19, "p_val"], 4.225151e-01, tolerance = 1e-6) expect_equal(vargenes.results[19, "avg_logFC"], 1.5976958, tolerance = 1e-6) expect_equal(vargenes.results[19, "pct.1"], 0.139) expect_equal(vargenes.results[19, "pct.2"], 0.091) expect_equal(vargenes.results[19, "p_val_adj"], 1.000000e+00) expect_equal(rownames(x = vargenes.results)[19], "PARVB") }) results <- suppressWarnings(FindMarkers(object = pbmc_small, ident.1 = Cells(x = 
pbmc_small)[1:40], ident.2 = Cells(x = pbmc_small)[41:80], verbose = FALSE, base = exp(1))) test_that("passing cell names works", { expect_equal(nrow(x = results), 190) expect_equal(results[1, "p_val"], 0.0001690882) expect_equal(results[1, "avg_logFC"], -1.790824, tolerance = 1e-6) expect_equal(results[1, "pct.1"], 0.075) expect_equal(results[1, "pct.2"], 0.450) expect_equal(results[1, "p_val_adj"], 0.03889028) expect_equal(rownames(x = results)[1], "IFI30") }) results <- suppressWarnings(FindMarkers(object = pbmc_small, ident.1 = 0, ident.2 = 1, logfc.threshold = 2, verbose = FALSE, base = exp(1))) test_that("logfc.threshold works", { expect_equal(nrow(x = results), 112) expect_gte(min(abs(x = results$avg_logFC)), 2) }) results <- expect_warning(FindMarkers(object = pbmc_small, ident.1 = 0, ident.2 = 1, logfc.threshold = 100, verbose = FALSE, base = exp(1))) test_that("logfc.threshold warns when none met", { expect_equal(nrow(x = results), 0) }) results <- suppressWarnings(FindMarkers(object = pbmc_small, ident.1 = 0, ident.2 = 1, min.pct = 0.5, verbose = FALSE, base = exp(1))) test_that("min.pct works", { expect_equal(nrow(x = results), 65) expect_gte(min(apply(X = results, MARGIN = 1, FUN = function(x) max(x[3], x[4]))), 0.5) }) results <- expect_warning(FindMarkers(object = pbmc_small, ident.1 = 0, ident.2 = 1, min.pct = 2.0, verbose = FALSE, base = exp(1))) test_that("min.pct warns when none met", { expect_equal(nrow(x = results), 0) }) results <- suppressWarnings(FindMarkers(object = pbmc_small, ident.1 = 0, ident.2 = 1, min.diff.pct = 0.5, verbose = FALSE, base = exp(1))) test_that("min.diff.pct works", { expect_equal(nrow(x = results), 44) expect_gte(min(apply(X = results, MARGIN = 1, FUN = function(x) abs(x[4] - x[3]))), 0.5) }) results <- expect_warning(FindMarkers(object = pbmc_small, ident.1 = 0, ident.2 = 1, min.diff.pct = 1.0, verbose = FALSE, base = exp(1))) test_that("min.diff.pct warns when none met", { expect_equal(nrow(x = results), 0) }) results <- suppressWarnings(FindMarkers(object = pbmc_small, ident.1 = 0, ident.2 = 1, only.pos = TRUE, verbose = FALSE, base = exp(1))) test_that("only.pos works", { expect_equal(nrow(x = results), 116) expect_true(all(results$avg_logFC > 0)) }) results <- suppressWarnings(FindMarkers(object = pbmc_small, ident.1 = 0, ident.2 = 1, max.cells.per.ident = 20, verbose = FALSE, base = exp(1))) test_that("max.cells.per.ident works", { expect_equal(nrow(x = results), 201) expect_equal(results[1, "p_val"], 3.428568e-08) expect_equal(results[1, "avg_logFC"], -2.539289, tolerance = 1e-6) expect_equal(results[1, "pct.1"], 0.111) expect_equal(results[1, "pct.2"], 1) expect_equal(results[1, "p_val_adj"], 7.885706e-06) expect_equal(rownames(x = results)[1], "TYMP") }) results <- suppressWarnings(FindMarkers(object = pbmc_small, ident.1 = 0, ident.2 = 1, latent.vars= "groups", verbose = FALSE, test.use = 'LR', base = exp(1))) test_that("latent.vars works", { expect_error(FindMarkers(object = pbmc_small, ident.1 = 0, ident.2 = 1, latent.vars= "fake", verbose = FALSE)) expect_warning(FindMarkers(object = pbmc_small, ident.1 = 0, ident.2 = 1, latent.vars= "groups", verbose = FALSE)) expect_equal(nrow(x = results), 201) expect_equal(results[1, "p_val"], 2.130202e-16) expect_equal(results[1, "avg_logFC"], -3.082150, tolerance = 1e-6) expect_equal(results[1, "pct.1"], 0.417) expect_equal(results[1, "pct.2"], 1) expect_equal(results[1, "p_val_adj"], 4.899466e-14) expect_equal(rownames(x = results)[1], "LYZ") }) results <- 
suppressWarnings(FindMarkers(object = pbmc_small, ident.1 = "g1", ident.2 = "g2", group.by= "groups", verbose = FALSE, base = exp(1))) t2 <- pbmc_small Idents(object = t2) <- "groups" results2 <- suppressWarnings(FindMarkers(object = t2, ident.1 = "g1", ident.2 = "g2", verbose = FALSE, base = exp(1))) test_that("group.by works", { expect_equal(nrow(x = results), 136) expect_equal(results, results2) expect_equal(results[1, "p_val"], 0.02870319) expect_equal(results[1, "avg_logFC"], 0.8226720, tolerance = 1e-6) expect_equal(results[1, "pct.1"], 0.455) expect_equal(results[1, "pct.2"], 0.194) expect_equal(results[1, "p_val_adj"], 1) expect_equal(rownames(x = results)[1], "NOSIP") }) results <- suppressWarnings(FindMarkers(object = pbmc_small, ident.1 = "g1", ident.2 = "g2", group.by= "groups", subset.ident = 0, verbose = FALSE, base = exp(1))) t2 <- subset(x = pbmc_small, idents = 0) Idents(object = t2) <- "groups" results2 <- suppressWarnings(FindMarkers(object = t2, ident.1 = "g1", ident.2 = "g2", verbose = FALSE, base = exp(1))) test_that("subset.ident works", { expect_equal(nrow(x = results), 127) expect_equal(results, results2) expect_equal(results[1, "p_val"], 0.01293720) expect_equal(results[1, "avg_logFC"], 1.799280, tolerance = 1e-6) expect_equal(results[1, "pct.1"], 0.50) expect_equal(results[1, "pct.2"], 0.125) expect_equal(results[1, "p_val_adj"], 1) expect_equal(rownames(x = results)[1], "TSPO") }) results <- suppressWarnings(FindMarkers(object = pbmc_small, ident.1 = 0, ident.2 = 1, reduction = "pca", verbose = FALSE, base = exp(1))) test_that("reduction works", { expect_equal(results[1, "p_val"], 1.664954e-10) expect_equal(results[1, "avg_diff"], -2.810453669, tolerance = 1e-6) expect_equal(results[1, "p_val_adj"], 3.163412e-09) expect_equal(rownames(x = results)[1], "PC_2") }) results <- FindMarkers(object = pbmc_small, ident.1 = 0, ident.2 = 1, test.use = "bimod", verbose = FALSE, base = exp(1)) test_that("bimod test works", { expect_equal(nrow(x = results), 201) expect_equal(results[1, "p_val"], 4.751376e-17) expect_equal(results[1, "avg_logFC"], -2.552769, tolerance = 1e-6) expect_equal(results[1, "pct.1"], 0.306) expect_equal(results[1, "pct.2"], 1.00) expect_equal(results[1, "p_val_adj"], 1.092816e-14) expect_equal(rownames(x = results)[1], "CST3") }) results <- FindMarkers(object = pbmc_small, ident.1 = 0, ident.2 = 1, test.use = "roc", verbose = FALSE, base = exp(1)) test_that("roc test works", { expect_equal(nrow(x = results), 201) # expect_equal(colnames(x = results), c("myAUC", "avg_diff", "power", "pct.1", "pct.2")) expect_equal(colnames(x = results), c("myAUC", "avg_diff", "power", "avg_logFC", "pct.1", "pct.2")) expect_equal(results["CST3", "myAUC"], 0.018) expect_equal(results["CST3", "avg_diff"], -2.552769, tolerance = 1e-6) expect_equal(results["CST3", "power"], 0.964) expect_equal(results["CST3", "pct.1"], 0.306) expect_equal(results["CST3", "pct.2"], 1.00) expect_equal(rownames(x = results)[1], "LYZ") }) results <- FindMarkers(object = pbmc_small, ident.1 = 0, ident.2 = 1, test.use = "t", verbose = FALSE, base = exp(1)) test_that("bimod test works", { expect_equal(nrow(x = results), 201) expect_equal(results["CST3", "p_val"], 1.170112e-15) expect_equal(results["CST3", "avg_logFC"], -2.552769 , tolerance = 1e-6) expect_equal(results["CST3", "pct.1"], 0.306) expect_equal(results["CST3", "pct.2"], 1.00) expect_equal(results["CST3", "p_val_adj"], 2.691258e-13) expect_equal(rownames(x = results)[1], "TYMP") }) results <- suppressWarnings(FindMarkers(object = 
pbmc_small, ident.1 = 0, ident.2 = 1, test.use = "negbinom", verbose = FALSE, base = exp(1))) test_that("negbinom test works", { expect_equal(nrow(x = results), 149) expect_equal(results["CST3", "p_val"], 1.354443e-17) expect_equal(results["CST3", "avg_logFC"], -2.353701, tolerance = 1e-6) expect_equal(results["CST3", "pct.1"], 0.306) expect_equal(results["CST3", "pct.2"], 1.00) expect_equal(results["CST3", "p_val_adj"], 3.115218e-15) expect_equal(rownames(x = results)[1], "LYZ") }) results <- suppressWarnings(FindMarkers(object = pbmc_small, ident.1 = 0, ident.2 = 1, test.use = "poisson", verbose = FALSE, base = exp(1))) test_that("poisson test works", { expect_equal(nrow(x = results), 149) expect_equal(results["CST3", "p_val"], 3.792196e-78) expect_equal(results["CST3", "avg_logFC"], -2.353701, tolerance = 1e-6) expect_equal(results["CST3", "pct.1"], 0.306) expect_equal(results["CST3", "pct.2"], 1.00) expect_equal(results["CST3", "p_val_adj"], 8.722050e-76) expect_equal(rownames(x = results)[1], "LYZ") }) results <- suppressWarnings(FindMarkers(object = pbmc_small, ident.1 = 0, ident.2 = 1, test.use = "LR", verbose = FALSE, base = exp(1))) test_that("LR test works", { expect_equal(nrow(x = results), 201) expect_equal(results["CST3", "p_val"], 3.990707e-16) expect_equal(results["CST3", "avg_logFC"], -2.552769, tolerance = 1e-6) expect_equal(results["CST3", "pct.1"], 0.306) expect_equal(results["CST3", "pct.2"], 1.00) expect_equal(results["CST3", "p_val_adj"], 9.178625e-14) expect_equal(rownames(x = results)[1], "LYZ") }) # Tests for FindConservedMarkers # ------------------------------------------------------------------------------- if (requireNamespace('metap', quietly = TRUE)) { context("FindConservedMarkers") pbmc_small$groups markers <- suppressWarnings(FindConservedMarkers(object = pbmc_small, ident.1 = 0, grouping.var = "groups", verbose = FALSE, base = exp(1))) standard.names <- c("p_val", "avg_logFC", "pct.1", "pct.2", "p_val_adj") test_that("FindConservedMarkers works", { expect_equal(colnames(x = markers), c(paste0("g2_", standard.names), paste0("g1_", standard.names), "max_pval", "minimump_p_val")) expect_equal(markers[1, "g2_p_val"], 4.983576e-05) expect_equal(markers[1, "g2_avg_logFC"], -4.125279, tolerance = 1e-6) # expect_equal(markers[1, "g2_pct.1"], 0.062) expect_equal(markers[1, "g2_pct.2"], 0.75) expect_equal(markers[1, "g2_p_val_adj"], 0.0114622238) expect_equal(markers[1, "g1_p_val"], 3.946643e-08) expect_equal(markers[1, "g1_avg_logFC"], -3.589384, tolerance = 1e-6) expect_equal(markers[1, "g1_pct.1"], 0.10) expect_equal(markers[1, "g1_pct.2"], 0.958) expect_equal(markers[1, "g1_p_val_adj"], 9.077279e-06) expect_equal(markers[1, "max_pval"], 4.983576e-05) expect_equal(markers[1, "minimump_p_val"], 7.893286e-08) expect_equal(nrow(markers), 179) expect_equal(rownames(markers)[1], "HLA-DRB1") expect_equal(markers[, "max_pval"], unname(obj = apply(X = markers, MARGIN = 1, FUN = function(x) max(x[c("g1_p_val", "g2_p_val")])))) }) test_that("FindConservedMarkers errors when expected", { expect_error(FindConservedMarkers(pbmc_small)) expect_error(FindConservedMarkers(pbmc_small, ident.1 = 0)) expect_error(FindConservedMarkers(pbmc_small, ident.1 = 0, grouping.var = "groups", meta.method = "minimump")) }) pbmc.test <- pbmc_small Idents(object = pbmc.test) <- "RNA_snn_res.1" pbmc.test$id.group <- paste0(pbmc.test$RNA_snn_res.1, "_", pbmc.test$groups) pbmc.test <- subset(x = pbmc.test, id.group == "0_g1", invert = TRUE) markers.missing <- 
suppressWarnings(FindConservedMarkers(object = pbmc.test, ident.1 = 0, grouping.var = "groups", test.use = "t", verbose = FALSE, base = exp(1))) test_that("FindConservedMarkers handles missing idents in certain groups", { expect_warning(FindConservedMarkers(object = pbmc.test, ident.1 = 0, grouping.var = "groups", test.use = "t")) expect_equal(colnames(x = markers.missing), paste0("g2_", standard.names)) expect_equal(markers.missing[1, "g2_p_val"], 1.672911e-13) expect_equal(markers.missing[1, "g2_avg_logFC"], -4.527888, tolerance = 1e-6) # expect_equal(markers.missing[1, "g2_pct.1"], 0.062) expect_equal(markers.missing[1, "g2_pct.2"], 0.95) expect_equal(markers.missing[1, "g2_p_val_adj"], 3.847695e-11) expect_equal(nrow(markers.missing), 205) expect_equal(rownames(markers.missing)[1], "HLA-DPB1") }) } Seurat/tests/testthat/test_integration.R0000644000176200001440000005213414024674706020243 0ustar liggesusers# Tests for integration/transfer related fxns set.seed(42) pbmc_small <- suppressWarnings(UpdateSeuratObject(pbmc_small)) # Setup test objects ref <- pbmc_small query <- CreateSeuratObject( counts = GetAssayData(object = pbmc_small[['RNA']], slot = "counts") + rpois(n = ncol(pbmc_small), lambda = 1) ) query <- NormalizeData(object = query, verbose = FALSE) query <- FindVariableFeatures(object = query, verbose = FALSE, nfeatures = 100) ref <- FindVariableFeatures(object = ref, verbose = FALSE, nfeatures = 100) # Tests for FindTransferAnchors # ------------------------------------------------------------------------------ context("FindTransferAnchors") test_that("FindTransferAnchors defaults work", { anchors <- FindTransferAnchors(reference = ref, query = query, k.filter = 50) co <- anchors@object.list[[1]] expect_equal(dim(co), c(100, 160)) expect_equal(Reductions(co), c("pcaproject", "pcaproject.l2")) expect_equal(GetAssayData(co[["RNA"]])[1, 3], 0) expect_equal(GetAssayData(co[["RNA"]], slot = "counts")[1, 3], 0) expect_equal(dim(co[['pcaproject']]), c(160, 30)) expect_equal(Embeddings(co[['pcaproject']])[1, 1], 0.4840944592, tolerance = 1e-7) expect_equal(Loadings(co[['pcaproject']], projected = T)[1, 1], 0.2103563963, tolerance = 1e-7) expect_equal(dim(co[['pcaproject.l2']]), c(160, 30)) expect_equal(Embeddings(co[['pcaproject.l2']])[1, 1], 0.05175486778, tolerance = 1e-7) expect_equal(Loadings(co[['pcaproject.l2']], projected = T)[1, 1], 0.2103563963, tolerance = 1e-7) ref.cells <- paste0(Cells(ref), "_reference") query.cells <- paste0(Cells(query), "_query") expect_equal(anchors@reference.cells, ref.cells) expect_equal(anchors@query.cells, query.cells) expect_equal(anchors@reference.objects, logical()) anchor.mat <- anchors@anchors expect_equal(dim(anchor.mat), c(128, 3)) expect_equal(as.vector(anchor.mat[1, ]), c(5, 5, 0.08361970218), tolerance = 1e-7) expect_equal(max(anchor.mat[, 2]), 80) expect_null(anchors@offsets) expect_equal(length(anchors@anchor.features), 100) expect_equal(anchors@anchor.features[1], "PPBP") expect_equal(anchors@neighbors, list()) }) test_that("FindTransferAnchors catches bad input", { expect_error(FindTransferAnchors(reference = ref, query = query, reference.assay = "BAD", k.filter = 50)) expect_error(FindTransferAnchors(reference = ref, query = query, query.assay = "BAD", k.filter = 50)) expect_error(FindTransferAnchors(reference = ref, query = query, normalization.method = "BAD", k.filter = 50)) expect_error(FindTransferAnchors(reference = ref, query = query, reduction = "BAD", k.filter = 50)) expect_error(FindTransferAnchors(reference = ref, query = 
query, npcs = NULL, k.filter = 50)) expect_error(FindTransferAnchors(reference = ref, query = query, npcs = NULL, reference.reduction = "BAD", k.filter = 50)) expect_error(suppressWarngings(FindTransferAnchors(reference = ref, query = query, dims = 1:100, k.filter = 50))) expect_error(suppressWarnings(FindTransferAnchors(reference = ref, query = query, dims = 1:100, project.query = TRUE, k.filter = 50))) expect_error(FindTransferAnchors(reference = ref, query = query, k.anchor = 80, k.filter = 50)) expect_warning(FindTransferAnchors(reference = ref, query = query, k.filter = 81)) expect_error(FindTransferAnchors(reference = ref, query = query, k.filter = 50, k.score = 80)) expect_error(suppressWarnings(FindTransferAnchors(reference = ref, query = query, k.filter = 50, features = "BAD"))) expect_error(FindTransferAnchors(reference = ref, query = query, k.filter = 50, reduction = "cca", project.query = TRUE)) expect_error(FindTransferAnchors(reference = ref, query = query, reference.reduction = "BAD", k.filter = 50)) expect_error(FindTransferAnchors(reference = ref, query = query, reference.reduction = "BAD", project.query = TRUE, k.filter = 50)) }) ref <- ScaleData(ref, verbose = FALSE) ref <- suppressWarnings(RunPCA(ref, npcs = 30, verbose = FALSE)) test_that("FindTransferAnchors allows reference.reduction to be precomputed", { skip_on_cran() anchors <- FindTransferAnchors(reference = ref, query = query, k.filter = 50, reference.reduction = "pca") expect_error(FindTransferAnchors(reference = ref, query = query, k.filter = 50, reference.reduction = "pca", reduction = "cca")) expect_error(FindTransferAnchors(reference = ref, query = query, k.filter = 50, reference.reduction = "pca", project.query = TRUE)) co <- anchors@object.list[[1]] expect_equal(dim(co), c(100, 160)) expect_equal(Reductions(co), c("pcaproject", "pcaproject.l2")) expect_equal(GetAssayData(co[["RNA"]])[1, 3], 0) expect_equal(GetAssayData(co[["RNA"]], slot = "counts")[1, 3], 0) expect_equal(dim(co[['pcaproject']]), c(160, 30)) expect_equal(Embeddings(co[['pcaproject']])[1, 1], 0.4840944592, tolerance = 1e-7) expect_equal(Loadings(co[['pcaproject']], projected = T)[1, 1], 0.2103563963, tolerance = 1e-7) expect_equal(dim(co[['pcaproject.l2']]), c(160, 30)) expect_equal(Embeddings(co[['pcaproject.l2']])[1, 1], 0.05175486778, tolerance = 1e-7) expect_equal(Loadings(co[['pcaproject.l2']], projected = T)[1, 1], 0.2103563963, tolerance = 1e-7) ref.cells <- paste0(Cells(ref), "_reference") query.cells <- paste0(Cells(query), "_query") expect_equal(anchors@reference.cells, ref.cells) expect_equal(anchors@query.cells, query.cells) expect_equal(anchors@reference.objects, logical()) anchor.mat <- anchors@anchors expect_equal(dim(anchor.mat), c(128, 3)) expect_equal(as.vector(anchor.mat[1, ]), c(5, 5, 0.08361970218), tolerance = 1e-7) expect_equal(max(anchor.mat[, 2]), 80) expect_null(anchors@offsets) expect_equal(length(anchors@anchor.features), 100) expect_equal(anchors@anchor.features[1], "PPBP") expect_equal(anchors@neighbors, list()) }) test_that("FindTransferAnchors with cca defaults work", { skip_on_cran() anchors <- FindTransferAnchors(reference = ref, query = query, reduction = "cca", k.filter = 50) co <- anchors@object.list[[1]] expect_equal(dim(co), c(100, 160)) expect_equal(Reductions(co), c("cca", "cca.l2")) expect_equal(GetAssayData(co[["RNA"]])["PPBP", 3], 0) expect_equal(GetAssayData(co[["RNA"]])["PPBP", 1], 0) expect_equal(GetAssayData(co[["RNA"]], slot = "counts")["PPBP", 3], 0) expect_equal(GetAssayData(co[["RNA"]], 
slot = "counts")["PPBP", 1], 0) expect_equal(dim(co[['cca']]), c(160, 30)) expect_equal(Embeddings(co[['cca']])[1, 1], 0.04611130861, tolerance = 1e-7) expect_equal(Loadings(co[['cca']], projected = T)["PPBP", 1], 12.32379661, tolerance = 1e-7) expect_equal(dim(co[['cca.l2']]), c(160, 30)) expect_equal(Embeddings(co[['cca.l2']])[1, 1], 0.06244169641, tolerance = 1e-7) expect_equal(Loadings(co[['cca.l2']], projected = T)["PPBP", 1], 12.32379661, tolerance = 1e-7) ref.cells <- paste0(Cells(ref), "_reference") query.cells <- paste0(Cells(query), "_query") expect_equal(anchors@reference.cells, ref.cells) expect_equal(anchors@query.cells, query.cells) expect_equal(anchors@reference.objects, logical()) anchor.mat <- anchors@anchors expect_equal(dim(anchor.mat), c(324, 3)) expect_equal(as.vector(anchor.mat[1, ]), c(1, 1, 0.8211091234), tolerance = 1e-7) expect_equal(max(anchor.mat[, 2]), 80) expect_null(anchors@offsets) expect_equal(length(anchors@anchor.features), 100) expect_equal(anchors@anchor.features[1], "PPBP") expect_equal(anchors@neighbors, list()) }) test_that("FindTransferAnchors with project.query defaults work", { skip_on_cran() anchors <- FindTransferAnchors(reference = ref, query = query, project.query = TRUE, k.filter = 50) co <- anchors@object.list[[1]] expect_equal(dim(co), c(100, 160)) expect_equal(Reductions(co), c("pcaproject", "pcaproject.l2")) expect_equal(GetAssayData(co[["RNA"]])["PPBP", 3], 0) expect_equal(GetAssayData(co[["RNA"]])["PPBP", 1], 0) expect_equal(GetAssayData(co[["RNA"]], slot = "counts")["PPBP", 3], 0) expect_equal(GetAssayData(co[["RNA"]], slot = "counts")["PPBP", 1], 0) expect_equal(dim(co[['pcaproject']]), c(160, 30)) expect_equal(Embeddings(co[['pcaproject']])[1, 1], 1.577959404, tolerance = 1e-7) expect_equal(Loadings(co[['pcaproject']], projected = T)["PPBP", 1], 0.1145472305, tolerance = 1e-7) expect_equal(dim(co[['pcaproject.l2']]), c(160, 30)) expect_equal(Embeddings(co[['pcaproject.l2']])[1, 1], 0.1358602536, tolerance = 1e-7) expect_equal(Loadings(co[['pcaproject.l2']], projected = T)["PPBP", 1], 0.1145472305, tolerance = 1e-7) ref.cells <- paste0(Cells(ref), "_reference") query.cells <- paste0(Cells(query), "_query") expect_equal(anchors@reference.cells, ref.cells) expect_equal(anchors@query.cells, query.cells) expect_equal(anchors@reference.objects, logical()) anchor.mat <- anchors@anchors expect_equal(dim(anchor.mat), c(208, 3)) expect_equal(as.vector(anchor.mat[1, ]), c(1, 10, 0.4984040128), tolerance = 1e-7) expect_equal(max(anchor.mat[, 2]), 80) expect_null(anchors@offsets) expect_equal(length(anchors@anchor.features), 100) expect_equal(anchors@anchor.features[1], "GZMA") expect_equal(anchors@neighbors, list()) }) query <- ScaleData(query, verbose = FALSE) query <- suppressWarnings(RunPCA(query, npcs = 30, verbose = FALSE)) test_that("FindTransferAnchors with project.query and reference.reduction works", { skip_on_cran() anchors <- FindTransferAnchors(reference = ref, query = query, k.filter = 50, reference.reduction = "pca", project.query = TRUE) co <- anchors@object.list[[1]] expect_equal(dim(co), c(100, 160)) expect_equal(Reductions(co), c("pcaproject", "pcaproject.l2")) expect_equal(GetAssayData(co[["RNA"]])["PPBP", 3], 0) expect_equal(GetAssayData(co[["RNA"]])["PPBP", 1], 0) expect_equal(GetAssayData(co[["RNA"]], slot = "counts")["PPBP", 3], 0) expect_equal(GetAssayData(co[["RNA"]], slot = "counts")["PPBP", 1], 0) expect_equal(dim(co[['pcaproject']]), c(160, 30)) expect_equal(Embeddings(co[['pcaproject']])[1, 1], 1.577959404, tolerance 
= 1e-7) expect_equal(Loadings(co[['pcaproject']], projected = T)["PPBP", 1], 0.1145472305, tolerance = 1e-7) expect_equal(dim(co[['pcaproject.l2']]), c(160, 30)) expect_equal(Embeddings(co[['pcaproject.l2']])[1, 1], 0.1358602536, tolerance = 1e-7) expect_equal(Loadings(co[['pcaproject.l2']], projected = T)["PPBP", 1], 0.1145472305, tolerance = 1e-7) ref.cells <- paste0(Cells(ref), "_reference") query.cells <- paste0(Cells(query), "_query") expect_equal(anchors@reference.cells, ref.cells) expect_equal(anchors@query.cells, query.cells) expect_equal(anchors@reference.objects, logical()) anchor.mat <- anchors@anchors expect_equal(dim(anchor.mat), c(208, 3)) expect_equal(as.vector(anchor.mat[1, ]), c(1, 10, 0.4984040128), tolerance = 1e-7) expect_equal(max(anchor.mat[, 2]), 80) expect_null(anchors@offsets) expect_equal(length(anchors@anchor.features), 100) expect_equal(anchors@anchor.features[1], "GZMA") expect_equal(anchors@neighbors, list()) }) ref <- FindNeighbors(object = ref, reduction = "pca", dims = 1:30, return.neighbor = TRUE, k.param = 31, verbose = FALSE, l2.norm = TRUE, nn.method = "annoy") test_that("FindTransferAnchors with reference.neighbors precomputed works", { skip_on_cran() anchors <- FindTransferAnchors(reference = ref, query = query, reference.neighbors = "RNA.nn", k.filter = 50) expect_error(FindTransferAnchors(reference = ref, query = query, reference.neighbors = "BAD", k.filter = 50)) expect_error(FindTransferAnchors(reference = ref, query = query, reference.neighbors = "RNA.nn", k.filter = 50, k.score = 31)) expect_error(FindTransferAnchors(reference = ref, query = query, reference.neighbors = "RNA.nn", k.filter = 50, k.anchor = 31)) co <- anchors@object.list[[1]] expect_equal(dim(co), c(100, 160)) expect_equal(Reductions(co), c("pcaproject", "pcaproject.l2")) expect_equal(GetAssayData(co[["RNA"]])[1, 3], 0) expect_equal(GetAssayData(co[["RNA"]], slot = "counts")[1, 3], 0) expect_equal(dim(co[['pcaproject']]), c(160, 30)) expect_equal(Embeddings(co[['pcaproject']])[1, 1], 0.4840944592, tolerance = 1e-7) expect_equal(Loadings(co[['pcaproject']], projected = T)[1, 1], 0.2103563963, tolerance = 1e-7) expect_equal(dim(co[['pcaproject.l2']]), c(160, 30)) expect_equal(Embeddings(co[['pcaproject.l2']])[1, 1], 0.05175486778, tolerance = 1e-7) expect_equal(Loadings(co[['pcaproject.l2']], projected = T)[1, 1], 0.2103563963, tolerance = 1e-7) ref.cells <- paste0(Cells(ref), "_reference") query.cells <- paste0(Cells(query), "_query") expect_equal(anchors@reference.cells, ref.cells) expect_equal(anchors@query.cells, query.cells) expect_equal(anchors@reference.objects, logical()) anchor.mat <- anchors@anchors expect_equal(dim(anchor.mat), c(128, 3)) expect_equal(as.vector(anchor.mat[1, ]), c(5, 5, 0.08361970218), tolerance = 1e-7) expect_equal(max(anchor.mat[, 2]), 80) expect_null(anchors@offsets) expect_equal(length(anchors@anchor.features), 100) expect_equal(anchors@anchor.features[1], "PPBP") expect_equal(anchors@neighbors, list()) }) test_that("FindTransferAnchors with no l2 works", { skip_on_cran() anchors <- FindTransferAnchors(reference = ref, query = query, l2.norm = FALSE, k.filter = 50) co <- anchors@object.list[[1]] expect_equal(dim(co), c(100, 160)) expect_equal(Reductions(co), c("pcaproject")) expect_equal(GetAssayData(co[["RNA"]])[1, 3], 0) expect_equal(GetAssayData(co[["RNA"]], slot = "counts")[1, 3], 0) expect_equal(dim(co[['pcaproject']]), c(160, 30)) expect_equal(Embeddings(co[['pcaproject']])[1, 1], 0.4840944592, tolerance = 1e-7) 
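# With l2.norm = FALSE the L2-normalized reduction is not computed, so only "pcaproject" is returned; the checks below pin the loadings, the renamed cell vectors, and the 115 x 3 filtered anchor matrix.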
expect_equal(Loadings(co[['pcaproject']], projected = T)[1, 1], 0.2103563963, tolerance = 1e-7) ref.cells <- paste0(Cells(ref), "_reference") query.cells <- paste0(Cells(query), "_query") expect_equal(anchors@reference.cells, ref.cells) expect_equal(anchors@query.cells, query.cells) expect_equal(anchors@reference.objects, logical()) anchor.mat <- anchors@anchors expect_equal(dim(anchor.mat), c(115, 3)) expect_equal(as.vector(anchor.mat[1, ]), c(5, 5, 0.2950654582), tolerance = 1e-7) expect_equal(max(anchor.mat[, 2]), 80) expect_null(anchors@offsets) expect_equal(length(anchors@anchor.features), 100) expect_equal(anchors@anchor.features[1], "PPBP") expect_equal(anchors@neighbors, list()) }) # SCTransform tests query <- suppressWarnings(SCTransform(object = query, verbose = FALSE)) ref <- suppressWarnings(SCTransform(object = ref, verbose = FALSE)) test_that("FindTransferAnchors with default SCT works", { skip_on_cran() anchors <- FindTransferAnchors(reference = ref, query = query, normalization.method = "SCT", k.filter = 50) co <- anchors@object.list[[1]] expect_equal(dim(co), c(220, 160)) expect_equal(Reductions(co), c("pcaproject", "pcaproject.l2")) expect_equal(DefaultAssay(co), "SCT") expect_equal(GetAssayData(co[["SCT"]], slot = "scale.data"), new(Class = "matrix")) expect_equal(GetAssayData(co[["SCT"]])[1, 1], 0) expect_equal(dim(co[['pcaproject']]), c(160, 30)) expect_equal(Embeddings(co[['pcaproject']])[1, 1], -1.852491719, tolerance = 1e-7) expect_equal(Loadings(co[['pcaproject']], projected = T)[1, 1], -0.1829401539, tolerance = 1e-7) expect_equal(dim(co[['pcaproject.l2']]), c(160, 30)) expect_equal(Embeddings(co[['pcaproject.l2']])[1, 1], -0.1971047407, tolerance = 1e-7) expect_equal(Loadings(co[['pcaproject.l2']], projected = T)[1, 1], -0.1829401539, tolerance = 1e-7) ref.cells <- paste0(Cells(ref), "_reference") query.cells <- paste0(Cells(query), "_query") expect_equal(anchors@reference.cells, ref.cells) expect_equal(anchors@query.cells, query.cells) expect_equal(anchors@reference.objects, logical()) anchor.mat <- anchors@anchors expect_equal(dim(anchor.mat), c(256, 3)) expect_equal(as.vector(anchor.mat[1, ]), c(1, 1, 0.688195991), tolerance = 1e-7) expect_equal(max(anchor.mat[, 2]), 80) expect_null(anchors@offsets) expect_equal(length(anchors@anchor.features), 220) expect_equal(anchors@anchor.features[1], "NKG7") expect_equal(anchors@neighbors, list()) }) test_that("Mixing SCT and non-SCT assays fails", { expect_error(FindTransferAnchors(reference = ref, query = query, reference.assay = "SCT", query.assay = "RNA", k.filter = 50)) ref.0 <- ref ref.2 <- ref ref.0[["SCT"]]@SCTModel.list <- list() ref.2[["SCT"]]@SCTModel.list$model2 <- ref.2[["SCT"]]@SCTModel.list$model1 expect_error(FindTransferAnchors(reference = ref.0, query = query, reference.assay = "SCT", query.assay = "RNA", k.filter = 50, normalization.method = "SCT")) expect_error(FindTransferAnchors(reference = ref.2, query = query, reference.assay = "SCT", query.assay = "RNA", k.filter = 50, normalization.method = "SCT")) expect_error(FindTransferAnchors(reference = ref, query = query, reference.assay = "RNA", query.assay = "SCT", k.filter = 50)) expect_error(FindTransferAnchors(reference = ref, query = query, reference.assay = "RNA", query.assay = "SCT", k.filter = 50, normalization.method = "SCT")) }) test_that("FindTransferAnchors with default SCT works", { skip_on_cran() anchors <- FindTransferAnchors(reference = ref, query = query, normalization.method = "SCT", reduction = "cca", k.filter = 50) co <- 
anchors@object.list[[1]] expect_equal(dim(co), c(220, 160)) expect_equal(Reductions(co), c("cca", "cca.l2")) expect_equal(DefaultAssay(co), "SCT") expect_equal(GetAssayData(co[["SCT"]])[1, 1], 0) expect_equal(dim(co[['cca']]), c(160, 30)) expect_equal(Embeddings(co[['cca']])[1, 1], 0.0459135444, tolerance = 1e-7) expect_equal(Loadings(co[['cca']], projected = T)[1, 1], 8.51477973, tolerance = 1e-7) expect_equal(dim(co[['cca.l2']]), c(160, 30)) expect_equal(Embeddings(co[['cca.l2']])[1, 1], 0.0625989664, tolerance = 1e-7) expect_equal(Loadings(co[['cca.l2']], projected = T)[1, 1], 8.51477973, tolerance = 1e-7) ref.cells <- paste0(Cells(ref), "_reference") query.cells <- paste0(Cells(query), "_query") expect_equal(anchors@reference.cells, ref.cells) expect_equal(anchors@query.cells, query.cells) expect_equal(anchors@reference.objects, logical()) anchor.mat <- anchors@anchors expect_equal(dim(anchor.mat), c(313, 3)) expect_equal(as.vector(anchor.mat[1, ]), c(1, 1, 0.616858238), tolerance = 1e-7) expect_equal(max(anchor.mat[, 2]), 80) expect_null(anchors@offsets) expect_equal(length(anchors@anchor.features), 220) expect_equal(anchors@anchor.features[1], "NKG7") expect_equal(anchors@neighbors, list()) }) test_that("FindTransferAnchors with SCT and project.query work", { skip_on_cran() anchors <- FindTransferAnchors(reference = ref, query = query, normalization.method = "SCT", project.query = TRUE, k.filter = 50, recompute.residuals = FALSE) co <- anchors@object.list[[1]] expect_equal(dim(co), c(220, 160)) expect_equal(Reductions(co), c("pcaproject", "pcaproject.l2")) expect_equal(DefaultAssay(co), "SCT") expect_equal(GetAssayData(co[["SCT"]])[1, 1], 0) expect_equal(GetAssayData(co[["SCT"]], slot = "scale.data"), new("matrix")) expect_equal(dim(co[['pcaproject']]), c(160, 30)) expect_equal(Embeddings(co[['pcaproject']])[1, 1], 0.3308694488, tolerance = 1e-7) expect_equal(Loadings(co[['pcaproject']], projected = T)[1, 1], 0.05788217444, tolerance = 1e-7) expect_equal(dim(co[['pcaproject.l2']]), c(160, 30)) expect_equal(Embeddings(co[['pcaproject.l2']])[1, 1], 0.03807493471, tolerance = 1e-7) expect_equal(Loadings(co[['pcaproject.l2']], projected = T)[1, 1], 0.05788217444, tolerance = 1e-7) ref.cells <- paste0(Cells(ref), "_reference") query.cells <- paste0(Cells(query), "_query") expect_equal(anchors@reference.cells, ref.cells) expect_equal(anchors@query.cells, query.cells) expect_equal(anchors@reference.objects, logical()) anchor.mat <- anchors@anchors expect_equal(dim(anchor.mat), c(288, 3)) expect_equal(as.vector(anchor.mat[1, ]), c(1, 1, 0.6138996139), tolerance = 1e-7) expect_equal(max(anchor.mat[, 2]), 80) expect_null(anchors@offsets) expect_equal(length(anchors@anchor.features), 220) expect_equal(anchors@anchor.features[1], "PPBP") expect_equal(anchors@neighbors, list()) }) test_that("FindTransferAnchors with SCT and l2.norm FALSE work", { skip_on_cran() anchors <- FindTransferAnchors(reference = ref, query = query, normalization.method = "SCT", l2.norm = FALSE, k.filter = 50) co <- anchors@object.list[[1]] expect_equal(dim(co), c(220, 160)) expect_equal(Reductions(co), c("pcaproject")) expect_equal(DefaultAssay(co), "SCT") expect_equal(GetAssayData(co[["SCT"]])[1, 1], 0) expect_equal(GetAssayData(co[["SCT"]], slot = "scale.data"), new("matrix")) expect_equal(dim(co[['pcaproject']]), c(160, 30)) expect_equal(Embeddings(co[['pcaproject']])[1, 1], -1.852491719, tolerance = 1e-7) expect_equal(Loadings(co[['pcaproject']], projected = T)[1, 1], -0.1829401539, tolerance = 1e-7) ref.cells <- 
paste0(Cells(ref), "_reference") query.cells <- paste0(Cells(query), "_query") expect_equal(anchors@reference.cells, ref.cells) expect_equal(anchors@query.cells, query.cells) expect_equal(anchors@reference.objects, logical()) anchor.mat <- anchors@anchors expect_equal(dim(anchor.mat), c(249, 3)) expect_equal(as.vector(anchor.mat[1, ]), c(1, 1, 0.760589319), tolerance = 1e-7) expect_equal(max(anchor.mat[, 2]), 80) expect_null(anchors@offsets) expect_equal(length(anchors@anchor.features), 220) expect_equal(anchors@anchor.features[1], "NKG7") expect_equal(anchors@neighbors, list()) }) Seurat/tests/testthat/test_data_manipulation.R0000644000176200001440000002257614152476164021420 0ustar liggesusers# Tests for functions in data_manipulation.cpp # change in random number generation in R3.6, this ensures tests will pass under older and newer Rs suppressWarnings(RNGversion(vstr = "3.5.3")) set.seed(42) library(Matrix) # Tests for row merging # -------------------------------------------------------------------------------- context("Row Merging") m1 <- rsparsematrix(10, 10, 0.1) m2 <- rsparsematrix(10, 10, 0.1) m1.names <- paste0("row", sample(1:10, size = 10)) m2.names <- paste0("row", sample(1:20, size = 10)) all.names <- union(m1.names, m2.names) rownames(m1) <- m1.names rownames(m2) <- m2.names m1 <- as(m1, "RsparseMatrix") m2 <- as(m2, "RsparseMatrix") test_that("Row merging done correctly", { m3 <- RowMergeMatrices(mat1 = m1, mat2 = m2, mat1_rownames = m1.names, mat2_rownames = m2.names, all_rownames = all.names) expect_equal(m3[1, 14], -0.17) expect_equal(m3[3, 2], -1.4) expect_equal(m3[14, 18], -0.43) expect_equal(length(m3), 280) }) #test_that("Row merging with a list done correctly", { # m3 <- RowMergeMatricesList(mat_list = list(m1, m2), mat_rownames = list(m1.names, m2.names), all_rownames = all.names) # expect_equal(m3[1, 14], -0.17) # expect_equal(m3[3, 2], -1.4) # expect_equal(m3[14, 18], -0.43) # expect_equal(length(m3), 280) #}) # Tests for log normalization # -------------------------------------------------------------------------------- context("Log Normalization") mat <- as(matrix(1:16, ncol = 4, nrow = 4), "sparseMatrix") test_that("Log Normalization returns expected values", { mat.norm.r <- log1p(sweep(mat, 2, Matrix::colSums(mat), FUN = "/") * 1e4) mat.norm <- LogNorm(mat, 1e4, display_progress = F) expect_equal(mat.norm[1, ], mat.norm.r[1, ]) expect_equal(mat.norm[4, 4], mat.norm.r[4, 4]) }) # Tests for scaling data # -------------------------------------------------------------------------------- context("Fast Scale Data Functions") mat <- matrix(rnorm(n = 10*15), nrow = 10, ncol = 15) # should be the equivalent of t(scale(t(mat))) test_that("Fast implementation of row scaling returns expected values", { expect_equal(t(scale(t(mat)))[1:10, 1:15], FastRowScale(mat)) expect_equal(t(scale(t(mat), center = FALSE))[1:10, 1:15], FastRowScale(mat, center = FALSE)) expect_equal(t(scale(t(mat), scale = FALSE))[1:10, 1:15], FastRowScale(mat, scale = FALSE)) expect_equal(t(scale(t(mat), scale = FALSE, center = F))[1:10, 1:15], FastRowScale(mat, scale = FALSE, center = F)) mat.clipped <- FastRowScale(mat, scale_max = 0.2) expect_true(max(mat.clipped, na.rm = T) >= 0.2) }) # should be the equivalent of scale(mat, TRUE, apply(mat, 2, sd)) test_that("Standardize returns expected values", { expect_equal(Standardize(mat, display_progress = FALSE), scale(mat, TRUE, apply(mat, 2, sd)), check.attributes = FALSE) }) # should be the equivalent of t(scale(t(mat))) mat <- rsparsematrix(10, 15, 
0.1) test_that("Fast implementation of row scaling returns expected values", { expect_equal(t(scale(t(as.matrix(mat))))[1:10, 1:15], FastSparseRowScale(mat, display_progress = FALSE), check.attributes = FALSE) expect_equal(t(scale(t(as.matrix(mat)), center = FALSE))[1:10, 1:15], FastSparseRowScale(mat, center = FALSE, display_progress = FALSE), check.attributes = FALSE) expect_equal(t(scale(t(as.matrix(mat)), scale = FALSE))[1:10, 1:15], FastSparseRowScale(mat, scale = FALSE, display_progress = FALSE), check.attributes = FALSE) expect_equal(t(scale(t(as.matrix(mat)), scale = FALSE, center = F))[1:10, 1:15], FastSparseRowScale(mat, scale = FALSE, center = F, display_progress = FALSE), check.attributes = FALSE) mat.clipped <- FastSparseRowScale(mat, scale_max = 0.2, display_progress = F) expect_true(max(mat.clipped, na.rm = T) >= 0.2) }) mat <- as(object = matrix(rnorm(100), nrow = 10, ncol = 10), Class = "dgCMatrix") test_that("Row scaling with known stats works", { mat.rowmeans <- rowMeans(x = mat) mat.sd <- apply(X = mat, MARGIN = 1, FUN = sd) expect_equal( t(scale(t(as.matrix(mat)), center = mat.rowmeans, scale = mat.sd)), FastSparseRowScaleWithKnownStats(mat = mat, mu = mat.rowmeans, sigma = mat.sd, scale = TRUE, center = TRUE, scale_max = 10, display_progress = FALSE), check.attributes = FALSE ) expect_equal( t(scale(t(as.matrix(mat)), center = FALSE, scale = mat.sd)), FastSparseRowScaleWithKnownStats(mat = mat, mu = mat.rowmeans, sigma = mat.sd, scale = TRUE, center = FALSE, scale_max = 10, display_progress = FALSE), check.attributes = FALSE ) expect_equal( t(scale(t(as.matrix(mat)), center = mat.rowmeans, scale = FALSE)), FastSparseRowScaleWithKnownStats(mat = mat, mu = mat.rowmeans, sigma = mat.sd, scale = FALSE, center = TRUE, scale_max = 10, display_progress = FALSE), check.attributes = FALSE ) mat.clipped <- FastSparseRowScaleWithKnownStats(mat = mat, mu = mat.rowmeans, sigma = mat.sd, scale = FALSE, center = TRUE, scale_max = 0.2, display_progress = FALSE) expect_true(max(mat.clipped, na.rm = T) >= 0.2) }) # Tests for fast basic stats functions # -------------------------------------------------------------------------------- context("Fast Basic Stats Functions") set.seed(42) mat <- replicate(10, rchisq(10, 4)) fcv <- FastCov(mat) cv <- cov(mat) test_that("Fast implementation of covariance returns expected values", { expect_equal(fcv[1,1], 9.451051142) expect_equal(fcv[10,10], 5.6650068) expect_equal(fcv, cv) }) mat2 <- replicate(10, rchisq(10, 4)) fcv <- FastCovMats(mat1 = mat, mat2 = mat2) cv <- cov(mat, mat2) test_that("Fast implementation of covariance returns expected values for matrices", { expect_equal(fcv[1,1], 1.523417, tolerance = 1e-6) expect_equal(fcv[10,10], -0.6031694, tolerance = 1e-6) expect_equal(fcv, cv) }) merged.mat <- FastRBind(mat, fcv) test_that("Fast implementation of rbind returns expected values", { expect_equal(merged.mat, rbind(mat, fcv)) expect_equal(mat[1,1], merged.mat[1,1]) expect_equal(fcv[10,10], merged.mat[20,10]) }) mat <- as(mat, "dgCMatrix") test_that("Fast implementation of ExpMean returns expected values",{ expect_equal(ExpMean(mat[1,]), FastExpMean(mat, display_progress = F)[1]) expect_equal(ExpMean(mat[5,]), FastExpMean(mat, display_progress = F)[5]) expect_equal(ExpMean(mat[10,]), FastExpMean(mat, display_progress = F)[10]) expect_equal(length(FastExpMean(mat, display_progress = F)), nrow(mat)) expect_error(FastExpMean(mat[1, ], display_progress = F)) expect_equal(FastExpMean(mat[1, ,drop = F], display_progress = F), ExpMean(mat[1,])) 
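# Beyond agreement with ExpMean() on single rows, the values below pin FastExpMean() to known results for this seeded random matrix.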
expect_equal(FastExpMean(mat, display_progress = F)[1], 6.493418, tolerance = 1e-6) expect_equal(FastExpMean(mat, display_progress = F)[5], 6.255206, tolerance = 1e-6) expect_equal(FastExpMean(mat, display_progress = F)[10], 7.84965, tolerance = 1e-6) }) test_that("Fast implementation of LogVMR returns expected values", { expect_equal(LogVMR(mat[1,]), FastLogVMR(mat, display_progress = F)[1]) expect_equal(LogVMR(mat[5,]), FastLogVMR(mat, display_progress = F)[5]) expect_equal(LogVMR(mat[10,]), FastLogVMR(mat, display_progress = F)[10]) expect_equal(length(FastExpMean(mat, display_progress = F)), nrow(mat)) expect_error(FastLogVMR(mat[1, ], display_progress = F)) expect_equal(FastLogVMR(mat[1, ,drop = F], display_progress = F), LogVMR(mat[1,])) expect_equal(FastLogVMR(mat, display_progress = F)[1], 7.615384, tolerance = 1e-6) expect_equal(FastLogVMR(mat, display_progress = F)[5], 7.546768, tolerance = 1e-6) expect_equal(FastLogVMR(mat, display_progress = F)[10], 10.11755, tolerance = 1e-6) }) test_that("Row variance calculations for sparse matrices work", { expect_equal(apply(X = mat, MARGIN = 1, FUN = var), SparseRowVar(mat = mat, display_progress = FALSE), tolerance = 1e-6) expect_equal(apply(X = mat2, MARGIN = 1, FUN = var), SparseRowVar(mat = as(object = mat2, Class = "dgCMatrix"), display_progress = FALSE), tolerance = 1e-6) }) # Tests for data structure manipulations # -------------------------------------------------------------------------------- context("Data structure manipulations") mat <- rsparsematrix(nrow = 10, ncol = 100, density = 0.1) mat2 <- rsparsematrix(nrow = 10, ncol = 10, density = 0.1) cols.to.replace1 <- 1:10 cols.to.replace2 <- 10:1 cols.to.replace3 <- 91:100 cols.to.replace4 <- c(10, 15, 33, 2, 6, 99, 55, 30, 25, 42) ReplaceCols <- function(mat, cols, replace){ mat[, cols] <- replace return(mat) } test_that("Replacing columns works", { expect_equal(ReplaceColsC(mat = mat, col_idx = cols.to.replace1 - 1, replacement = mat2), ReplaceCols(mat = mat, cols = cols.to.replace1, replace = mat2)) expect_equal(ReplaceColsC(mat = mat, col_idx = cols.to.replace2 - 1, replacement = mat2), ReplaceCols(mat = mat, cols = cols.to.replace2, replace = mat2)) expect_equal(ReplaceColsC(mat = mat, col_idx = cols.to.replace3 - 1, replacement = mat2), ReplaceCols(mat = mat, cols = cols.to.replace3, replace = mat2)) expect_equal(ReplaceColsC(mat = mat, col_idx = cols.to.replace4 - 1, replacement = mat2), ReplaceCols(mat = mat, cols = cols.to.replace4, replace = mat2)) }) test_that("Cpp implementation of row variance is correct", { expect_equal(apply(X = mat, MARGIN = 1, FUN = var), RowVar(as.matrix(mat))) expect_equal(apply(X = merged.mat, MARGIN = 1, FUN = var), RowVar(as.matrix(merged.mat))) }) Seurat/tests/testthat/test_utilities.R0000644000176200001440000001202414005656653017725 0ustar liggesusersset.seed(42) pbmc.file <- system.file('extdata', 'pbmc_raw.txt', package = 'Seurat') pbmc.test <- as(as.matrix(read.table(pbmc.file, sep = "\t", row.names = 1)), "dgCMatrix") meta.data <- data.frame( a = rep(as.factor(c('a', 'b', 'c')), length.out = ncol(pbmc.test)), row.names = colnames(pbmc.test) ) object <- CreateSeuratObject( counts = pbmc.test, min.cells = 10, min.features = 30, meta.data = meta.data ) object <- SetIdent(object, value = 'a') test_that("AverageExpression works for different slots", { average.expression <- AverageExpression(object, slot = 'data')$RNA expect_equivalent( average.expression['KHDRBS1', 1:3], c(a = 7.278237e-01, b = 1.658166e+14, c = 1.431902e-01), tolerance = 
1e-6 ) expect_equivalent( average.expression['DNAJB1', 1:3] , c(a = 1.374079e+00, b = 5.100840e-01, c = 5.011655e-01), tolerance = 1e-6 ) avg.counts <- AverageExpression(object, slot = 'counts')$RNA expect_equal( avg.counts['MS4A1', ], c(a = 0.37037037, b = 0.3461538, c = 0.3333333), tolerance = 1e-6 ) expect_equal( avg.counts['SPON2', ], c(a = 0.5185185, b = 0.6153846, c = 0.08333333), tolerance = 1e-6 ) expect_warning(AverageExpression(object, slot = 'scale.data')) object <- ScaleData(object = object, verbose = FALSE) avg.scale <- AverageExpression(object, slot = "scale.data")$RNA expect_equal( avg.scale['MS4A1', ], c(a = 0.02092088, b = -0.004769018, c = -0.018369549), tolerance = 1e-6 ) expect_equal( avg.scale['SPON2', ], c(a = 0.1052434, b = 0.2042827, c = -0.3397051), tolerance = 1e-6 ) }) test_that("AverageExpression handles features properly", { features <- rownames(x = object)[1:10] average.expression <- AverageExpression(object, slot = 'data', features = features)$RNA expect_equal(rownames(x = average.expression), features) expect_warning(AverageExpression(object, slot = 'data', features = "BAD")) expect_warning(AverageExpression(object, slot = "data", features = c(features, "BAD"))) }) test_that("AverageExpression with return.seurat", { # counts avg.counts <- AverageExpression(object, slot = "counts", return.seurat = TRUE, verbose = FALSE) expect_s4_class(object = avg.counts, "Seurat") avg.counts.mat <- AverageExpression(object, slot = 'counts')$RNA expect_equal(as.matrix(GetAssayData(avg.counts[["RNA"]], slot = "counts")), avg.counts.mat) avg.data <- GetAssayData(avg.counts[["RNA"]], slot = "data") expect_equal( avg.data['MS4A1', ], c(a = 0.31508105, b = 0.2972515, c = 0.2876821), tolerance = 1e-6 ) expect_equal( avg.data['SPON2', ], c(a = 0.4177352, b = 0.4795731, c = 0.08004271), tolerance = 1e-6 ) avg.scale <- GetAssayData(avg.counts[["RNA"]], slot = "scale.data") expect_equal( avg.scale['MS4A1', ], c(a = 1.0841908, b = -0.1980056, c = -0.8861852), tolerance = 1e-6 ) expect_equal( avg.scale['SPON2', ], c(a = 0.4275778, b = 0.7151260, c = -1.1427038), tolerance = 1e-6 ) # data avg.data <- AverageExpression(object, slot = "data", return.seurat = TRUE, verbose = FALSE) expect_s4_class(object = avg.data, "Seurat") avg.data.mat <- AverageExpression(object, slot = 'data')$RNA expect_equal(as.matrix(GetAssayData(avg.data[["RNA"]], slot = "counts")), avg.data.mat) expect_equal(unname(as.matrix(GetAssayData(avg.data[["RNA"]], slot = "data"))), unname(log1p(x = avg.data.mat))) avg.scale <- GetAssayData(avg.data[["RNA"]], slot = "scale.data") expect_equal( avg.scale['MS4A1', ], c(a = 0.721145238, b = -1.1415734, c = 0.4204281), tolerance = 1e-6 ) expect_equal( avg.scale['SPON2', ], c(a = 0.08226771, b = 0.9563249, c = -1.0385926), tolerance = 1e-6 ) # scale.data object <- ScaleData(object = object, verbose = FALSE) avg.scale <- AverageExpression(object, slot = "scale.data", return.seurat = TRUE, verbose = FALSE) expect_s4_class(object = avg.scale, "Seurat") avg.scale.mat <- AverageExpression(object, slot = 'scale.data')$RNA expect_equal(unname(as.matrix(GetAssayData(avg.scale[["RNA"]], slot = "scale.data"))), unname(avg.scale.mat)) expect_true(all(is.na(GetAssayData(avg.scale[["RNA"]], slot = "data")))) expect_equal(GetAssayData(avg.scale[["RNA"]], slot = "counts"), matrix()) }) test.dat <- GetAssayData(object = object, slot = "data") rownames(x = test.dat) <- paste0("test-", rownames(x = test.dat)) object[["TEST"]] <- CreateAssayObject(data = test.dat) test_that("AverageExpression 
with multiple assays", { avg.test <- AverageExpression(object = object, assays = "TEST") expect_equal(names(x = avg.test), "TEST") expect_equal(length(x = avg.test), 1) expect_equivalent( avg.test[[1]]['test-KHDRBS1', 1:3], c(a = 7.278237e-01, b = 1.658166e+14, c = 1.431902e-01), tolerance = 1e-6 ) expect_equivalent( avg.test[[1]]['test-DNAJB1', 1:3] , c(a = 1.374079e+00, b = 5.100840e-01, c = 5.011655e-01), tolerance = 1e-6 ) avg.all <- AverageExpression(object = object) expect_equal(names(x = avg.all), c("RNA", "TEST")) expect_equal(length(x = avg.all), 2) }) Seurat/tests/testthat/test_objects.R0000644000176200001440000004104014152476164017343 0ustar liggesusers# Tests for functions in objects.R # Tests for interacting with the meta.data slot # ------------------------------------------------------------------------------ context("Metadata") data("pbmc_small") pbmc_small <- suppressWarnings(suppressMessages(UpdateSeuratObject(pbmc_small))) cluster_letters <- LETTERS[Idents(object = pbmc_small)] names(cluster_letters) <- colnames(x = pbmc_small) cluster_letters_shuffled <- sample(x = cluster_letters) test_that("AddMetaData adds in cell-level vector properly ", { pbmc_small <- AddMetaData(object = pbmc_small, metadata = cluster_letters, col.name = 'letter.idents') expect_equal(pbmc_small$letter.idents, cluster_letters) pbmc_small <- AddMetaData(object = pbmc_small, metadata = cluster_letters_shuffled, col.name = 'letter.idents.shuffled') expect_equal(pbmc_small$letter.idents, pbmc_small$letter.idents.shuffled) }) cluster_letters_df <- data.frame(A = cluster_letters, B = cluster_letters_shuffled) test_that("AddMetaData adds in data frame properly for cell-level metadata", { pbmc_small <- AddMetaData(object = pbmc_small, metadata = cluster_letters_df) expect_equal(pbmc_small[[c("A", "B")]], cluster_letters_df) }) feature_letters <- sample(x = LETTERS, size = nrow(x = pbmc_small[["RNA"]]), replace = TRUE) names(feature_letters) <- rownames(x = pbmc_small[["RNA"]]) feature_letters_shuffled <- sample(x = feature_letters) test_that("AddMetaData adds feature level metadata", { pbmc_small[["RNA"]] <- AddMetaData(object = pbmc_small[["RNA"]], metadata = feature_letters, col.name = 'feature_letters') expect_equal(pbmc_small[["RNA"]][["feature_letters", drop = TRUE]], feature_letters) pbmc_small[["RNA"]] <- AddMetaData(object = pbmc_small[["RNA"]], metadata = feature_letters_shuffled, col.name = 'feature_letters_shuffled') expect_equal(pbmc_small[["RNA"]][["feature_letters", drop = TRUE]], pbmc_small[["RNA"]][["feature_letters_shuffled", drop = TRUE]]) }) feature_letters_df <- data.frame(A = feature_letters, B = feature_letters_shuffled) test_that("AddMetaData adds in data frame properly for Assays", { pbmc_small[["RNA"]] <- AddMetaData(object = pbmc_small[["RNA"]], metadata = feature_letters_df) expect_equal(pbmc_small[["RNA"]][[c("A", "B")]], feature_letters_df) }) test_that("AddMetaData errors", { expect_error(AddMetaData(object = pbmc_small, metadata = cluster_letters, col.name = "RNA")) expect_error(AddMetaData(object = pbmc_small, metadata = c(unname(cluster_letters), "A"), col.name = "letter.idents")) expect_error(AddMetaData(object = pbmc_small, metadata = feature_letters, col.name = "letter.idents")) expect_error(AddMetaData(object = pbmc_small[["RNA"]], metadata = cluster_letters, col.name = "letter.idents")) }) # Tests for creating an Assay object # ------------------------------------------------------------------------------ context("CreateAssayObject") pbmc.raw <- GetAssayData(object = 
pbmc_small[["RNA"]], slot = "counts") rna.assay <- CreateAssayObject(counts = pbmc.raw) rna.assay2 <- CreateAssayObject(data = pbmc.raw) test_that("CreateAssayObject works as expected", { expect_equal(dim(x = rna.assay), c(230, 80)) expect_equal(rownames(x = rna.assay), rownames(x = pbmc.raw)) expect_equal(colnames(x = rna.assay), colnames(x = pbmc.raw)) expect_equal(GetAssayData(object = rna.assay, slot = "counts"), pbmc.raw) expect_equal(GetAssayData(object = rna.assay, slot = "data"), pbmc.raw) expect_equal(GetAssayData(object = rna.assay, slot = "scale.data"), new(Class = "matrix")) expect_equal(dim(rna.assay[[]]), c(230, 0)) expect_equal(rownames(x = rna.assay[[]]), rownames(x = rna.assay)) expect_equal(VariableFeatures(object = rna.assay), vector()) expect_equal(rna.assay@misc, list()) expect_equal(GetAssayData(object = rna.assay2, slot = "counts"), new(Class = "matrix")) }) rna.assay2 <- CreateAssayObject(counts = pbmc.raw, min.cells = 10, min.features = 30) test_that("CreateAssayObject filtering works", { expect_equal(dim(x = rna.assay2), c(163, 77)) expect_true(all(rowSums(GetAssayData(object = rna.assay2, slot = "counts")) >= 10)) expect_true(all(colSums(GetAssayData(object = rna.assay2, slot = "counts")) >= 30)) }) test_that("CreateAssayObject catches improper input", { expect_error(CreateAssayObject()) expect_error(CreateAssayObject(counts = pbmc.raw, data = pbmc.raw)) pbmc.raw2 <- cbind(pbmc.raw[, 1:10], pbmc.raw[, 1:10]) expect_warning(CreateAssayObject(counts = pbmc.raw2)) expect_warning(CreateAssayObject(data = pbmc.raw2)) pbmc.raw2 <- rbind(pbmc.raw[1:10, ], pbmc.raw[1:10, ]) expect_warning(CreateAssayObject(counts = pbmc.raw2)) expect_warning(CreateAssayObject(data = pbmc.raw2)) pbmc.raw2 <- pbmc.raw colnames(x = pbmc.raw2) <- c() expect_error(CreateAssayObject(counts = pbmc.raw2)) expect_error(CreateAssayObject(data = pbmc.raw2)) pbmc.raw2 <- pbmc.raw rownames(x = pbmc.raw2) <- c() expect_error(CreateAssayObject(counts = pbmc.raw2)) expect_error(CreateAssayObject(data = pbmc.raw2)) pbmc.raw.mat <- as.matrix(x = pbmc.raw) pbmc.raw.df <- as.data.frame(x = pbmc.raw.mat) rna.assay3 <- CreateAssayObject(counts = pbmc.raw.df) rna.assay4 <- CreateAssayObject(counts = pbmc.raw.mat) expect_is(object = GetAssayData(object = rna.assay3, slot = "counts"), class = "dgCMatrix") expect_is(object = GetAssayData(object = rna.assay4, slot = "counts"), class = "dgCMatrix") pbmc.raw.underscores <- pbmc.raw rownames(pbmc.raw.underscores) <- gsub(pattern = "-", replacement = "_", x = rownames(pbmc.raw.underscores)) expect_warning(CreateAssayObject(counts = pbmc.raw.underscores)) }) # Tests for creating an DimReduc object # ------------------------------------------------------------------------------ context("CreateDimReducObject") pca <- pbmc_small[["pca"]] Key(object = pca) <- 'PC_' test_that("CreateDimReducObject works", { pca.dr <- CreateDimReducObject( embeddings = Embeddings(object = pca), loadings = Loadings(object = pca), projected = Loadings(object = pca, projected = TRUE), assay = "RNA" ) expect_equal(Embeddings(object = pca.dr), Embeddings(object = pca)) expect_equal(Loadings(object = pca.dr), Loadings(object = pca)) expect_equal(Loadings(object = pca.dr, projected = TRUE), Loadings(object = pca, projected = TRUE)) expect_equal(Key(object = pca.dr), "PC_") expect_equal(pca.dr@assay.used, "RNA") }) test_that("CreateDimReducObject catches improper input", { bad.embeddings <- Embeddings(object = pca) colnames(x = bad.embeddings) <- paste0("PCA", 1:ncol(x = bad.embeddings)) 
expect_warning(CreateDimReducObject(embeddings = bad.embeddings, key = "PC")) colnames(x = bad.embeddings) <- paste0("PC", 1:ncol(x = bad.embeddings), "X") suppressWarnings(expect_error(CreateDimReducObject(embeddings = bad.embeddings, key = "PC"))) suppressWarnings(expect_error(CreateDimReducObject(embeddings = bad.embeddings))) }) # Tests for creating a Seurat object # ------------------------------------------------------------------------------ context("CreateSeuratObject") colnames(x = pbmc.raw) <- paste0(colnames(x = pbmc.raw), "-", pbmc_small$groups) metadata.test <- pbmc_small[[]][, 5:7] rownames(x = metadata.test) <- colnames(x = pbmc.raw) test_that("CreateSeuratObject works", { seurat.object <- CreateSeuratObject( counts = pbmc.raw, project = "TESTING", assay = "RNA.TEST", names.field = 2, names.delim = "-", meta.data = metadata.test ) expect_equal(seurat.object[[]][, 4:6], metadata.test) expect_equal(seurat.object@project.name, "TESTING") expect_equal(names(x = seurat.object), "RNA.TEST") expect_equal(as.vector(x = unname(obj = Idents(object = seurat.object))), unname(pbmc_small$groups)) }) test_that("CreateSeuratObject handles bad names.field/names.delim", { expect_warning(seurat.object <- CreateSeuratObject( counts = pbmc.raw[1:5,1:5], names.field = 3, names.delim = ":", meta.data = metadata.test )) }) # Tests for creating a Seurat object # ------------------------------------------------------------------------------ context("Merging") pbmc.assay <- pbmc_small[["RNA"]] x <- merge(x = pbmc.assay, y = pbmc.assay) test_that("Merging Assays works properly", { expect_equal(dim(GetAssayData(object = x, slot = "counts")), c(230, 160)) expect_equal(dim(GetAssayData(object = x, slot = "data")), c(230, 160)) expect_equal(GetAssayData(object = x, slot = "scale.data"), new(Class = "matrix")) expect_equal(Key(object = x), "rna_") expect_equal(VariableFeatures(object = x), vector()) expect_equal(x[[]], data.frame(row.names = rownames(x = pbmc.assay))) }) pbmc.assay2 <- pbmc.assay pbmc.assay2@counts <- new("dgCMatrix") test_that("Merging Assays handles case when counts not present", { y <- merge(x = pbmc.assay2, y = pbmc.assay) expect_equal(unname(colSums(x = GetAssayData(object = y, slot = "counts"))[1:80]), rep.int(x = 0, times = 80)) z <- merge(x = pbmc.assay2, pbmc.assay2) expect_equal(nnzero(x = GetAssayData(object = z, slot = "counts")), 0) }) pbmc.assay2 <- pbmc.assay pbmc.assay2@data <- new("dgCMatrix") test_that("Merging Assays handles case when data not present", { y <- merge(x = pbmc.assay2, y = pbmc.assay, merge.data = TRUE) expect_equal(unname(colSums(x = GetAssayData(object = y, slot = "data"))[1:80]), rep.int(x = 0, times = 80)) z <- merge(x = pbmc.assay2, y = pbmc.assay2, merge.data = TRUE) expect_equal(nnzero(x = GetAssayData(object = z, slot = "data")), 0) }) # Tests for Neighbor object # ------------------------------------------------------------------------------ context("Neighbor") # converting to Graph and back n.rann.ob <- NNHelper( data = Embeddings(object = pbmc_small[["pca"]]), query = Embeddings(object = pbmc_small[["pca"]]), k = 10, method = "rann") test_that("Neighbor object methods work", { expect_equal(dim(x = Indices(object = n.rann.ob)), c(80, 10)) expect_equal(dim(x = n.rann.ob), c(80, 10)) expect_equal(as.numeric(Indices(object = n.rann.ob)[1, 7]), 45, ) expect_equal(dim(x = Distances(object = n.rann.ob)), c(80, 10)) expect_equal(as.numeric(Distances(object = n.rann.ob)[2, 2]), 2.643759, tolerance = 1e-6) expect_equal(length(x = Cells(x = n.rann.ob)), 80) 
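# The remaining checks confirm that cell names, RenameCells propagation, TopNeighbors, and round-tripping through as.Graph()/as.Neighbor() all preserve the stored neighbor indices and distances.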
expect_equal(Cells(x = n.rann.ob)[c(1, 20, 80)], c("ATGCCAGAACGACT", "TACATCACGCTAAC", "CTTGATTGATCTTC")) pbmc_small[["n.ob"]] <- n.rann.ob pbmc_small <- RenameCells(object = pbmc_small, add.cell.id = "test") expect_equal(Cells(x = pbmc_small[['n.ob']])[1], c("test_ATGCCAGAACGACT")) expect_equal(TopNeighbors(object = n.rann.ob, cell = "ATGCCAGAACGACT", n = 5)[5], "GATATAACACGCAT") expect_equal(length(TopNeighbors(object = n.rann.ob, cell = "ATGCCAGAACGACT", n = 7)), 7) nrg <- as.Graph(x = n.rann.ob) expect_true(inherits(x = nrg, what = "Graph")) expect_equal(as.numeric(Distances(object = n.rann.ob)[2, 3]), nrg[2, Indices(object = n.rann.ob)[2, 3]]) nro2 <- as.Neighbor(x = nrg) expect_true(inherits(x = nro2, what = "Neighbor")) expect_equal(Distances(object = n.rann.ob)[2, 3], Distances(object = nro2)[2, 3]) expect_equal(Indices(object = n.rann.ob)[1, 6], Indices(object = nro2)[1, 6]) }) n.annoy.ob <- NNHelper( data = Embeddings(object = pbmc_small[["pca"]]), query = Embeddings(object = pbmc_small[["pca"]]), k = 10, method = "annoy", cache.index = TRUE) idx.file <- tempfile() SaveAnnoyIndex(object = n.annoy.ob, file = idx.file) nao2 <- LoadAnnoyIndex(object = n.annoy.ob, file = idx.file) test_that("Saving/Loading annoy index", { expect_error(SaveAnnoyIndex(object = n.rann.ob, file = idx.file)) expect_equal(head(Indices(n.annoy.ob)), head(Indices(nao2))) expect_equal(head(Distances(n.annoy.ob)), head(Distances(nao2))) expect_false(is.null(x = Index(nao2))) }) # Tests for FetchData # ------------------------------------------------------------------------------ context("FetchData") # Features to test: # able to pull cell embeddings, data, metadata # subset of cells test_that("Fetching a subset of cells works", { x <- FetchData(object = pbmc_small, cells = colnames(x = pbmc_small)[1:10], vars = rownames(x = pbmc_small)[1]) expect_equal(rownames(x = x), colnames(x = pbmc_small)[1:10]) random.cells <- sample(x = colnames(x = pbmc_small), size = 10) x <- FetchData(object = pbmc_small, cells = random.cells, vars = rownames(x = pbmc_small)[1]) expect_equal(rownames(x = x), random.cells) x <- FetchData(object = pbmc_small, cells = 1:10, vars = rownames(x = pbmc_small)[1]) expect_equal(rownames(x = x), colnames(x = pbmc_small)[1:10]) }) suppressWarnings(pbmc_small[["RNA2"]] <- pbmc_small[["RNA"]]) Key(pbmc_small[["RNA2"]]) <- "rna2_" test_that("Fetching keyed variables works", { x <- FetchData(object = pbmc_small, vars = c(paste0("rna_", rownames(x = pbmc_small)[1:5]), paste0("rna2_", rownames(x = pbmc_small)[1:5]))) expect_equal(colnames(x = x), c(paste0("rna_", rownames(x = pbmc_small)[1:5]), paste0("rna2_", rownames(x = pbmc_small)[1:5]))) x <- FetchData(object = pbmc_small, vars = c(paste0("rna_", rownames(x = pbmc_small)[1:5]), paste0("PC_", 1:5))) expect_equal(colnames(x = x), c(paste0("rna_", rownames(x = pbmc_small)[1:5]), paste0("PC_", 1:5))) }) test_that("Fetching embeddings/loadings not present returns warning or errors", { expect_warning(FetchData(object = pbmc_small, vars = c("PC_1", "PC_100"))) expect_error(FetchData(object = pbmc_small, vars = "PC_100")) }) bad.gene <- GetAssayData(object = pbmc_small[["RNA"]], slot = "data") rownames(x = bad.gene)[1] <- paste0("rna_", rownames(x = bad.gene)[1]) pbmc_small[["RNA"]]@data <- bad.gene # Tests for WhichCells # ------------------------------------------------------------------------------ test_that("Specifying cells works", { test.cells <- Cells(x = pbmc_small)[1:10] expect_equal(WhichCells(object = pbmc_small, cells = test.cells), 
test.cells) expect_equal(WhichCells(object = pbmc_small, cells = test.cells, invert = TRUE), setdiff(Cells(x = pbmc_small), test.cells)) }) test_that("Specifying idents works", { c12 <- WhichCells(object = pbmc_small, idents = c(1, 2)) expect_equal(length(x = c12), 44) expect_equal(c12[44], "CTTGATTGATCTTC") expect_equal(c12, WhichCells(object = pbmc_small, idents = 0, invert = TRUE)) }) test_that("downsample works", { expect_equal(length(x = WhichCells(object = pbmc_small, downsample = 5)), 15) expect_equal(length(x = WhichCells(object = pbmc_small, downsample = 100)), 80) }) test_that("passing an expression works", { lyz.pos <- WhichCells(object = pbmc_small, expression = LYZ > 1) expect_true(all(GetAssayData(object = pbmc_small, slot = "data")["LYZ", lyz.pos] > 1)) # multiple values in expression lyz.pos <- WhichCells(object = pbmc_small, expression = LYZ > 1 & groups == "g1") expect_equal(length(x = lyz.pos), 30) expect_equal(lyz.pos[30], "CTTGATTGATCTTC") }) # Tests for small other functions # ------------------------------------------------------------------------------ test_that("Top works", { dat <- Embeddings(object = pbmc_small[['pca']])[, 1, drop = FALSE] expect_warning(Top(data = dat, num = 1000, balanced = FALSE)) tpc1 <- Top(data = dat, num = 20, balanced = FALSE) expect_equal(length(x = tpc1), 20) expect_equal(tpc1[1], "ACGTGATGCCATGA") expect_equal(tpc1[20], "GTCATACTTCGCCT") tpc1b <- Top(data = dat, num = 20, balanced = TRUE) expect_equal(length(x = tpc1b), 2) expect_equal(names(tpc1b), c("positive", "negative")) expect_equal(length(tpc1b[[1]]), 10) expect_equal(length(tpc1b[[2]]), 10) expect_equal(tpc1b[[1]][1], "GTCATACTTCGCCT") expect_equal(tpc1b[[1]][10], "CTTGATTGATCTTC") expect_equal(tpc1b[[2]][1], "ACGTGATGCCATGA") expect_equal(tpc1b[[2]][10], "ATTGTAGATTCCCG") tpc1.sub <- Top(data = dat[1:79, , drop = FALSE], num = 79, balanced = TRUE) expect_equal(length(tpc1.sub[[1]]), 40) expect_equal(length(tpc1.sub[[2]]), 39) }) # Tests for SCE conversion # ------------------------------------------------------------------------------ test_that("as.SingleCellExperiment works", { skip_on_cran() if (requireNamespace('SingleCellExperiment', quietly = TRUE)) { mat <- matrix(1:100, ncol = 10) colnames(mat) <- LETTERS[1:10] rownames(mat) <- LETTERS[1:10] seuratObj <- Seurat::CreateSeuratObject(mat) sce <- as.SingleCellExperiment(seuratObj) expect_equal(ncol(sce), 10) expect_equal(nrow(sce), 10) # expect_equal(length(SingleCellExperiment::altExps(sce)), 0) # expect_equal(SingleCellExperiment::mainExpName(sce), 'RNA') seuratObj <- Seurat::CreateSeuratObject(mat) seuratObj[['ADT']] <- CreateAssayObject(mat) sce <- as.SingleCellExperiment(seuratObj) expect_equal(ncol(sce), 10) expect_equal(nrow(sce), 10) # expect_equal(names(SingleCellExperiment::altExps(sce)), 'ADT') # expect_equal(SingleCellExperiment::mainExpName(sce), 'RNA') } }) Seurat/tests/testthat/test_read_mtx.R0000644000176200001440000000275414024674706017526 0ustar liggesuserscontext("ReadMtx") test_that("skip.cell and skip.feature work", { skip_on_cran() mtx <- "ftp://ftp.ncbi.nlm.nih.gov/geo/series/GSE126nnn/GSE126836/suppl/GSE126836_SN_MD5828_matrix.mtx.gz" features <- "ftp://ftp.ncbi.nlm.nih.gov/geo/series/GSE126nnn/GSE126836/suppl/GSE126836_SN_MD5828_genes.csv.gz" cells <- "ftp://ftp.ncbi.nlm.nih.gov/geo/series/GSE126nnn/GSE126836/suppl/GSE126836_SN_MD5828_barcodes.csv.gz" counts1 <- ReadMtx(mtx = mtx, cells = cells, features = features, feature.column = 1, skip.cell = 1, skip.feature = 1) expect_is(counts1, "dgCMatrix") 
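# Spot-check the dimensions plus one barcode and one gene name of the matrix read with skip.cell/skip.feature.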
expect_equal(ncol(counts1), 1436) expect_equal(nrow(counts1), 29445) expect_equal(colnames(counts1)[5], "MD5828a_GGGCATCCAATGAAAC-1") expect_equal(rownames(counts1)[2], "A1BG-AS1") }) test_that("ReadMtx works", { skip_on_cran() mtx <- "https://www.ncbi.nlm.nih.gov/geo/download/?acc=GSE127774&format=file&file=GSE127774%5FACC%5FB%5Fmatrix%2Emtx%2Egz" cells <- "https://www.ncbi.nlm.nih.gov/geo/download/?acc=GSE127774&format=file&file=GSE127774%5FACC%5FB%5Fbarcodes%2Etsv%2Egz" features <- "https://www.ncbi.nlm.nih.gov/geo/download/?acc=GSE127774&format=file&file=GSE127774%5FACC%5FB%5Fgenes%2Etsv%2Egz" counts2 <- ReadMtx(mtx = mtx, cells = cells, features = features, feature.column = 1) expect_is(counts2, "dgCMatrix") expect_equal(ncol(counts2), 22063) expect_equal(nrow(counts2), 22530) expect_equal(colnames(counts2)[1], "AAACCTGAGCAATCTC-1") expect_equal(rownames(counts2)[2], "ENSPPAG00000040697") }) Seurat/tests/testthat/test_integratedata.R0000644000176200001440000001362614005656653020537 0ustar liggesusers# Tests for integration related fxns set.seed(42) pbmc_small <- suppressWarnings(UpdateSeuratObject(pbmc_small)) # Setup test objects ref <- pbmc_small ref <- FindVariableFeatures(object = ref, verbose = FALSE, nfeatures = 100) query <- CreateSeuratObject( counts = GetAssayData(object = pbmc_small[['RNA']], slot = "counts") + rpois(n = ncol(pbmc_small), lambda = 1) ) query2 <- CreateSeuratObject( counts = GetAssayData(object = pbmc_small[['RNA']], slot = "counts")[, 1:40] + rpois(n = ncol(pbmc_small), lambda = 1) ) query.list <- list(query, query2) query.list <- lapply(X = query.list, FUN = NormalizeData, verbose = FALSE) query.list <- lapply(X = query.list, FUN = FindVariableFeatures, verbose = FALSE, nfeatures = 100) query.list <- lapply(X = query.list, FUN = ScaleData, verbose = FALSE) query.list <- suppressWarnings(lapply(X = query.list, FUN = RunPCA, verbose = FALSE, npcs = 20)) anchors2 <- suppressMessages(suppressWarnings(FindIntegrationAnchors(object.list = c(ref, query.list[[1]]), k.filter = NA, verbose = FALSE))) anchors3 <- suppressMessages(suppressWarnings(FindIntegrationAnchors(object.list = c(ref, query.list), k.filter = NA, verbose = FALSE))) # Tests for IntegrateEmbeddings # ------------------------------------------------------------------------------ # context("IntegrateEmbeddings") # test_that("IntegrateEmbeddings validates properly", { # expect_error(IntegrateEmbeddings(anchorset = anchors2)) # expect_error(IntegrateEmbeddings(anchorset = anchors2, reduction = "pca", k.weight = 100)) # expect_error(IntegrateEmbeddings(anchorset = anchors2, reduction = c("pca", "pca2"), k.weight = 40)) # expect_error(IntegrateEmbeddings(anchorset = anchors2, reduction = "pca", k.weight = 40, weight.reduction = c(ref[['pca']]))) # pca3 <- RenameCells(object = ref[['pca']], new.names = paste0(Cells(ref), "_test")) # expect_error(IntegrateEmbeddings(anchorset = anchors2, reduction = "pca", k.weight = 40, # weight.reduction = c(pca3, ref[['pca']]))) # }) # # test_that("IntegrateEmbeddings with two objects default works", { # skip_on_cran() # int2 <- IntegrateEmbeddings(anchorset = anchors2, reduction = "pca", k.weight = 40, verbose = FALSE) # expect_equal(Reductions(int2), "integrated_pca") # expect_equal(sum(Embeddings(int2[['integrated_pca']])[1,]), -3.13050872287, tolerance = 1e-6) # expect_equal(sum(Embeddings(int2[['integrated_pca']])[,1]), -5.78790844887, tolerance = 1e-6) # }) # # test_that("IntegrateEmbeddings with three objects default works", { # skip_on_cran() # int3 <- 
IntegrateEmbeddings(anchorset = anchors3, reduction = "pca", k.weight = 40, verbose = FALSE) # expect_equal(Reductions(int3), "integrated_pca") # expect_equal(sum(Embeddings(int3[['integrated_pca']])[1,]), 0.221867815987, tolerance = 1e-6) # expect_equal(sum(Embeddings(int3[['integrated_pca']])[,1]), -16.7881409595, tolerance = 1e-6) # }) # # test_that("IntegrateEmbeddings works with specified reference objects", { # skip_on_cran() # anchors4 <- suppressMessages(suppressWarnings(FindIntegrationAnchors(object.list = c(ref, query.list), k.filter = NA, verbose = FALSE, reference = 1))) # int4 <- IntegrateEmbeddings(anchorset = anchors4, reduction = "pca", k.weight = 40, verbose = FALSE) # expect_equal(Reductions(int4), "integrated_pca") # expect_equal(sum(Embeddings(int4[['integrated_pca']])[1,]), -3.13050872287, tolerance = 1e-6) # expect_equal(sum(Embeddings(int4[['integrated_pca']])[,1]), 13.1180105492, tolerance = 1e-6) # }) # Tests for IntegrateData # ------------------------------------------------------------------------------ context("IntegrateData") test_that("IntegrateData with two objects default work", { expect_error(IntegrateData(anchorset = anchors2)) int2 <- IntegrateData(anchorset = anchors2, k.weight = 50, verbose = FALSE) expect_true(all(Assays(int2) %in% c("integrated", "RNA"))) expect_equal(Tool(int2), "Integration") expect_equal(dim(int2[["integrated"]]), c(133, 160)) expect_equal(length(VariableFeatures(int2)), 133) expect_equal(GetAssayData(int2[["integrated"]], slot = "counts"), new("dgCMatrix")) expect_equal(GetAssayData(int2[['integrated']], slot = "scale.data"), matrix()) expect_equal(sum(GetAssayData(int2[["integrated"]])[1, ]), 44.97355, tolerance = 1e-3) expect_equal(sum(GetAssayData(int2[["integrated"]])[, 1]), 78.8965706046, tolerance = 1e-6) expect_equal(Tool(object = int2, slot = "Integration")@sample.tree, matrix(c(-1, -2), nrow = 1)) }) test_that("IntegrateData with three objects default work", { expect_error(IntegrateData(anchorset = anchors3, k.weight = 50)) int3 <- IntegrateData(anchorset = anchors3, k.weight = 25, verbose = FALSE) expect_true(all(Assays(int3) %in% c("integrated", "RNA"))) expect_equal(Tool(int3), "Integration") expect_equal(dim(int3[["integrated"]]), c(169, 200)) expect_equal(length(VariableFeatures(int3)), 169) expect_equal(GetAssayData(int3[["integrated"]], slot = "counts"), new("dgCMatrix")) expect_equal(GetAssayData(int3[['integrated']], slot = "scale.data"), matrix()) expect_equal(sum(GetAssayData(int3[["integrated"]])[1, ]), 372.829, tolerance = 1e-6) expect_equal(sum(GetAssayData(int3[["integrated"]])[, 1]), 482.5009, tolerance = 1e-6) expect_equal(Tool(object = int3, slot = "Integration")@sample.tree, matrix(c(-2, -3, 1, -1), nrow = 2, byrow = TRUE)) }) test_that("Input validates correctly ", { expect_error(anchorset = anchors2, k.weight = 50, features.to.integrate = "BAD") expect_error(IntegrateData(anchorset = anchors2, k.weight = 50, normalization.method = "BAD")) expect_error(IntegrateData(anchorset = anchors2, k.weight = 50, weight.reduction = "BAD")) expect_error(IntegrateData(anchorset = anchors2, reductions.to.integrate = "pca")) skip_on_cran() #expect_warning(IntegrateData(anchorset = anchors2, k.weight = 50, features = c(rownames(ref), "BAD"))) #expect_warning(IntegrateData(anchorset = anchors2, k.weight = 50, dims = 1:1000)) }) Seurat/tests/testthat/test_preprocessing.R0000644000176200001440000004163714170106500020571 0ustar liggesusers# Tests for functions dependent on a seurat object set.seed(42) pbmc.file <- 
system.file('extdata', 'pbmc_raw.txt', package = 'Seurat') pbmc.test <- as(as.matrix(read.table(pbmc.file, sep = "\t", row.names = 1)), "dgCMatrix") # Tests for object creation (via CreateSeuratObject) # -------------------------------------------------------------------------------- context("Object creation") fake.meta.data <- data.frame(rep(1, ncol(pbmc.test))) rownames(fake.meta.data) <- colnames(pbmc.test) colnames(fake.meta.data) <- "FMD" object <- CreateSeuratObject(counts = pbmc.test, meta.data = fake.meta.data) test_that("object initialization actually creates seurat object", { expect_is(object, "Seurat") }) test_that("meta.data slot generated correctly", { expect_equal(dim(object[[]]), c(80, 4)) expect_equal(colnames(object[[]]), c("orig.ident", "nCount_RNA", "nFeature_RNA", "FMD")) expect_equal(rownames(object[[]]), colnames(object)) expect_equal(object[["nFeature_RNA"]][1:5, ], c(47, 52, 50, 56, 53)) expect_equal(object[["nCount_RNA"]][75:80, ], c(228, 527, 202, 157, 150, 233)) }) object.filtered <- CreateSeuratObject( counts = pbmc.test, min.cells = 10, min.features = 30 ) test_that("Filtering handled properly", { expect_equal(nrow(x = GetAssayData(object = object.filtered, slot = "counts")), 163) expect_equal(ncol(x = GetAssayData(object = object.filtered, slot = "counts")), 77) }) test_that("Metadata check errors correctly", { pbmc.md <- pbmc_small[[]] pbmc.md.norownames <- as.matrix(pbmc.md) rownames(pbmc.md.norownames) <- NULL expect_error(CreateSeuratObject(counts = pbmc.test, meta.data = pbmc.md.norownames), "Row names not set in metadata. Please ensure that rownames of metadata match column names of data matrix") }) # Tests for NormalizeData # -------------------------------------------------------------------------------- context("NormalizeData") test_that("NormalizeData error handling", { expect_error(NormalizeData(object = object, assay = "FAKE")) expect_equal( object = GetAssayData( object = NormalizeData( object = object, normalization.method = NULL, verbose = FALSE ), slot = "data" ), expected = GetAssayData(object = object, slot = "counts") ) }) object <- NormalizeData(object = object, verbose = FALSE, scale.factor = 1e6) test_that("NormalizeData scales properly", { expect_equal(GetAssayData(object = object, slot = "data")[2, 1], 9.567085, tolerance = 1e-6) expect_equal(GetAssayData(object = object, slot = "data")[161, 55], 8.415309, tolerance = 1e-6) expect_equal(Command(object = object, command = "NormalizeData.RNA", value = "scale.factor"), 1e6) expect_equal(Command(object = object, command = "NormalizeData.RNA", value = "normalization.method"), "LogNormalize") }) normalized.data <- LogNormalize(data = GetAssayData(object = object[["RNA"]], slot = "counts"), verbose = FALSE) test_that("LogNormalize normalizes properly", { expect_equal( LogNormalize(data = GetAssayData(object = object[["RNA"]], slot = "counts"), verbose = FALSE), LogNormalize(data = as.data.frame(as.matrix(GetAssayData(object = object[["RNA"]], slot = "counts"))), verbose = FALSE) ) }) clr.counts <- NormalizeData(object = pbmc.test, normalization.method = "CLR", verbose = FALSE) test_that("CLR normalization returns expected values", { expect_equal(dim(clr.counts), c(dim(pbmc.test))) expect_equal(clr.counts[2, 1], 0.5517828, tolerance = 1e-6) expect_equal(clr.counts[228, 76], 0.5971381, tolerance = 1e-6) expect_equal(clr.counts[230, 80], 0) }) rc.counts <- NormalizeData(object = pbmc.test, normalization.method = "RC", verbose = FALSE) test_that("Relative count normalization returns expected 
values", { expect_equal(rc.counts[2, 1], 142.8571, tolerance = 1e-6) expect_equal(rc.counts[228, 76], 18.97533, tolerance = 1e-6) expect_equal(rc.counts[230, 80], 0) rc.counts <- NormalizeData(object = pbmc.test, normalization.method = "RC", verbose = FALSE, scale.factor = 1e6) expect_equal(rc.counts[2, 1], 14285.71, tolerance = 1e-6) }) # Tests for ScaleData # -------------------------------------------------------------------------------- context("ScaleData") object <- ScaleData(object, verbose = FALSE) test_that("ScaleData returns expected values when input is a sparse matrix", { expect_equal(GetAssayData(object = object[["RNA"]], slot = "scale.data")[1, 1], -0.4148587, tolerance = 1e-6) expect_equal(GetAssayData(object = object[["RNA"]], slot = "scale.data")[75, 25], -0.2562305, tolerance = 1e-6) expect_equal(GetAssayData(object = object[["RNA"]], slot = "scale.data")[162, 59], -0.4363939, tolerance = 1e-6) }) new.data <- as.matrix(GetAssayData(object = object[["RNA"]], slot = "data")) new.data[1, ] <- rep(x = 0, times = ncol(x = new.data)) object2 <- object object2[["RNA"]] <- SetAssayData( object = object[["RNA"]], slot = "data", new.data = new.data ) object2 <- ScaleData(object = object2, verbose = FALSE) object <- ScaleData(object = object, verbose = FALSE) test_that("ScaleData returns expected values when input is not sparse", { expect_equal(GetAssayData(object = object[["RNA"]], slot = "scale.data")[75, 25], -0.2562305, tolerance = 1e-6) expect_equal(GetAssayData(object = object[["RNA"]], slot = "scale.data")[162, 59], -0.4363939, tolerance = 1e-6) }) test_that("ScaleData handles zero variance features properly", { expect_equal(GetAssayData(object = object2[["RNA"]], slot = "scale.data")[1, 1], 0) expect_equal(GetAssayData(object = object2[["RNA"]], slot = "scale.data")[1, 80], 0) }) ng1 <- rep(x = "g1", times = round(x = ncol(x = object) / 2)) object$group <- c(ng1, rep(x = "g2", times = ncol(x = object) - length(x = ng1))) g1 <- subset(x = object, group == "g1") g1 <- ScaleData(object = g1, features = rownames(x = g1), verbose = FALSE) g2 <- subset(x = object, group == "g2") g2 <- ScaleData(object = g2, features = rownames(x = g2), verbose = FALSE) object <- ScaleData(object = object, features = rownames(x = object), verbose = FALSE, split.by = "group") test_that("split.by option works", { expect_equal(GetAssayData(object = object, slot = "scale.data")[, Cells(x = g1)], GetAssayData(object = g1, slot = "scale.data")) expect_equal(GetAssayData(object = object, slot = "scale.data")[, Cells(x = g2)], GetAssayData(object = g2, slot = "scale.data")) }) g1 <- ScaleData(object = g1, features = rownames(x = g1), vars.to.regress = "nCount_RNA", verbose = FALSE) g2 <- ScaleData(object = g2, features = rownames(x = g2), vars.to.regress = "nCount_RNA", verbose = FALSE) object <- ScaleData(object = object, features = rownames(x = object), verbose = FALSE, split.by = "group", vars.to.regress = "nCount_RNA") test_that("split.by option works with regression", { expect_equal(GetAssayData(object = object, slot = "scale.data")[, Cells(x = g1)], GetAssayData(object = g1, slot = "scale.data")) expect_equal(GetAssayData(object = object, slot = "scale.data")[, Cells(x = g2)], GetAssayData(object = g2, slot = "scale.data")) }) # Tests for various regression techniques context("Regression") object <- ScaleData( object = object, vars.to.regress = "nCount_RNA", features = rownames(x = object)[1:10], verbose = FALSE, model.use = "linear") test_that("Linear regression works as expected", { 
expect_equal(dim(x = GetAssayData(object = object[["RNA"]], slot = "scale.data")), c(10, 80)) expect_equal(GetAssayData(object = object[["RNA"]], slot = "scale.data")[1, 1], -0.6436435, tolerance = 1e-6) expect_equal(GetAssayData(object = object[["RNA"]], slot = "scale.data")[5, 25], -0.09035383, tolerance = 1e-6) expect_equal(GetAssayData(object = object[["RNA"]], slot = "scale.data")[10, 80], -0.2723782, tolerance = 1e-6) }) object <- ScaleData( object, vars.to.regress = "nCount_RNA", features = rownames(x = object)[1:10], verbose = FALSE, model.use = "negbinom") test_that("Negative binomial regression works as expected", { expect_equal(dim(x = GetAssayData(object = object[["RNA"]], slot = "scale.data")), c(10, 80)) expect_equal(GetAssayData(object = object[["RNA"]], slot = "scale.data")[1, 1], -0.5888811, tolerance = 1e-6) expect_equal(GetAssayData(object = object[["RNA"]], slot = "scale.data")[5, 25], -0.2553394, tolerance = 1e-6) expect_equal(GetAssayData(object = object[["RNA"]], slot = "scale.data")[10, 80], -0.1921429, tolerance = 1e-6) }) test_that("Regression error handling checks out", { expect_error(ScaleData(object, vars.to.regress = "nCount_RNA", model.use = "not.a.model", verbose = FALSE)) }) object <- ScaleData( object, vars.to.regress = "nCount_RNA", features = rownames(x = object)[1:10], verbose = FALSE, model.use = "poisson") test_that("Poisson regression works as expected", { expect_equal(dim(x = GetAssayData(object = object[["RNA"]], slot = "scale.data")), c(10, 80)) expect_equal(GetAssayData(object = object[["RNA"]], slot = "scale.data")[1, 1], -1.011717, tolerance = 1e-6) expect_equal(GetAssayData(object = object[["RNA"]], slot = "scale.data")[5, 25], 0.05575307, tolerance = 1e-6) expect_equal(GetAssayData(object = object[["RNA"]], slot = "scale.data")[10, 80], -0.1662119, tolerance = 1e-6) }) #Tests for SampleUMI #-------------------------------------------------------------------------------- context("SampleUMI") downsampled.umis <- SampleUMI( data = GetAssayData(object = object, slot = "counts"), max.umi = 100, verbose = FALSE ) downsampled.umis.p.cell <- SampleUMI( data = GetAssayData(object = object, slot = "counts"), max.umi = seq(50, 1640, 20), verbose = FALSE, upsample = TRUE ) test_that("SampleUMI gives reasonable downsampled/upsampled UMI counts", { expect_true(!any(colSums(x = downsampled.umis) < 30, colSums(x = downsampled.umis) > 120)) expect_error(SampleUMI(data = GetAssayData(object = object, slot = "raw.data"), max.umi = rep(1, 5))) expect_true(!is.unsorted(x = colSums(x = downsampled.umis.p.cell))) expect_error(SampleUMI( data = GetAssayData(object = object, slot = "counts"), max.umi = seq(50, 900, 10), verbose = FALSE, upsample = TRUE )) }) # Tests for FindVariableFeatures # -------------------------------------------------------------------------------- context("FindVariableFeatures") object <- FindVariableFeatures(object = object, selection.method = "mean.var.plot", verbose = FALSE) test_that("mean.var.plot selection option returns expected values", { expect_equal(VariableFeatures(object = object)[1:4], c("PTGDR", "SATB1", "ZNF330", "S100B")) expect_equal(length(x = VariableFeatures(object = object)), 20) expect_equal(HVFInfo(object = object[["RNA"]], selection.method = 'mvp')$mean[1:2], c(8.328927, 8.444462), tolerance = 1e-6) expect_equal(HVFInfo(object = object[["RNA"]], selection.method = 'mvp')$dispersion[1:2], c(10.552507, 10.088223), tolerance = 1e-6) expect_equal(as.numeric(HVFInfo(object = object[["RNA"]], selection.method = 
'mvp')$dispersion.scaled[1:2]), c(0.1113214, -0.1332181523), tolerance = 1e-6) }) object <- FindVariableFeatures(object, selection.method = "dispersion", verbose = FALSE) test_that("dispersion selection option returns expected values", { expect_equal(VariableFeatures(object = object)[1:4], c("PCMT1", "PPBP", "LYAR", "VDAC3")) expect_equal(length(x = VariableFeatures(object = object)), 230) expect_equal(HVFInfo(object = object[["RNA"]], selection.method = 'mvp')$mean[1:2], c(8.328927, 8.444462), tolerance = 1e-6) expect_equal(HVFInfo(object = object[["RNA"]], selection.method = 'mvp')$dispersion[1:2], c(10.552507, 10.088223), tolerance = 1e-6) expect_equal(as.numeric(HVFInfo(object = object[["RNA"]], selection.method = 'mvp')$dispersion.scaled[1:2]), c(0.1113214, -0.1332181523), tolerance = 1e-6) expect_true(!is.unsorted(rev(HVFInfo(object = object[["RNA"]], selection.method = 'mvp')[VariableFeatures(object = object), "dispersion"]))) }) object <- FindVariableFeatures(object, selection.method = "vst", verbose = FALSE) test_that("vst selection option returns expected values", { expect_equal(VariableFeatures(object = object)[1:4], c("PPBP", "IGLL5", "VDAC3", "CD1C")) expect_equal(length(x = VariableFeatures(object = object)), 230) expect_equal(unname(object[["RNA"]][["vst.variance", drop = TRUE]][1:2]), c(1.0251582, 1.2810127), tolerance = 1e-6) expect_equal(unname(object[["RNA"]][["vst.variance.expected", drop = TRUE]][1:2]), c(1.1411616, 2.7076228), tolerance = 1e-6) expect_equal(unname(object[["RNA"]][["vst.variance.standardized", drop = TRUE]][1:2]), c(0.8983463, 0.4731134), tolerance = 1e-6) expect_true(!is.unsorted(rev(object[["RNA"]][["vst.variance.standardized", drop = TRUE]][VariableFeatures(object = object)]))) }) # Tests for internal functions # ------------------------------------------------------------------------------ norm.fxn <- function(x) {x / mean(x)} test_that("CustomNormalize works as expected", { expect_equal( CustomNormalize(data = pbmc.test, custom_function = norm.fxn, margin = 2), apply(X = pbmc.test, MARGIN = 2, FUN = norm.fxn) ) expect_equal( CustomNormalize(data = as.matrix(pbmc.test), custom_function = norm.fxn, margin = 2), apply(X = pbmc.test, MARGIN = 2, FUN = norm.fxn) ) expect_equal( CustomNormalize(data = as.data.frame(as.matrix(pbmc.test)), custom_function = norm.fxn, margin = 2), apply(X = pbmc.test, MARGIN = 2, FUN = norm.fxn) ) expect_equal( CustomNormalize(data = pbmc.test, custom_function = norm.fxn, margin = 1), t(apply(X = pbmc.test, MARGIN = 1, FUN = norm.fxn)) ) expect_error(CustomNormalize(data = pbmc.test, custom_function = norm.fxn, margin = 10)) }) # Tests for SCTransform # ------------------------------------------------------------------------------ context("SCTransform") object <- suppressWarnings(SCTransform(object = object, verbose = FALSE)) test_that("SCTransform wrapper works as expected", { expect_true("SCT" %in% names(object)) expect_equal(as.numeric(colSums(GetAssayData(object = object[["SCT"]], slot = "scale.data"))[1]), 11.40288448) expect_equal(as.numeric(rowSums(GetAssayData(object = object[["SCT"]], slot = "scale.data"))[5]), 0) expect_equal(as.numeric(colSums(GetAssayData(object = object[["SCT"]], slot = "data"))[1]), 57.7295742, tolerance = 1e-6) expect_equal(as.numeric(rowSums(GetAssayData(object = object[["SCT"]], slot = "data"))[5]), 11.74403719, tolerance = 1e-6) expect_equal(as.numeric(colSums(GetAssayData(object = object[["SCT"]], slot = "counts"))[1]), 129) expect_equal(as.numeric(rowSums(GetAssayData(object = 
object[["SCT"]], slot = "counts"))[5]), 28) expect_equal(length(VariableFeatures(object[["SCT"]])), 220) fa <- SCTResults(object = object, assay = "SCT", slot = "feature.attributes") expect_equal(fa["MS4A1", "detection_rate"], 0.15) expect_equal(fa["MS4A1", "gmean"], 0.2027364, tolerance = 1e-6) expect_equal(fa["MS4A1", "variance"], 1.025158, tolerance = 1e-6) expect_equal(fa["MS4A1", "residual_mean"], 0.2362887, tolerance = 1e-6) expect_equal(fa["MS4A1", "residual_variance"], 2.875761, tolerance = 1e-6) }) suppressWarnings(RNGversion(vstr = "3.5.0")) object <- suppressWarnings(SCTransform(object = object, ncells = 40, verbose = FALSE, seed.use = 42)) test_that("SCTransform ncells param works", { expect_true("SCT" %in% names(object)) expect_equal(as.numeric(colSums(GetAssayData(object = object[["SCT"]], slot = "scale.data"))[1]), 12.02126, tolerance = 1e6) expect_equal(as.numeric(rowSums(GetAssayData(object = object[["SCT"]], slot = "scale.data"))[5]), 0) expect_equal(as.numeric(colSums(GetAssayData(object = object[["SCT"]], slot = "data"))[1]), 60.65299, tolerance = 1e-6) expect_equal(as.numeric(rowSums(GetAssayData(object = object[["SCT"]], slot = "data"))[5]), 11.74404, tolerance = 1e-6) expect_equal(as.numeric(colSums(GetAssayData(object = object[["SCT"]], slot = "counts"))[1]), 136) expect_equal(as.numeric(rowSums(GetAssayData(object = object[["SCT"]], slot = "counts"))[5]), 28) expect_equal(length(VariableFeatures(object[["SCT"]])), 220) fa <- SCTResults(object = object, assay = "SCT", slot = "feature.attributes") expect_equal(fa["MS4A1", "detection_rate"], 0.15) expect_equal(fa["MS4A1", "gmean"], 0.2027364, tolerance = 1e-6) expect_equal(fa["MS4A1", "variance"], 1.025158, tolerance = 1e-6) expect_equal(fa["MS4A1", "residual_mean"], 0.2829672, tolerance = 1e-3) expect_equal(fa["MS4A1", "residual_variance"], 3.674079, tolerance = 1e-3) }) suppressWarnings(object[["SCT_SAVE"]] <- object[["SCT"]]) object[["SCT"]] <- SetAssayData(object = object[["SCT"]], slot = "scale.data", new.data = GetAssayData(object = object[["SCT"]], slot = "scale.data")[1:100, ]) object <- GetResidual(object = object, features = rownames(x = object), verbose = FALSE) test_that("GetResidual works", { expect_equal(dim(GetAssayData(object = object[["SCT"]], slot = "scale.data")), c(220, 80)) expect_equal( GetAssayData(object = object[["SCT"]], slot = "scale.data"), GetAssayData(object = object[["SCT_SAVE"]], slot = "scale.data") ) expect_warning(GetResidual(object, features = "asd")) }) Seurat/tests/testthat/test_visualization.R0000644000176200001440000000160013712563445020611 0ustar liggesusers# Tests for functions in visualization.R set.seed(42) # Tests for visualization utilities # ------------------------------------------------------------------------------ pbmc_small[["tsne_new"]] <- CollapseEmbeddingOutliers(pbmc_small, reduction = "tsne", reduction.key = 'tsne_', outlier.sd = 0.5) test_that("CollapseEmbeddingOutliers works", { expect_equal(Embeddings(pbmc_small[["tsne_new"]])[1, 1], -12.59713, tolerance = 1e-6) expect_equal(colSums(x = Embeddings(object = pbmc_small[["tsne_new"]])), c(-219.9218, 182.9215), check.attributes = FALSE, tolerance = 1e-5) }) test_that("DiscretePalette works", { isColors <- function(x) { all(grepl("#[0-9A-Fa-f]{6}", x)) } expect_true(isColors(DiscretePalette(26))) expect_true(isColors(DiscretePalette(32))) expect_true(isColors(DiscretePalette(36))) expect_warning(DiscretePalette(50), "Not enough colours") }) 
Seurat/tests/testthat/test_modularity_optimizer.R0000644000176200001440000001117713712563445022215 0ustar liggesusers# Tests to verify the RCpp version of ModularityOptimizer produces the same # results as the java version. # Equivalent java commands are given above. context("ModularityOptimizer") # The "karate club" network available from the ModularityOptimizer website at: # http://www.ludowaltman.nl/slm/ node1 <- c(0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 2, 2, 2, 2, 2, 2, 2, 2, 3, 3, 3, 4, 4, 5, 5, 5, 6, 8, 8, 8, 9, 13, 14, 14, 15, 15, 18, 18, 19, 20, 20, 22, 22, 23, 23, 23, 23, 23, 24, 24, 24, 25, 26, 26, 27, 28, 28, 29, 29, 30, 30, 31, 31, 32) node2 <- c(1, 2, 3, 4, 5, 6, 7, 8, 10, 11, 12, 13, 17, 19, 21, 31, 2, 3, 7, 13, 17, 19, 21, 30, 3, 7, 8, 9, 13, 27, 28, 32, 7, 12, 13, 6, 10, 6, 10, 16, 16, 30, 32, 33, 33, 33, 32, 33, 32, 33, 32, 33, 33, 32, 33, 32, 33, 25, 27, 29, 32, 33, 25, 27, 31, 31, 29, 33, 33, 31, 33, 32, 33, 32, 33, 32, 33, 33) dim_s <- max(max(node1), max(node2)) + 1 # Note we want to represent network in the lower diagonal. connections <- sparseMatrix(i = node2 + 1, j = node1 + 1, x = 1.0) # Result from equivalent command to # java -jar ModularityOptimizer.jar karate_club_network.txt communities.txt 1 1.0 1 1 1 564 0 test_that("Algorithm 1", { expected <- c(1, 1, 1, 1, 2, 2, 2, 1, 0, 1, 2, 1, 1, 1, 0, 0, 2, 1, 0, 1, 0, 1, 0, 0, 3, 3, 0, 0, 3, 0, 0, 3, 0, 0) s <- Seurat:::RunModularityClusteringCpp( SNN = connections, modularityFunction = 1, resolution = 1.0, algorithm = 1, nRandomStarts = 1, nIterations = 1, randomSeed = 564, printOutput = 0, "" ) expect_equal(expected, s) }) #java -jar ModularityOptimizer.jar karate_club_network.txt communities.txt 1 1.0 2 1 1 2 0 test_that("Algorithm 2", { expected <- c(1, 1, 1, 1, 3, 3, 3, 1, 0, 0, 3, 1, 1, 1, 0, 0, 3, 1, 0, 1, 0, 1, 0, 2, 2, 2, 0, 2, 2, 0, 0, 2, 0, 0) s <- Seurat:::RunModularityClusteringCpp( SNN = connections, modularityFunction = 1, resolution = 1.0, algorithm = 2, nRandomStarts = 1, nIterations = 1, randomSeed = 2, printOutput = 0, "" ) expect_equal(expected, s) }) #java -jar ModularityOptimizer.jar karate_club_network.txt communities.txt 1 1.0 3 1 1 56464 0 test_that("Algorithm 3", { expected <- c(1, 1, 1, 1, 3, 3, 3, 1, 0, 0, 3, 1, 1, 1, 0, 0, 3, 1, 0, 1, 0, 1, 0, 2, 2, 2, 0, 2, 2, 0, 0, 2, 0, 0) s <- Seurat:::RunModularityClusteringCpp( SNN = connections, modularityFunction = 1, resolution = 1.0, algorithm = 3, nRandomStarts = 1, nIterations = 1, randomSeed = 56464, printOutput = 0, "") expect_equal(expected, s) }) test_that("Low Resolution", { e1 <- rep(0, 34) # java -jar ModularityOptimizer.jar karate_club_network.txt outjava.txt 1 0.05 3 1 10 10 0 s <- Seurat:::RunModularityClusteringCpp( SNN = connections, modularityFunction = 1, resolution = 0.05, algorithm = 3, nRandomStarts = 1, nIterations = 10, randomSeed = 10, printOutput = 0, "" ) expect_equal(s, e1) # java -jar ModularityOptimizer.jar karate_club_network.txt outjava.txt 2 0.05 3 1 10 10 0 s2 <- Seurat:::RunModularityClusteringCpp( SNN = connections, modularityFunction = 2, resolution=0.05, algorithm = 3, nRandomStarts = 1, nIterations = 10, randomSeed = 10, printOutput = 0, "" ) e2 = c(0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 1, 1, 0, 0, 1, 0, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1) expect_equal(s2, e2) }) test_that("EdgeWeights", { # Make 1, 4, 5 and 20 a community by weighting them c2 <- connections c2[5, 4] <- 3.0 c2[5, 1] <- 5.0 c2[4, 1] <- 8.0 c2[20, 5] <- 8.0 c2[20, 4] <- 5.0 c2[20, 1] <- 5.0 # java -jar 
ModularityOptimizer.jar weighted_karate_club_network.txt outjava.txt 1 1.0 3 1 10 40 1 s2 <- Seurat:::RunModularityClusteringCpp( SNN = c2, modularityFunction = 1, resolution = 1.0, algorithm = 3, nRandomStarts = 1, nIterations = 10, randomSeed = 40, printOutput = 0, "" ) exp <- c(2, 1, 1, 2, 2, 3, 3, 1, 0, 1, 3, 2, 2, 1, 0, 0, 3, 1, 0, 2, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0) expect_equal(s2, exp) }) # test_that("pbmc_small network", { # observed <- as.numeric(FindClusters( # object = pbmc_small, # reduction.type = "pca", # dims.use = 1:10, # resolution = 1.1, # save.SNN = TRUE, # print.output = 0)@ident) # expected = c(1,1,1,1,1,1,1,1,1,1,6,1,6,1,2,2,1,6,2,1,2,2,2,2,2,2,2,2,2,6,3,5,3,3,3,3,3,3,3,3,5,1,1,1,1,1,3,1,3,1,2,1,2,2,6,2,3,2,1,3,5,2,5,5,2,2,2,2,5,3,4,4,4,4,4,4,4,4,4,4) # expect_equal(observed, expected) # }) Seurat/tests/testthat/test_load_10X.R0000644000176200001440000000344414152507372017263 0ustar liggesuserscontext("Read10X") # These tests were added to ensure Seurat was forwards and backwards compatible for 3.0 data dname = "../testdata/cr3.0" test.data <- Read10X(dname) test.data2 <- Read10X(c(dname, dname)) test_that("Cell Ranger 3.0 Data Parsing", { expect_is(test.data, "list") expect_equal(ncol(test.data$`Gene Expression`), .5 * ncol(test.data2$`Gene Expression`)) expect_equal(ncol(test.data$`Antibody Capture`), .5 * ncol(test.data2$`Antibody Capture`)) expect_equal(colnames(test.data2[[1]])[6], "2_AAAGTAGCACAGTCGC-1") expect_equal(test.data$`Gene Expression`[2,2], 1000) }) # Tests of Pre-3.0 Data test.data3 <- Read10X("../testdata/") test_that("Read10X creates sparse matrix", { expect_is(test.data3, "dgCMatrix") expect_equal(colnames(test.data3)[1], "ATGCCAGAACGACT-1") expect_equal(rownames(test.data3)[1], "MS4A1") }) test_that("Read10X handles missing files properly", { expect_error(Read10X(".")) expect_error(Read10X("./notadir/")) expect_error(Read10X(dname, gene.column = 10)) }) # Tests for reading in spatial 10x data if (requireNamespace("hdf5r", quietly = TRUE)) { context("Load10X_Spatial") dname <- "../testdata/visium" # txsp <- Read10X_Spatial(outs_path = "../testdata/visium/") txsp <- Load10X_Spatial(data.dir = '../testdata/visium') test_that("10x Spatial Data Parsing", { expect_is(txsp, "Seurat") expect_equal(ncol(x = txsp), 2695) expect_equal(nrow(x = txsp), 100) expect_equal(Cells(x = txsp)[1], "AAACAAGTATCTCCCA-1") expect_equal(Assays(object = txsp), "Spatial") expect_equal(GetAssayData(object = txsp[["Spatial"]], slot = "counts")[5, 9], 1) }) test_that("Read10X_Spatial handles missing files properly", { expect_error(Load10X_Spatial(data.dir = ".")) expect_error(Load10X_Spatial(data.dir = "./notadir/")) }) } Seurat/tests/testthat/test_dimensional_reduction.R0000644000176200001440000000412413712563445022272 0ustar liggesuserscontext("test-dimensional_reduction") test_that("different ways of passing distance matrix", { # Generate dummy data exp matrix set.seed(1) dummyexpMat <- matrix(data = sample(x = c(1:50), size = 1e4, replace = TRUE), ncol = 100, nrow = 100) colnames(dummyexpMat) <- paste0("cell", seq(ncol(dummyexpMat))) row.names(dummyexpMat) <- paste0("gene", seq(nrow(dummyexpMat))) # Create Seurat object for testing obj <- CreateSeuratObject(counts = dummyexpMat) # Manually make a distance object to test distMat <- dist(t(dummyexpMat)) expect_equivalent( suppressWarnings(expr = RunTSNE(obj, distance.matrix = distMat)), suppressWarnings(expr = RunTSNE(obj, distance.matrix = as.matrix(distMat))) ) expect_equivalent( suppressWarnings(expr = 
RunTSNE(obj, distance.matrix = distMat)@reductions$tsne), suppressWarnings(expr = RunTSNE(distMat, assay = "RNA")) ) expect_equivalent( suppressWarnings(expr = RunTSNE(obj, distance.matrix = distMat)@reductions$tsne), suppressWarnings(expr = RunTSNE(as.matrix(distMat), assay = "RNA", is_distance = TRUE)) ) }) test_that("pca returns total variance (see #982)", { # Generate dummy data exp matrix set.seed(seed = 1) dummyexpMat <- matrix( data = sample(x = c(1:50), size = 1e4, replace = TRUE), ncol = 100, nrow = 100 ) colnames(x = dummyexpMat) <- paste0("cell", seq(ncol(x = dummyexpMat))) row.names(x = dummyexpMat) <- paste0("gene", seq(nrow(x = dummyexpMat))) # Create Seurat object for testing obj <- CreateSeuratObject(counts = dummyexpMat) # Scale and compute PCA, using RunPCA obj <- ScaleData(object = obj, verbose = FALSE) pca_result <- suppressWarnings(expr = RunPCA( object = obj, features = rownames(x = obj), verbose = FALSE )) # Using stats::prcomp scaled_data <- Seurat::GetAssayData(object = obj, slot = "scale.data") prcomp_result <- stats::prcomp(scaled_data, center = FALSE, scale. = FALSE) # Compare expect_equivalent(slot(object = pca_result[["pca"]], name = "misc")$total.variance, sum(prcomp_result$sdev^2)) }) Seurat/tests/testdata/0000755000176200001440000000000013712563445014502 5ustar liggesusersSeurat/tests/testdata/cr3.0/0000755000176200001440000000000013712563445015327 5ustar liggesusersSeurat/tests/testdata/cr3.0/barcodes.tsv.gz0000644000176200001440000000012713712563445020266 0ustar liggesusers?Fw[barcodes.tsv% ./Zx& DCB}{Ņo]+5j{P`ܡC0VPh3T:r|V{̤ҧ(.s>?5nY0'2K ?Seurat/tests/testdata/matrix.mtx0000644000176200001440000012076213712563445016550 0ustar liggesusers%%MatrixMarket matrix coordinate integer general 240 80 4814 2 1 1 6 1 1 9 1 3 12 1 1 23 1 1 31 1 4 33 1 3 35 1 1 36 1 5 38 1 1 40 1 1 44 1 3 45 1 3 47 1 1 48 1 3 50 1 1 51 1 1 52 1 1 53 1 1 54 1 2 56 1 1 57 1 2 58 1 1 59 1 2 60 1 1 65 1 1 78 1 4 80 1 1 84 1 1 93 1 1 95 1 1 100 1 1 104 1 1 109 1 1 120 1 1 126 1 1 133 1 1 141 1 1 149 1 2 152 1 2 159 1 1 163 1 2 166 1 3 167 1 1 177 1 1 194 1 1 198 1 1 208 1 1 222 1 1 233 1 1 236 1 1 238 1 1 4 2 1 9 2 7 22 2 1 23 2 1 25 2 2 27 2 1 31 2 4 32 2 3 33 2 7 34 2 1 36 2 2 38 2 1 39 2 1 40 2 1 41 2 5 42 2 2 43 2 2 44 2 2 45 2 3 48 2 1 49 2 2 52 2 2 56 2 3 57 2 2 58 2 1 67 2 1 69 2 3 72 2 2 74 2 2 77 2 1 78 2 4 80 2 2 92 2 1 93 2 1 95 2 1 100 2 1 111 2 1 115 2 1 116 2 1 117 2 1 126 2 1 128 2 1 141 2 1 147 2 1 156 2 1 159 2 1 177 2 1 191 2 1 193 2 1 197 2 1 198 2 2 219 2 1 223 2 1 228 2 1 231 2 1 232 2 1 239 2 1 9 3 11 12 3 1 24 3 1 31 3 4 32 3 2 33 3 11 35 3 2 36 3 1 37 3 1 39 3 1 42 3 1 44 3 1 47 3 1 48 3 2 49 3 1 51 3 2 54 3 1 55 3 1 56 3 2 57 3 2 58 3 1 65 3 9 67 3 1 69 3 2 72 3 2 78 3 4 80 3 2 89 3 1 92 3 1 93 3 1 98 3 1 104 3 1 107 3 1 124 3 1 126 3 1 140 3 1 141 3 1 144 3 1 149 3 1 152 3 1 153 3 2 155 3 2 157 3 1 160 3 1 163 3 1 164 3 1 166 3 3 172 3 4 175 3 1 179 3 1 181 3 1 222 3 1 232 3 1 9 4 13 12 4 1 23 4 6 31 4 5 32 4 2 33 4 13 34 4 1 35 4 2 36 4 2 39 4 1 40 4 1 42 4 1 43 4 2 44 4 6 45 4 4 46 4 2 48 4 5 49 4 4 54 4 1 55 4 2 56 4 3 57 4 3 58 4 1 59 4 2 60 4 1 65 4 8 67 4 1 70 4 2 72 4 4 76 4 1 78 4 5 81 4 1 88 4 1 90 4 1 96 4 2 98 4 1 104 4 2 112 4 1 130 4 1 141 4 1 142 4 3 149 4 2 151 4 3 154 4 1 155 4 1 157 4 1 163 4 2 166 4 3 169 4 1 172 4 2 173 4 1 176 4 1 180 4 1 182 4 1 210 4 1 214 4 1 222 4 1 236 4 3 4 5 1 9 5 3 31 5 4 32 5 3 33 5 3 36 5 2 39 5 36 41 5 2 43 5 1 44 5 5 45 5 2 48 5 2 49 5 3 51 5 2 52 5 1 54 5 54 56 5 2 57 5 2 64 5 1 65 5 1 67 5 3 68 5 1 69 5 1 72 5 1 
75 5 1 78 5 4 89 5 1 90 5 2 99 5 1 100 5 1 101 5 1 104 5 1 128 5 1 131 5 1 137 5 1 140 5 1 141 5 2 144 5 1 146 5 1 149 5 3 151 5 2 153 5 3 154 5 1 159 5 1 161 5 1 163 5 1 166 5 1 172 5 1 176 5 1 180 5 1 182 5 1 183 5 1 198 5 2 211 5 1 217 5 1 222 5 2 223 5 1 232 5 2 233 5 1 4 6 1 9 6 4 14 6 1 23 6 2 24 6 1 31 6 4 32 6 1 33 6 4 34 6 1 35 6 1 37 6 2 42 6 2 43 6 2 44 6 3 45 6 1 46 6 1 47 6 1 48 6 4 49 6 1 50 6 1 54 6 2 55 6 1 57 6 1 60 6 1 66 6 1 69 6 1 72 6 1 78 6 4 80 6 1 81 6 1 93 6 1 100 6 2 101 6 1 126 6 1 128 6 1 135 6 1 144 6 1 146 6 1 149 6 2 151 6 1 153 6 2 155 6 1 156 6 1 159 6 2 166 6 3 172 6 2 182 6 1 194 6 1 200 6 1 223 6 1 231 6 1 232 6 1 236 6 1 9 7 6 23 7 4 31 7 3 32 7 1 33 7 6 34 7 1 36 7 1 37 7 3 38 7 1 41 7 1 42 7 1 44 7 4 45 7 1 48 7 3 49 7 3 50 7 1 54 7 1 55 7 2 59 7 2 60 7 1 65 7 3 69 7 2 70 7 1 78 7 3 96 7 1 101 7 1 113 7 1 125 7 1 127 7 1 132 7 1 149 7 2 152 7 2 156 7 1 166 7 3 179 7 1 210 7 1 213 7 1 236 7 1 238 7 1 4 8 1 9 8 4 23 8 1 24 8 1 31 8 2 32 8 3 33 8 4 35 8 1 36 8 12 37 8 2 38 8 1 40 8 1 41 8 1 43 8 2 45 8 2 46 8 1 47 8 1 48 8 2 49 8 4 50 8 1 53 8 1 54 8 1 55 8 2 58 8 1 59 8 1 65 8 3 71 8 1 74 8 1 78 8 2 81 8 1 85 8 1 92 8 1 100 8 1 120 8 1 128 8 1 141 8 1 149 8 1 152 8 1 154 8 1 156 8 1 159 8 1 161 8 1 163 8 1 166 8 1 172 8 1 182 8 1 183 8 1 222 8 1 233 8 1 9 9 2 31 9 2 32 9 2 33 9 2 35 9 2 37 9 3 38 9 1 42 9 1 43 9 3 44 9 1 45 9 1 47 9 1 48 9 3 49 9 2 51 9 1 54 9 1 56 9 1 57 9 3 58 9 2 59 9 1 78 9 2 80 9 2 81 9 2 83 9 1 89 9 1 93 9 1 95 9 1 96 9 1 100 9 1 112 9 1 126 9 1 142 9 1 148 9 1 156 9 1 159 9 1 163 9 1 172 9 1 179 9 1 191 9 1 236 9 1 2 10 1 9 10 21 12 10 1 23 10 4 25 10 1 31 10 2 32 10 1 33 10 21 34 10 1 35 10 1 36 10 9 38 10 1 40 10 1 41 10 1 43 10 1 44 10 6 45 10 1 47 10 1 53 10 1 54 10 3 55 10 1 59 10 1 65 10 3 69 10 2 72 10 6 74 10 2 78 10 2 80 10 1 98 10 4 100 10 1 101 10 1 115 10 1 141 10 2 146 10 1 149 10 3 150 10 1 154 10 1 156 10 2 159 10 1 166 10 3 172 10 3 182 10 1 210 10 1 231 10 1 1 11 2 2 11 2 4 11 14 5 11 3 6 11 1 7 11 3 9 11 2 13 11 1 15 11 3 20 11 1 21 11 1 22 11 1 23 11 2 24 11 2 26 11 2 28 11 1 29 11 1 30 11 1 33 11 2 43 11 1 54 11 1 86 11 1 90 11 1 93 11 1 95 11 1 100 11 1 121 11 3 126 11 1 128 11 14 129 11 4 132 11 1 133 11 1 134 11 2 143 11 1 159 11 1 233 11 1 1 12 2 2 12 4 3 12 5 4 12 28 6 12 6 7 12 1 8 12 4 9 12 9 10 12 2 11 12 1 12 12 3 14 12 1 16 12 3 17 12 1 18 12 1 25 12 1 26 12 2 27 12 4 33 12 9 43 12 1 44 12 1 45 12 1 48 12 2 54 12 2 55 12 1 57 12 1 68 12 1 72 12 1 75 12 2 81 12 2 84 12 1 89 12 1 90 12 1 93 12 4 98 12 1 101 12 1 107 12 1 112 12 1 121 12 8 124 12 1 126 12 4 128 12 28 129 12 10 130 12 4 132 12 4 133 12 6 134 12 10 138 12 2 141 12 1 143 12 1 146 12 1 155 12 1 156 12 1 161 12 1 163 12 1 165 12 1 166 12 1 169 12 2 172 12 2 180 12 1 181 12 1 182 12 1 210 12 29 217 12 1 222 12 1 236 12 2 1 13 4 2 13 3 3 13 2 4 13 18 5 13 2 6 13 2 8 13 1 9 13 2 11 13 1 12 13 2 13 13 1 15 13 1 21 13 1 23 13 4 25 13 1 26 13 1 28 13 15 33 13 2 37 13 1 45 13 1 48 13 1 66 13 1 81 13 1 82 13 1 98 13 1 100 13 1 108 13 1 121 13 2 128 13 18 129 13 4 130 13 4 132 13 3 133 13 2 134 13 6 142 13 1 146 13 1 149 13 1 154 13 2 155 13 1 159 13 1 166 13 2 182 13 1 215 13 1 230 13 1 231 13 1 1 14 4 2 14 3 3 14 2 4 14 7 5 14 4 6 14 2 8 14 1 9 14 4 10 14 1 14 14 1 16 14 1 17 14 2 21 14 1 23 14 1 29 14 1 33 14 4 48 14 1 54 14 1 64 14 2 93 14 1 121 14 2 126 14 1 128 14 7 129 14 4 130 14 1 133 14 2 134 14 1 143 14 1 178 14 1 180 14 1 211 14 2 1 15 2 2 15 2 3 15 5 4 15 15 6 15 2 7 15 2 8 15 2 9 15 4 10 15 1 12 15 1 15 15 1 18 15 1 22 15 2 24 15 2 25 
15 2 27 15 1 33 15 4 36 15 1 47 15 1 48 15 5 56 15 1 61 15 1 72 15 1 81 15 1 84 15 1 98 15 1 121 15 5 128 15 15 129 15 8 132 15 4 133 15 2 134 15 5 138 15 1 142 15 2 144 15 1 152 15 1 153 15 2 154 15 1 155 15 1 156 15 2 166 15 1 182 15 1 194 15 1 196 15 1 198 15 1 222 15 1 231 15 1 236 15 1 1 16 3 2 16 3 3 16 8 4 16 28 6 16 8 8 16 2 10 16 1 12 16 2 14 16 3 15 16 1 17 16 1 19 16 1 20 16 1 25 16 3 27 16 1 30 16 1 32 16 2 37 16 1 47 16 1 56 16 1 61 16 1 71 16 1 72 16 1 79 16 1 101 16 1 121 16 9 124 16 4 128 16 28 129 16 23 130 16 8 132 16 8 133 16 8 134 16 16 140 16 3 141 16 2 146 16 1 149 16 2 156 16 5 158 16 1 164 16 1 172 16 1 176 16 1 194 16 1 198 16 2 228 16 1 233 16 2 1 17 3 2 17 1 3 17 1 4 17 7 5 17 3 6 17 2 7 17 2 8 17 1 9 17 3 11 17 1 12 17 2 13 17 1 19 17 2 20 17 1 23 17 4 26 17 1 30 17 1 33 17 3 44 17 2 45 17 1 58 17 1 67 17 2 100 17 1 101 17 1 107 17 1 124 17 1 128 17 7 129 17 7 130 17 1 132 17 1 133 17 2 134 17 5 138 17 1 149 17 4 159 17 1 182 17 1 233 17 1 1 18 4 2 18 2 3 18 5 4 18 26 5 18 3 6 18 2 7 18 1 8 18 2 9 18 6 11 18 1 13 18 2 16 18 1 18 18 1 19 18 1 20 18 1 22 18 2 23 18 2 25 18 1 27 18 1 28 18 23 29 18 1 33 18 6 45 18 3 48 18 1 49 18 1 61 18 1 64 18 1 66 18 1 72 18 1 81 18 1 93 18 1 100 18 2 121 18 5 124 18 1 126 18 1 128 18 26 130 18 5 132 18 2 133 18 2 134 18 11 138 18 1 140 18 1 143 18 2 154 18 1 159 18 2 172 18 1 175 18 1 183 18 1 190 18 1 211 18 1 229 18 1 1 19 2 2 19 2 3 19 5 4 19 10 5 19 3 6 19 1 7 19 1 9 19 5 10 19 1 12 19 1 13 19 2 14 19 1 17 19 1 21 19 1 22 19 1 23 19 6 24 19 1 26 19 1 33 19 5 36 19 1 44 19 1 45 19 1 47 19 1 49 19 1 56 19 1 62 19 1 63 19 1 90 19 1 93 19 1 98 19 1 102 19 1 121 19 1 126 19 1 128 19 10 129 19 4 132 19 2 133 19 1 134 19 5 135 19 1 143 19 1 151 19 1 154 19 1 166 19 2 180 19 1 221 19 1 228 19 1 231 19 1 233 19 1 1 20 3 2 20 5 3 20 12 4 20 16 5 20 2 6 20 2 7 20 2 8 20 1 9 20 7 11 20 1 16 20 2 18 20 3 19 20 1 23 20 2 24 20 1 25 20 1 26 20 1 30 20 1 33 20 7 37 20 1 41 20 1 45 20 1 49 20 1 56 20 1 75 20 1 80 20 2 81 20 1 98 20 1 100 20 2 121 20 5 124 20 1 128 20 16 129 20 6 130 20 1 132 20 4 133 20 2 134 20 8 135 20 1 143 20 1 144 20 1 149 20 1 152 20 1 159 20 2 166 20 3 196 20 1 4 21 7 7 21 1 9 21 1 23 21 3 33 21 1 36 21 1 44 21 1 49 21 1 50 21 1 64 21 2 68 21 1 81 21 1 90 21 2 91 21 18 92 21 30 93 21 50 94 21 1 95 21 10 96 21 14 97 21 3 98 21 3 99 21 4 100 21 15 101 21 1 104 21 2 105 21 1 107 21 1 108 21 1 109 21 2 110 21 1 111 21 3 112 21 5 113 21 12 115 21 2 119 21 1 120 21 5 125 21 12 126 21 50 128 21 7 130 21 1 134 21 2 135 21 13 136 21 4 140 21 2 146 21 1 147 21 4 151 21 3 152 21 5 153 21 6 154 21 1 156 21 4 157 21 3 158 21 6 159 21 15 160 21 1 161 21 1 163 21 2 164 21 6 166 21 6 170 21 1 172 21 6 173 21 1 177 21 2 178 21 1 182 21 4 186 21 1 198 21 1 211 21 2 212 21 1 217 21 1 218 21 1 222 21 1 4 22 22 6 22 3 8 22 1 32 22 2 44 22 2 54 22 2 64 22 1 70 22 1 74 22 1 76 22 1 82 22 1 91 22 5 92 22 12 93 22 29 94 22 2 95 22 6 96 22 13 97 22 2 98 22 13 99 22 7 100 22 9 101 22 2 103 22 1 104 22 14 105 22 1 106 22 1 107 22 3 108 22 1 109 22 27 110 22 1 111 22 4 112 22 1 113 22 6 116 22 4 118 22 1 120 22 3 121 22 13 124 22 4 125 22 6 126 22 29 128 22 22 129 22 18 130 22 5 131 22 1 132 22 8 133 22 3 134 22 12 135 22 28 138 22 2 140 22 3 144 22 1 146 22 3 147 22 1 148 22 7 151 22 6 152 22 7 153 22 5 155 22 1 156 22 15 158 22 4 159 22 9 160 22 5 161 22 1 163 22 10 164 22 3 165 22 1 166 22 4 167 22 2 170 22 1 171 22 1 172 22 15 173 22 1 174 22 2 176 22 1 177 22 1 180 22 1 182 22 5 198 22 1 211 22 1 231 22 2 3 23 1 7 23 1 12 23 1 13 23 1 74 23 
1 75 23 1 91 23 25 92 23 51 93 23 25 94 23 2 95 23 5 96 23 3 98 23 5 99 23 1 100 23 1 101 23 6 102 23 1 104 23 10 105 23 1 107 23 1 111 23 2 113 23 2 114 23 1 118 23 1 121 23 2 124 23 1 125 23 2 126 23 25 129 23 1 132 23 1 134 23 1 135 23 15 137 23 1 140 23 1 141 23 1 147 23 1 148 23 1 151 23 1 152 23 6 153 23 1 155 23 1 156 23 8 159 23 1 163 23 4 164 23 4 172 23 2 174 23 4 177 23 1 180 23 1 182 23 3 198 23 2 4 24 10 8 24 1 9 24 1 24 24 1 33 24 1 44 24 2 74 24 1 91 24 5 92 24 22 93 24 49 94 24 4 95 24 9 96 24 10 99 24 6 100 24 5 103 24 4 104 24 8 107 24 2 108 24 1 109 24 1 110 24 2 111 24 1 113 24 1 114 24 1 115 24 1 116 24 4 121 24 1 124 24 1 125 24 1 126 24 49 128 24 10 129 24 2 132 24 1 134 24 5 135 24 11 140 24 6 142 24 1 146 24 3 148 24 1 151 24 4 152 24 5 153 24 5 154 24 1 156 24 5 158 24 2 159 24 5 163 24 2 164 24 6 172 24 4 177 24 1 182 24 5 195 24 3 198 24 3 237 24 1 1 25 1 4 25 6 9 25 1 13 25 1 31 25 1 32 25 1 33 25 1 42 25 1 44 25 1 45 25 3 48 25 1 54 25 1 56 25 1 58 25 1 61 25 1 66 25 1 72 25 3 74 25 1 78 25 1 81 25 1 91 25 25 92 25 85 93 25 98 94 25 1 95 25 7 96 25 16 97 25 1 98 25 11 99 25 5 100 25 7 101 25 36 102 25 2 103 25 1 104 25 11 105 25 1 106 25 1 108 25 1 109 25 1 110 25 1 111 25 1 112 25 1 113 25 6 114 25 2 115 25 14 116 25 4 117 25 1 119 25 1 120 25 3 125 25 6 126 25 98 128 25 6 131 25 2 134 25 1 135 25 13 140 25 5 141 25 2 142 25 2 143 25 2 146 25 1 147 25 1 148 25 1 151 25 8 152 25 4 153 25 3 154 25 2 156 25 4 157 25 3 159 25 7 163 25 2 164 25 1 165 25 1 166 25 1 168 25 1 169 25 2 172 25 7 177 25 1 181 25 1 182 25 12 183 25 1 198 25 5 210 25 1 212 25 1 239 25 1 36 26 1 91 26 6 92 26 3 93 26 11 95 26 1 96 26 4 99 26 1 100 26 3 101 26 1 102 26 1 103 26 1 104 26 4 105 26 1 107 26 1 108 26 1 109 26 1 112 26 1 114 26 2 115 26 1 116 26 1 117 26 1 119 26 1 121 26 1 126 26 11 129 26 3 135 26 7 136 26 1 140 26 1 146 26 1 151 26 3 152 26 3 153 26 2 156 26 2 159 26 3 160 26 2 163 26 2 164 26 2 166 26 1 172 26 3 177 26 1 182 26 1 191 26 1 198 26 1 237 26 1 4 27 4 6 27 1 23 27 1 38 27 1 44 27 1 48 27 1 58 27 1 64 27 1 72 27 1 82 27 1 91 27 24 92 27 54 93 27 59 94 27 1 95 27 1 96 27 13 97 27 1 98 27 2 99 27 6 100 27 4 101 27 5 102 27 4 103 27 7 104 27 6 105 27 1 106 27 1 110 27 1 114 27 1 115 27 2 116 27 3 117 27 3 119 27 1 120 27 1 126 27 59 128 27 4 133 27 1 134 27 3 135 27 37 140 27 3 142 27 1 146 27 1 147 27 5 148 27 2 151 27 5 152 27 1 153 27 1 156 27 8 157 27 1 158 27 1 159 27 4 160 27 4 163 27 1 164 27 4 171 27 1 172 27 6 174 27 3 177 27 1 180 27 1 182 27 15 183 27 1 198 27 2 206 27 1 211 27 1 215 27 1 225 27 1 228 27 1 236 27 1 3 28 1 4 28 3 24 28 1 48 28 1 56 28 1 58 28 1 91 28 40 92 28 55 93 28 28 94 28 1 95 28 2 96 28 12 98 28 3 99 28 4 100 28 4 102 28 1 103 28 1 104 28 7 105 28 1 106 28 2 107 28 1 111 28 2 114 28 3 117 28 2 118 28 2 120 28 5 126 28 28 128 28 3 129 28 1 131 28 1 135 28 5 140 28 1 141 28 1 142 28 1 147 28 1 152 28 2 153 28 1 154 28 1 156 28 2 157 28 1 159 28 4 160 28 1 163 28 6 164 28 4 166 28 1 168 28 2 170 28 1 178 28 1 182 28 2 193 28 1 228 28 1 231 28 1 233 28 1 4 29 7 6 29 1 8 29 2 39 29 1 43 29 1 59 29 1 61 29 1 72 29 1 79 29 1 80 29 1 91 29 16 92 29 35 93 29 34 94 29 3 95 29 8 96 29 19 97 29 1 98 29 5 99 29 5 100 29 11 101 29 3 102 29 1 103 29 1 104 29 22 107 29 1 108 29 2 109 29 1 110 29 2 111 29 15 112 29 2 113 29 5 115 29 1 116 29 2 117 29 1 121 29 7 124 29 1 125 29 5 126 29 34 128 29 7 129 29 7 131 29 1 132 29 4 133 29 1 134 29 5 135 29 20 136 29 2 137 29 1 140 29 4 141 29 2 142 29 1 143 29 1 146 29 2 148 29 2 149 29 3 151 29 7 152 29 10 
153 29 6 154 29 2 155 29 2 156 29 11 157 29 1 158 29 3 159 29 11 160 29 2 162 29 1 163 29 5 164 29 9 166 29 3 167 29 1 170 29 1 171 29 3 172 29 4 174 29 1 175 29 2 182 29 3 191 29 1 193 29 1 198 29 3 225 29 1 227 29 1 228 29 1 2 30 1 4 30 13 9 30 1 13 30 1 25 30 1 33 30 1 36 30 1 37 30 1 45 30 1 48 30 1 51 30 1 54 30 3 64 30 1 70 30 1 81 30 1 84 30 1 88 30 1 91 30 11 92 30 17 93 30 16 95 30 7 96 30 12 98 30 10 99 30 1 100 30 7 101 30 5 103 30 2 104 30 37 106 30 1 107 30 3 108 30 1 109 30 1 110 30 1 111 30 2 112 30 1 113 30 2 114 30 1 115 30 2 118 30 5 120 30 4 121 30 6 124 30 2 125 30 2 126 30 16 128 30 13 129 30 7 130 30 1 132 30 1 134 30 3 135 30 18 136 30 3 140 30 2 141 30 1 146 30 3 147 30 2 148 30 1 149 30 3 151 30 13 152 30 12 153 30 4 156 30 18 157 30 1 158 30 3 159 30 7 160 30 7 161 30 1 163 30 6 164 30 8 166 30 6 168 30 2 172 30 20 174 30 3 175 30 1 176 30 1 177 30 2 180 30 1 182 30 1 186 30 2 187 30 1 198 30 2 203 30 1 206 30 1 210 30 1 211 30 1 232 30 1 237 30 2 2 31 1 9 31 1 22 31 1 32 31 1 33 31 1 36 31 2 39 31 1 47 31 1 48 31 2 54 31 1 57 31 3 61 31 1 62 31 1 64 31 35 68 31 4 80 31 1 84 31 1 91 31 1 96 31 3 101 31 1 104 31 3 129 31 2 135 31 1 140 31 1 141 31 3 146 31 1 149 31 1 152 31 1 154 31 2 155 31 1 156 31 3 161 31 6 163 31 6 164 31 8 166 31 8 176 31 2 183 31 2 193 31 1 201 31 1 211 31 35 212 31 27 213 31 2 214 31 35 215 31 5 216 31 7 217 31 4 218 31 5 219 31 14 221 31 1 222 31 1 225 31 4 226 31 2 227 31 1 228 31 1 229 31 1 230 31 3 231 31 2 232 31 3 233 31 3 235 31 4 236 31 7 237 31 1 239 31 3 4 32 1 24 32 1 31 32 7 35 32 2 48 32 2 49 32 1 62 32 3 64 32 14 65 32 2 68 32 4 69 32 1 70 32 2 72 32 2 78 32 7 80 32 1 84 32 1 87 32 1 98 32 1 104 32 4 121 32 1 128 32 1 129 32 4 132 32 1 134 32 2 140 32 2 145 32 1 149 32 4 153 32 1 157 32 1 161 32 2 166 32 2 172 32 1 174 32 1 177 32 2 182 32 1 183 32 1 211 32 14 212 32 2 213 32 5 215 32 3 217 32 4 218 32 3 219 32 1 220 32 3 221 32 3 222 32 1 223 32 4 224 32 1 226 32 1 228 32 1 231 32 2 232 32 1 234 32 1 235 32 1 236 32 3 239 32 5 2 33 2 26 33 1 32 33 1 54 33 1 57 33 1 62 33 2 64 33 12 68 33 2 70 33 5 74 33 1 77 33 1 82 33 1 84 33 1 87 33 1 93 33 1 96 33 4 100 33 1 104 33 9 126 33 1 141 33 1 142 33 1 149 33 5 153 33 2 154 33 1 155 33 1 159 33 1 161 33 2 164 33 3 166 33 3 172 33 1 177 33 1 179 33 1 211 33 12 212 33 1 213 33 3 214 33 15 215 33 9 216 33 1 217 33 2 218 33 1 219 33 4 220 33 5 221 33 2 222 33 2 223 33 2 224 33 1 226 33 1 227 33 1 228 33 1 230 33 4 231 33 1 232 33 1 233 33 2 234 33 3 236 33 2 237 33 58 238 33 1 239 33 1 9 34 1 24 34 1 32 34 1 33 34 1 44 34 2 48 34 2 49 34 1 54 34 15 55 34 1 56 34 1 57 34 1 59 34 1 62 34 3 64 34 30 65 34 5 68 34 7 69 34 2 70 34 14 71 34 1 74 34 1 80 34 1 87 34 1 90 34 2 92 34 1 96 34 3 98 34 1 99 34 2 104 34 6 107 34 1 121 34 2 134 34 1 140 34 1 142 34 1 146 34 4 149 34 1 154 34 1 161 34 1 164 34 1 166 34 5 173 34 1 176 34 1 211 34 30 212 34 10 213 34 4 214 34 3 215 34 2 216 34 1 217 34 7 219 34 9 220 34 7 221 34 3 222 34 2 223 34 1 224 34 1 225 34 3 226 34 1 227 34 1 228 34 1 231 34 1 232 34 3 233 34 1 234 34 2 235 34 1 238 34 2 239 34 3 4 35 1 12 35 1 26 35 1 39 35 1 44 35 2 57 35 1 62 35 2 64 35 20 65 35 4 68 35 2 69 35 1 72 35 1 80 35 1 83 35 2 90 35 2 92 35 1 93 35 2 96 35 6 104 35 1 120 35 1 124 35 1 126 35 2 128 35 1 140 35 2 141 35 3 142 35 1 146 35 1 148 35 1 149 35 1 153 35 1 164 35 2 166 35 2 173 35 1 211 35 20 212 35 8 213 35 10 214 35 29 215 35 6 217 35 2 218 35 3 219 35 7 220 35 1 221 35 2 222 35 4 223 35 1 228 35 1 229 35 2 230 35 1 231 35 1 233 35 1 236 35 1 238 35 1 240 
35 10 12 36 1 14 36 1 23 36 1 42 36 1 51 36 1 57 36 3 62 36 4 64 36 27 67 36 2 68 36 4 69 36 1 70 36 29 74 36 2 75 36 1 81 36 1 90 36 1 91 36 1 96 36 7 98 36 1 104 36 3 116 36 1 151 36 1 154 36 1 161 36 1 164 36 5 166 36 1 176 36 1 186 36 1 211 36 27 212 36 5 213 36 8 214 36 11 215 36 3 216 36 1 217 36 4 218 36 1 219 36 10 221 36 4 222 36 1 223 36 2 225 36 1 227 36 1 229 36 1 230 36 2 231 36 1 232 36 2 233 36 3 236 36 3 237 36 1 238 36 3 4 37 1 31 37 1 44 37 1 51 37 1 56 37 1 57 37 4 59 37 2 61 37 1 62 37 8 64 37 28 68 37 3 69 37 1 70 37 1 72 37 1 78 37 1 81 37 1 88 37 2 90 37 1 92 37 1 96 37 3 100 37 1 101 37 1 103 37 1 104 37 14 120 37 1 121 37 1 128 37 1 135 37 1 140 37 1 142 37 1 146 37 4 149 37 1 153 37 1 156 37 1 158 37 1 159 37 1 161 37 2 163 37 1 164 37 6 165 37 1 166 37 5 172 37 1 176 37 3 179 37 1 182 37 2 183 37 1 210 37 2 211 37 28 212 37 10 213 37 12 214 37 22 215 37 6 216 37 5 217 37 3 218 37 1 219 37 10 220 37 3 221 37 8 223 37 3 224 37 1 225 37 7 226 37 1 227 37 2 228 37 1 229 37 1 230 37 3 231 37 3 232 37 2 233 37 1 235 37 4 236 37 3 238 37 2 239 37 1 4 38 1 25 38 1 32 38 2 38 38 1 48 38 1 55 38 1 57 38 2 61 38 1 62 38 6 64 38 10 68 38 3 69 38 2 70 38 7 74 38 1 80 38 1 81 38 1 83 38 3 90 38 1 93 38 1 95 38 1 96 38 4 101 38 1 104 38 2 109 38 1 120 38 1 126 38 1 128 38 1 135 38 1 140 38 2 141 38 1 142 38 1 146 38 1 153 38 1 154 38 1 161 38 1 163 38 1 164 38 6 166 38 1 176 38 1 183 38 1 193 38 1 211 38 10 212 38 7 213 38 10 214 38 15 215 38 8 216 38 4 217 38 3 218 38 2 219 38 2 220 38 1 221 38 6 222 38 2 223 38 2 225 38 4 227 38 2 229 38 1 233 38 2 235 38 2 236 38 5 237 38 2 238 38 1 239 38 2 240 38 1 14 39 1 23 39 4 31 39 1 48 39 3 49 39 1 57 39 1 61 39 1 62 39 1 64 39 25 67 39 1 68 39 2 69 39 1 70 39 5 72 39 1 74 39 1 78 39 1 81 39 2 90 39 1 96 39 5 101 39 1 104 39 1 120 39 2 134 39 1 142 39 1 155 39 1 156 39 1 161 39 2 164 39 1 165 39 1 166 39 3 176 39 1 182 39 1 183 39 1 211 39 25 212 39 4 213 39 3 214 39 18 215 39 2 216 39 1 217 39 2 218 39 1 219 39 4 221 39 1 222 39 1 223 39 2 224 39 1 226 39 1 227 39 1 228 39 1 230 39 1 231 39 3 232 39 1 233 39 1 234 39 1 235 39 1 237 39 1 238 39 2 26 40 1 35 40 2 47 40 1 48 40 1 54 40 1 55 40 1 57 40 1 62 40 11 64 40 27 65 40 7 67 40 1 68 40 5 70 40 25 72 40 2 74 40 1 75 40 1 80 40 1 81 40 1 83 40 2 84 40 2 89 40 1 96 40 15 99 40 1 100 40 4 104 40 4 137 40 1 140 40 1 141 40 3 142 40 1 146 40 1 154 40 2 155 40 1 156 40 1 159 40 4 161 40 6 164 40 6 166 40 2 176 40 2 179 40 3 196 40 1 198 40 2 210 40 1 211 40 27 212 40 11 213 40 13 214 40 18 215 40 5 216 40 1 217 40 5 218 40 1 219 40 7 220 40 2 221 40 11 222 40 3 223 40 6 224 40 51 225 40 3 226 40 1 228 40 25 229 40 3 231 40 1 232 40 1 233 40 2 234 40 2 235 40 2 236 40 1 238 40 1 239 40 2 240 40 1 23 41 7 32 41 1 44 41 2 45 41 2 48 41 1 49 41 1 50 41 1 54 41 2 57 41 2 62 41 1 63 41 1 64 41 31 65 41 8 68 41 2 69 41 1 71 41 1 72 41 1 73 41 1 74 41 1 75 41 1 76 41 1 81 41 1 84 41 1 85 41 1 87 41 1 90 41 2 96 41 2 98 41 5 100 41 1 101 41 1 104 41 1 122 41 1 140 41 2 141 41 1 145 41 1 153 41 3 155 41 1 157 41 1 159 41 1 161 41 2 163 41 1 164 41 3 166 41 7 172 41 1 198 41 1 210 41 1 211 41 31 212 41 3 213 41 1 214 41 10 215 41 4 217 41 2 218 41 3 219 41 6 220 41 6 221 41 1 222 41 1 223 41 2 225 41 1 226 41 1 227 41 1 228 41 1 232 41 2 239 41 3 9 42 1 23 42 1 31 42 2 33 42 1 36 42 1 44 42 1 45 42 2 54 42 1 57 42 1 59 42 1 61 42 1 62 42 4 64 42 22 65 42 5 66 42 1 67 42 1 68 42 3 69 42 1 70 42 14 72 42 2 75 42 2 78 42 2 79 42 1 80 42 1 81 42 2 82 42 1 83 42 1 84 42 1 85 42 1 88 42 1 90 42 1 93 42 
1 98 42 1 100 42 1 101 42 1 104 42 3 111 42 1 126 42 1 129 42 4 130 42 1 138 42 1 141 42 1 144 42 1 153 42 1 154 42 1 155 42 1 156 42 1 159 42 1 166 42 4 172 42 2 179 42 1 193 42 1 206 42 1 211 42 22 213 42 8 215 42 1 217 42 3 219 42 13 221 42 4 223 42 1 231 42 1 232 42 1 233 42 2 234 42 1 235 42 1 236 42 2 237 42 1 238 42 1 9 43 1 23 43 3 31 43 3 33 43 1 36 43 3 44 43 4 49 43 2 54 43 3 55 43 1 56 43 1 57 43 4 61 43 1 62 43 1 63 43 2 64 43 7 65 43 5 67 43 1 68 43 1 69 43 2 70 43 27 71 43 1 72 43 1 74 43 2 77 43 3 78 43 3 80 43 3 81 43 1 85 43 1 86 43 1 89 43 1 90 43 2 93 43 1 96 43 1 98 43 1 107 43 1 113 43 1 125 43 1 126 43 1 137 43 1 146 43 1 154 43 1 156 43 3 157 43 1 166 43 2 211 43 7 213 43 2 217 43 1 221 43 1 227 43 1 231 43 2 236 43 1 9 44 1 22 44 1 33 44 1 36 44 1 45 44 1 48 44 1 54 44 1 61 44 1 62 44 2 63 44 1 64 44 2 67 44 2 68 44 1 70 44 3 72 44 1 74 44 1 75 44 1 81 44 1 82 44 1 84 44 1 86 44 1 88 44 1 89 44 1 96 44 1 149 44 2 166 44 2 180 44 1 182 44 1 193 44 1 211 44 2 213 44 1 214 44 3 217 44 1 221 44 2 236 44 1 238 44 1 9 45 7 12 45 1 23 45 6 24 45 1 25 45 1 31 45 3 33 45 7 36 45 1 42 45 1 44 45 4 46 45 1 48 45 2 49 45 1 57 45 2 62 45 1 63 45 2 64 45 4 65 45 7 67 45 47 69 45 1 70 45 13 72 45 1 73 45 1 75 45 1 76 45 1 77 45 1 78 45 3 80 45 1 81 45 1 84 45 1 86 45 1 87 45 2 88 45 1 89 45 2 90 45 2 100 45 1 101 45 2 121 45 1 129 45 1 152 45 2 153 45 1 155 45 1 159 45 1 163 45 1 166 45 5 167 45 1 172 45 5 182 45 1 211 45 4 221 45 1 223 45 1 224 45 1 4 46 1 9 46 1 23 46 1 26 46 3 31 46 15 33 46 1 36 46 1 43 46 1 44 46 4 49 46 2 54 46 1 58 46 2 62 46 2 64 46 14 65 46 1 68 46 2 69 46 2 70 46 17 71 46 1 72 46 2 75 46 1 77 46 3 78 46 15 82 46 1 84 46 1 86 46 2 87 46 1 90 46 1 121 46 3 128 46 1 129 46 2 132 46 1 134 46 3 149 46 1 154 46 1 155 46 1 163 46 1 166 46 1 172 46 1 177 46 1 211 46 14 215 46 2 217 46 2 221 46 2 231 46 1 236 46 2 4 47 1 31 47 1 32 47 1 35 47 2 43 47 1 48 47 2 54 47 1 61 47 1 62 47 2 64 47 16 65 47 6 66 47 1 67 47 1 68 47 8 69 47 1 70 47 7 72 47 4 74 47 1 76 47 1 77 47 3 78 47 1 81 47 1 83 47 1 84 47 1 95 47 1 98 47 1 101 47 1 104 47 1 128 47 1 141 47 2 146 47 1 154 47 2 157 47 1 161 47 1 166 47 1 176 47 1 177 47 1 182 47 1 211 47 16 212 47 6 214 47 4 215 47 9 217 47 8 219 47 6 220 47 10 221 47 2 222 47 1 223 47 3 232 47 1 236 47 2 237 47 1 238 47 1 239 47 3 4 48 1 9 48 1 14 48 1 23 48 1 31 48 3 33 48 1 36 48 2 37 48 1 42 48 1 45 48 1 48 48 3 49 48 1 54 48 1 62 48 1 63 48 2 64 48 4 65 48 7 66 48 1 67 48 1 68 48 4 69 48 1 70 48 3 71 48 1 72 48 1 74 48 1 75 48 1 78 48 3 79 48 1 80 48 7 81 48 2 82 48 1 83 48 4 87 48 1 88 48 1 90 48 1 93 48 1 107 48 1 116 48 1 126 48 1 128 48 1 135 48 1 141 48 1 144 48 1 146 48 1 149 48 1 153 48 1 154 48 2 163 48 1 166 48 4 176 48 1 182 48 2 211 48 4 213 48 3 214 48 1 217 48 4 221 48 1 222 48 1 223 48 2 229 48 1 233 48 1 238 48 2 239 48 1 9 49 5 31 49 6 33 49 5 39 49 2 49 49 5 61 49 39 62 49 5 64 49 29 65 49 6 66 49 1 67 49 1 68 49 5 69 49 1 70 49 16 71 49 1 72 49 2 73 49 1 74 49 17 75 49 1 78 49 6 79 49 1 82 49 1 83 49 1 84 49 1 88 49 1 90 49 2 98 49 1 103 49 1 104 49 1 120 49 1 121 49 1 140 49 1 141 49 1 149 49 3 153 49 2 154 49 1 155 49 1 156 49 2 166 49 1 172 49 2 179 49 1 198 49 1 210 49 1 211 49 29 212 49 2 213 49 3 214 49 3 215 49 3 217 49 5 219 49 5 220 49 9 221 49 5 227 49 1 232 49 1 234 49 1 236 49 1 239 49 3 9 50 3 23 50 1 31 50 4 32 50 1 33 50 3 36 50 2 37 50 1 44 50 2 48 50 1 49 50 2 54 50 1 55 50 3 57 50 1 59 50 1 60 50 1 62 50 1 63 50 3 64 50 8 65 50 1 66 50 1 67 50 1 68 50 2 69 50 1 70 50 12 72 50 4 73 50 1 78 50 4 
79 50 2 80 50 1 81 50 2 83 50 1 85 50 13 89 50 1 95 50 2 141 50 1 146 50 2 153 50 1 154 50 1 156 50 1 166 50 2 167 50 1 211 50 8 213 50 2 217 50 2 219 50 3 221 50 1 223 50 1 235 50 1 236 50 1 238 50 1 2 51 1 4 51 10 9 51 1 15 51 1 24 51 1 33 51 1 38 51 1 42 51 1 45 51 1 50 51 1 56 51 1 59 51 1 64 51 5 70 51 3 72 51 2 75 51 1 91 51 2 92 51 20 93 51 41 95 51 13 96 51 11 98 51 2 99 51 6 100 51 8 101 51 2 102 51 1 103 51 4 104 51 5 109 51 1 111 51 4 113 51 3 115 51 3 116 51 2 118 51 1 119 51 1 121 51 12 124 51 1 125 51 3 126 51 41 128 51 10 129 51 8 132 51 4 134 51 8 135 51 16 140 51 4 141 51 3 142 51 1 146 51 9 147 51 2 148 51 2 149 51 3 151 51 15 152 51 7 153 51 8 154 51 2 156 51 21 157 51 2 158 51 2 159 51 8 160 51 5 162 51 1 163 51 17 164 51 12 166 51 5 168 51 3 170 51 2 171 51 1 172 51 9 173 51 2 174 51 6 176 51 1 177 51 2 178 51 1 182 51 5 183 51 2 193 51 1 194 51 1 198 51 1 211 51 5 225 51 1 228 51 2 4 52 10 6 52 1 9 52 2 23 52 1 32 52 2 33 52 2 40 52 1 45 52 1 48 52 2 51 52 1 54 52 1 58 52 1 59 52 1 64 52 3 67 52 2 70 52 1 72 52 1 81 52 1 87 52 1 90 52 1 91 52 2 92 52 6 93 52 4 95 52 7 96 52 21 98 52 2 99 52 5 100 52 8 101 52 4 104 52 12 109 52 1 110 52 1 111 52 5 115 52 2 116 52 2 120 52 2 121 52 4 126 52 4 128 52 10 129 52 3 130 52 2 132 52 5 133 52 1 134 52 4 135 52 32 140 52 1 141 52 3 142 52 1 146 52 3 148 52 1 149 52 1 151 52 17 152 52 12 153 52 8 154 52 2 155 52 1 156 52 25 157 52 3 158 52 1 159 52 8 160 52 3 161 52 5 162 52 1 163 52 13 164 52 12 165 52 2 166 52 10 167 52 2 168 52 3 169 52 5 170 52 4 171 52 2 172 52 20 173 52 3 174 52 4 175 52 5 176 52 6 177 52 2 179 52 26 182 52 3 193 52 1 196 52 1 198 52 2 210 52 1 211 52 3 212 52 1 230 52 1 237 52 1 238 52 1 2 53 1 4 53 4 6 53 1 38 53 1 44 53 1 59 53 1 74 53 1 92 53 1 93 53 3 95 53 5 96 53 2 98 53 2 99 53 1 100 53 7 103 53 1 104 53 4 109 53 1 110 53 1 111 53 2 114 53 1 121 53 2 126 53 3 128 53 4 129 53 5 133 53 1 135 53 7 140 53 2 146 53 1 147 53 1 148 53 3 151 53 8 152 53 7 153 53 6 154 53 1 156 53 6 157 53 16 158 53 1 159 53 7 160 53 4 161 53 1 162 53 3 163 53 1 164 53 2 165 53 1 166 53 1 167 53 2 169 53 1 170 53 2 172 53 9 174 53 1 178 53 1 180 53 1 191 53 1 228 53 1 2 54 1 4 54 1 8 54 1 23 54 1 45 54 1 91 54 4 93 54 3 95 54 1 96 54 5 99 54 1 100 54 3 101 54 1 104 54 2 115 54 1 120 54 1 121 54 1 126 54 3 128 54 1 129 54 2 131 54 1 135 54 9 141 54 1 142 54 1 147 54 1 151 54 11 152 54 6 153 54 2 156 54 10 158 54 1 159 54 3 160 54 1 161 54 2 163 54 2 164 54 4 166 54 4 167 54 1 168 54 1 170 54 1 171 54 1 172 54 3 176 54 1 177 54 2 180 54 1 215 54 1 225 54 1 2 55 2 4 55 6 6 55 2 8 55 1 9 55 1 12 55 1 15 55 1 22 55 1 33 55 1 37 55 1 45 55 1 48 55 2 72 55 2 74 55 1 91 55 3 92 55 10 93 55 14 95 55 4 96 55 21 98 55 2 99 55 6 100 55 10 101 55 2 103 55 1 104 55 16 105 55 2 106 55 1 107 55 2 109 55 3 110 55 1 112 55 1 115 55 3 116 55 4 117 55 2 118 55 1 120 55 1 121 55 5 126 55 14 128 55 6 129 55 3 132 55 3 133 55 2 134 55 7 135 55 11 140 55 1 141 55 2 146 55 4 147 55 2 151 55 18 152 55 32 153 55 9 154 55 50 155 55 3 156 55 26 157 55 1 158 55 3 159 55 10 160 55 11 161 55 14 163 55 9 164 55 35 165 55 3 166 55 17 167 55 1 168 55 2 169 55 2 170 55 1 172 55 6 173 55 6 174 55 4 175 55 4 176 55 1 177 55 2 178 55 1 179 55 2 180 55 2 182 55 1 193 55 1 226 55 1 233 55 1 236 55 2 237 55 2 2 56 2 4 56 28 7 56 1 9 56 1 22 56 1 32 56 1 33 56 1 43 56 2 44 56 2 47 56 2 54 56 1 56 56 1 87 56 1 90 56 2 92 56 4 93 56 17 94 56 1 95 56 3 96 56 13 98 56 1 99 56 4 100 56 15 101 56 3 104 56 10 107 56 2 109 56 6 110 56 2 111 56 5 115 56 1 116 56 2 118 56 1 
120 56 3 121 56 5 124 56 4 126 56 17 128 56 28 129 56 7 130 56 1 132 56 3 134 56 7 135 56 17 140 56 5 141 56 2 144 56 1 146 56 2 147 56 3 148 56 1 149 56 1 151 56 13 152 56 33 153 56 9 154 56 1 155 56 3 156 56 26 157 56 11 158 56 4 159 56 15 160 56 9 161 56 4 162 56 1 163 56 12 164 56 16 165 56 4 166 56 8 168 56 1 169 56 4 170 56 3 171 56 3 172 56 9 174 56 3 176 56 2 177 56 1 178 56 6 179 56 2 180 56 25 182 56 1 198 56 2 206 56 1 228 56 3 4 57 10 9 57 1 12 57 1 22 57 1 32 57 1 33 57 1 39 57 1 43 57 3 48 57 1 51 57 1 59 57 1 61 57 1 64 57 5 65 57 1 66 57 1 67 57 2 69 57 1 72 57 3 74 57 1 81 57 1 90 57 1 91 57 1 92 57 8 93 57 7 95 57 1 96 57 16 98 57 1 99 57 3 100 57 18 101 57 6 103 57 3 104 57 6 105 57 1 107 57 5 108 57 1 109 57 1 110 57 1 111 57 2 115 57 1 117 57 1 120 57 1 121 57 7 126 57 7 128 57 10 129 57 6 130 57 1 131 57 1 132 57 6 134 57 13 135 57 33 137 57 2 138 57 1 146 57 3 148 57 1 151 57 36 152 57 12 153 57 10 154 57 1 155 57 1 156 57 16 157 57 3 158 57 5 159 57 18 160 57 2 161 57 18 162 57 1 163 57 14 164 57 24 165 57 1 166 57 33 167 57 3 168 57 6 169 57 3 171 57 1 172 57 91 175 57 5 176 57 7 178 57 4 179 57 1 191 57 1 194 57 1 198 57 2 211 57 5 214 57 1 228 57 2 237 57 1 4 58 13 6 58 1 8 58 1 9 58 1 13 58 1 22 58 1 24 58 1 33 58 1 44 58 1 50 58 1 54 58 3 65 58 1 72 58 1 81 58 1 90 58 1 91 58 1 92 58 6 93 58 6 95 58 1 96 58 9 98 58 1 99 58 2 100 58 19 101 58 4 104 58 2 107 58 1 109 58 2 110 58 1 111 58 3 112 58 1 116 58 2 117 58 2 120 58 3 121 58 14 126 58 6 128 58 13 129 58 5 130 58 2 131 58 1 132 58 3 133 58 1 134 58 6 135 58 10 137 58 1 138 58 2 141 58 1 142 58 2 143 58 2 146 58 2 147 58 1 151 58 17 152 58 19 153 58 8 154 58 1 155 58 27 156 58 15 157 58 5 158 58 5 159 58 19 160 58 5 161 58 9 163 58 8 164 58 9 165 58 2 166 58 8 167 58 1 168 58 1 169 58 1 170 58 1 171 58 2 172 58 11 173 58 3 174 58 4 175 58 2 176 58 2 177 58 6 178 58 1 180 58 1 182 58 1 198 58 2 228 58 1 2 59 3 4 59 5 6 59 1 7 59 1 9 59 2 23 59 1 33 59 2 39 59 1 48 59 2 54 59 1 67 59 1 72 59 3 74 59 2 84 59 2 91 59 2 93 59 9 95 59 2 96 59 16 98 59 2 99 59 4 100 59 4 101 59 2 104 59 12 105 59 2 107 59 2 109 59 2 111 59 2 112 59 3 115 59 1 116 59 1 118 59 1 121 59 5 126 59 9 128 59 5 129 59 9 131 59 2 132 59 6 133 59 1 134 59 6 135 59 15 136 59 1 140 59 1 141 59 2 142 59 3 147 59 1 148 59 3 149 59 1 151 59 12 152 59 18 153 59 5 154 59 3 155 59 1 156 59 11 157 59 4 158 59 3 159 59 4 160 59 7 161 59 5 162 59 1 163 59 7 164 59 9 165 59 1 166 59 14 167 59 1 168 59 5 171 59 3 172 59 18 174 59 2 175 59 1 176 59 6 177 59 3 182 59 1 198 59 1 213 59 1 219 59 1 225 59 1 4 60 8 8 60 1 9 60 1 23 60 2 25 60 1 33 60 1 43 60 1 44 60 1 45 60 1 47 60 1 48 60 1 56 60 1 59 60 1 61 60 2 66 60 1 72 60 1 81 60 1 89 60 1 90 60 1 93 60 6 96 60 17 98 60 9 99 60 5 100 60 17 101 60 5 103 60 2 104 60 16 105 60 1 106 60 1 109 60 4 111 60 3 112 60 1 115 60 1 118 60 2 120 60 2 121 60 11 126 60 6 128 60 8 129 60 4 131 60 2 132 60 2 134 60 4 135 60 25 140 60 1 146 60 6 147 60 3 148 60 2 149 60 1 151 60 27 152 60 29 153 60 10 154 60 1 155 60 1 156 60 22 157 60 6 158 60 6 159 60 17 160 60 10 161 60 11 162 60 1 163 60 13 164 60 30 165 60 1 166 60 19 167 60 2 168 60 6 169 60 2 171 60 5 172 60 18 173 60 3 174 60 8 175 60 4 176 60 3 177 60 6 178 60 2 179 60 1 182 60 2 183 60 1 191 60 2 193 60 2 198 60 2 210 60 2 214 60 1 225 60 1 238 60 3 4 61 108 6 61 21 8 61 3 12 61 1 15 61 1 22 61 2 23 61 12 26 61 1 32 61 1 36 61 1 39 61 1 42 61 1 44 61 2 46 61 1 48 61 4 49 61 1 61 61 3 71 61 2 74 61 1 75 61 1 82 61 1 84 61 1 85 61 1 88 61 1 89 61 1 90 61 1 93 61 
76 96 61 2 98 61 2 99 61 1 100 61 5 101 61 1 103 61 1 104 61 8 107 61 1 109 61 2 112 61 2 113 61 3 114 61 4 115 61 3 121 61 75 122 61 16 124 61 6 125 61 3 126 61 76 127 61 3 128 61 108 129 61 102 130 61 25 131 61 2 132 61 11 133 61 21 134 61 50 135 61 61 136 61 1 138 61 7 139 61 2 140 61 9 141 61 1 142 61 4 143 61 2 145 61 2 146 61 5 147 61 6 148 61 4 149 61 3 151 61 12 152 61 6 153 61 1 154 61 5 156 61 10 157 61 8 158 61 1 159 61 5 163 61 5 164 61 8 166 61 4 168 61 6 172 61 18 176 61 2 177 61 1 182 61 6 183 61 1 191 61 1 196 61 1 198 61 1 210 61 1 222 61 1 223 61 1 228 61 1 235 61 1 237 61 2 239 61 1 4 62 93 6 62 21 8 62 2 9 62 1 23 62 3 25 62 2 32 62 1 33 62 1 36 62 1 50 62 1 54 62 2 56 62 1 58 62 1 59 62 2 64 62 1 70 62 1 72 62 2 91 62 2 93 62 20 96 62 8 98 62 2 99 62 3 100 62 3 103 62 1 104 62 13 109 62 2 112 62 1 113 62 10 114 62 1 115 62 1 116 62 5 120 62 4 121 62 52 122 62 1 123 62 5 124 62 6 125 62 10 126 62 20 127 62 3 128 62 93 129 62 78 130 62 39 131 62 2 132 62 26 133 62 21 134 62 53 135 62 31 136 62 8 137 62 1 138 62 9 139 62 5 140 62 4 142 62 3 146 62 1 147 62 1 149 62 1 150 62 4 151 62 7 152 62 7 153 62 2 156 62 5 157 62 1 158 62 1 159 62 3 160 62 12 163 62 4 164 62 8 166 62 7 171 62 1 172 62 2 178 62 1 182 62 7 191 62 1 211 62 1 236 62 1 239 62 1 4 63 41 6 63 3 8 63 1 23 63 1 25 63 1 26 63 1 36 63 1 43 63 1 54 63 1 56 63 1 62 63 1 72 63 3 80 63 1 81 63 1 84 63 1 92 63 1 93 63 24 95 63 3 96 63 6 99 63 2 100 63 1 104 63 21 107 63 1 108 63 1 109 63 2 112 63 1 113 63 1 116 63 1 117 63 1 118 63 2 121 63 11 122 63 2 123 63 2 124 63 5 125 63 1 126 63 24 127 63 1 128 63 41 129 63 23 130 63 5 132 63 5 133 63 3 134 63 10 135 63 25 137 63 1 138 63 1 140 63 5 141 63 1 143 63 3 146 63 5 148 63 2 150 63 1 151 63 7 152 63 1 153 63 1 156 63 5 157 63 1 159 63 1 160 63 2 163 63 5 164 63 3 166 63 4 168 63 3 169 63 1 172 63 9 173 63 1 176 63 1 178 63 1 182 63 2 184 63 1 214 63 1 219 63 1 221 63 1 228 63 1 2 64 4 3 64 8 4 64 42 5 64 4 6 64 5 8 64 4 9 64 5 12 64 3 16 64 1 21 64 1 23 64 3 25 64 1 26 64 1 30 64 2 33 64 5 43 64 1 44 64 5 45 64 2 48 64 4 54 64 1 59 64 2 61 64 2 66 64 1 74 64 1 75 64 1 81 64 1 91 64 2 92 64 10 93 64 79 94 64 2 95 64 1 96 64 9 97 64 1 98 64 1 99 64 5 100 64 5 101 64 4 103 64 2 104 64 9 109 64 2 113 64 2 114 64 7 115 64 2 117 64 1 120 64 2 121 64 19 122 64 4 123 64 4 124 64 4 125 64 2 126 64 79 127 64 3 128 64 42 129 64 25 130 64 2 131 64 1 132 64 2 133 64 5 134 64 9 135 64 14 136 64 3 137 64 33 140 64 7 141 64 2 142 64 1 143 64 2 144 64 1 146 64 1 147 64 2 149 64 2 151 64 4 152 64 3 153 64 6 156 64 16 158 64 3 159 64 5 160 64 1 163 64 3 164 64 3 166 64 3 168 64 1 170 64 1 172 64 11 173 64 1 177 64 1 179 64 1 182 64 6 193 64 2 198 64 1 222 64 1 225 64 1 228 64 1 2 65 1 4 65 138 6 65 11 8 65 5 12 65 1 15 65 1 26 65 1 35 65 2 36 65 1 39 65 1 43 65 1 44 65 2 45 65 3 48 65 2 54 65 3 55 65 1 56 65 4 62 65 1 64 65 1 65 65 1 68 65 1 70 65 1 71 65 1 72 65 1 74 65 1 75 65 2 84 65 1 87 65 1 90 65 1 91 65 1 93 65 53 94 65 2 95 65 2 96 65 11 98 65 1 99 65 14 101 65 2 104 65 20 108 65 2 111 65 1 112 65 3 113 65 3 114 65 7 115 65 2 116 65 6 118 65 1 120 65 6 121 65 54 122 65 8 123 65 2 124 65 6 125 65 3 126 65 53 128 65 138 129 65 69 130 65 16 131 65 1 132 65 31 133 65 11 134 65 68 135 65 58 136 65 1 138 65 6 139 65 3 140 65 2 141 65 1 143 65 3 144 65 1 146 65 22 147 65 5 148 65 1 149 65 64 150 65 1 151 65 8 152 65 11 153 65 6 154 65 2 156 65 2 160 65 3 163 65 11 164 65 13 166 65 2 168 65 1 171 65 1 172 65 12 174 65 1 176 65 2 177 65 2 179 65 1 182 65 24 183 65 1 191 65 1 194 65 2 
198 65 3 211 65 1 217 65 1 221 65 1 225 65 3 228 65 1 231 65 1 233 65 1 236 65 2 237 65 4 4 66 77 6 66 11 8 66 2 22 66 1 23 66 1 32 66 1 35 66 1 43 66 1 44 66 2 45 66 1 48 66 4 51 66 1 59 66 1 64 66 3 66 66 1 67 66 1 68 66 1 70 66 1 72 66 3 75 66 2 82 66 1 90 66 4 91 66 9 92 66 41 93 66 53 94 66 1 95 66 4 96 66 14 97 66 1 98 66 6 99 66 11 100 66 3 101 66 5 103 66 2 104 66 10 105 66 2 108 66 1 111 66 2 112 66 1 113 66 4 115 66 3 116 66 2 117 66 1 120 66 2 121 66 23 122 66 5 123 66 3 124 66 5 125 66 4 126 66 53 127 66 1 128 66 77 129 66 24 130 66 6 131 66 1 132 66 21 133 66 11 134 66 36 135 66 112 136 66 2 138 66 1 139 66 3 140 66 5 142 66 1 143 66 1 146 66 10 147 66 4 148 66 39 149 66 2 151 66 10 152 66 7 153 66 4 154 66 5 156 66 3 158 66 1 159 66 3 160 66 4 163 66 9 164 66 8 165 66 1 166 66 2 167 66 2 168 66 3 171 66 1 172 66 11 173 66 1 174 66 1 177 66 1 179 66 1 182 66 16 183 66 2 186 66 2 198 66 1 211 66 3 217 66 1 225 66 2 228 66 2 231 66 1 233 66 1 236 66 1 237 66 2 4 67 76 6 67 10 8 67 1 15 67 1 23 67 2 39 67 1 48 67 1 54 67 1 56 67 2 61 67 1 72 67 2 81 67 2 82 67 1 89 67 1 91 67 1 92 67 11 93 67 87 94 67 1 95 67 6 96 67 10 98 67 1 99 67 3 100 67 6 101 67 10 103 67 2 104 67 23 105 67 1 107 67 1 109 67 3 112 67 1 113 67 4 114 67 2 115 67 3 116 67 7 120 67 5 121 67 45 122 67 8 123 67 6 124 67 6 125 67 4 126 67 87 127 67 3 128 67 76 129 67 43 130 67 11 131 67 6 132 67 21 133 67 10 134 67 49 135 67 37 136 67 3 138 67 4 140 67 12 141 67 1 142 67 38 143 67 2 144 67 21 146 67 9 147 67 8 148 67 1 149 67 3 151 67 4 152 67 9 153 67 4 154 67 4 155 67 2 156 67 16 157 67 1 159 67 6 160 67 4 161 67 1 163 67 9 164 67 7 167 67 1 168 67 3 172 67 7 178 67 1 179 67 1 182 67 28 183 67 1 193 67 1 194 67 1 198 67 3 222 67 2 228 67 4 235 67 1 236 67 1 237 67 1 238 67 1 1 68 1 4 68 15 6 68 1 8 68 1 32 68 1 36 68 1 43 68 1 44 68 1 54 68 5 64 68 1 74 68 1 90 68 1 91 68 23 92 68 32 93 68 76 94 68 1 95 68 1 96 68 10 98 68 3 99 68 4 100 68 2 101 68 6 103 68 1 104 68 5 108 68 1 109 68 1 110 68 1 112 68 1 113 68 1 114 68 1 115 68 1 116 68 2 117 68 1 120 68 1 121 68 10 122 68 4 123 68 4 124 68 5 125 68 1 126 68 76 128 68 15 129 68 8 130 68 3 131 68 3 132 68 2 133 68 1 134 68 3 135 68 18 136 68 6 138 68 1 140 68 7 141 68 1 145 68 1 146 68 1 147 68 2 148 68 3 149 68 1 150 68 1 151 68 2 152 68 4 153 68 2 155 68 2 156 68 3 157 68 1 159 68 2 163 68 4 164 68 5 166 68 1 172 68 5 173 68 1 177 68 1 178 68 1 182 68 3 198 68 1 202 68 1 211 68 1 228 68 1 239 68 1 3 69 1 4 69 19 6 69 2 8 69 1 9 69 1 26 69 1 27 69 1 33 69 1 44 69 2 54 69 13 59 69 1 62 69 1 72 69 4 91 69 4 92 69 17 93 69 42 96 69 6 97 69 1 98 69 2 99 69 8 101 69 4 104 69 28 112 69 1 113 69 3 114 69 2 115 69 4 116 69 2 121 69 23 122 69 7 123 69 2 124 69 3 125 69 3 126 69 42 127 69 1 128 69 19 129 69 10 130 69 4 131 69 5 132 69 3 133 69 2 134 69 9 135 69 29 136 69 1 137 69 1 140 69 10 141 69 3 143 69 1 144 69 1 145 69 32 146 69 3 147 69 4 148 69 5 149 69 1 151 69 6 152 69 1 153 69 2 154 69 1 155 69 1 156 69 4 157 69 1 161 69 1 163 69 5 164 69 8 166 69 6 167 69 1 168 69 1 169 69 1 171 69 1 172 69 4 173 69 1 179 69 1 180 69 1 182 69 6 183 69 1 198 69 1 221 69 1 225 69 1 233 69 1 237 69 3 4 70 104 6 70 11 8 70 5 9 70 4 12 70 1 23 70 2 31 70 1 32 70 1 33 70 4 39 70 2 48 70 1 49 70 1 54 70 2 57 70 1 63 70 1 64 70 1 68 70 1 72 70 2 77 70 1 78 70 1 80 70 2 90 70 2 93 70 114 96 70 7 97 70 1 98 70 4 99 70 4 100 70 3 101 70 2 104 70 13 109 70 2 112 70 1 113 70 6 115 70 3 121 70 37 123 70 1 124 70 5 125 70 6 126 70 114 127 70 2 128 70 104 129 70 50 130 70 9 131 70 1 132 70 10 
133 70 11 134 70 26 135 70 125 137 70 3 138 70 5 139 70 1 140 70 18 141 70 43 142 70 1 143 70 8 146 70 3 147 70 5 148 70 1 149 70 1 151 70 6 152 70 4 153 70 7 154 70 3 155 70 2 156 70 5 157 70 2 158 70 1 159 70 3 160 70 1 163 70 2 164 70 3 165 70 2 166 70 4 168 70 4 169 70 1 171 70 4 172 70 25 173 70 1 174 70 1 177 70 1 178 70 2 182 70 3 183 70 1 185 70 1 193 70 1 198 70 2 211 70 1 214 70 1 215 70 1 217 70 1 231 70 1 237 70 1 4 71 1 44 71 1 70 71 8 92 71 3 93 71 3 100 71 1 126 71 3 128 71 1 129 71 1 135 71 5 143 71 1 156 71 3 159 71 1 172 71 1 181 71 43 182 71 18 183 71 4 184 71 14 185 71 11 186 71 1 187 71 8 188 71 6 189 71 14 190 71 5 191 71 8 192 71 1 193 71 2 194 71 3 195 71 6 196 71 3 197 71 4 198 71 2 199 71 3 200 71 4 201 71 4 202 71 3 203 71 5 204 71 4 206 71 5 207 71 2 208 71 1 70 72 5 91 72 1 93 72 1 126 72 1 135 72 1 147 72 1 156 72 4 172 72 2 181 72 41 182 72 8 183 72 4 184 72 11 185 72 3 186 72 5 187 72 3 188 72 5 189 72 5 190 72 3 191 72 2 192 72 3 193 72 5 195 72 4 196 72 3 197 72 3 200 72 1 201 72 1 202 72 1 203 72 1 206 72 4 210 72 41 9 73 1 33 73 1 67 73 1 70 73 4 84 73 1 93 73 1 126 73 1 136 73 1 142 73 1 153 73 1 156 73 2 181 73 36 182 73 12 183 73 2 184 73 14 185 73 13 186 73 3 187 73 2 188 73 9 189 73 8 190 73 5 192 73 3 193 73 4 194 73 1 195 73 4 196 73 2 197 73 5 198 73 1 199 73 2 200 73 2 202 73 4 203 73 4 204 73 3 205 73 3 206 73 1 207 73 4 208 73 1 209 73 6 236 73 1 70 74 10 88 74 1 153 74 1 156 74 6 164 74 1 172 74 3 181 74 55 182 74 18 183 74 2 184 74 18 185 74 8 186 74 3 187 74 2 188 74 10 189 74 11 190 74 5 191 74 12 192 74 2 193 74 4 194 74 2 195 74 3 196 74 3 197 74 2 198 74 2 199 74 7 201 74 4 202 74 1 203 74 2 204 74 4 205 74 1 206 74 4 209 74 1 235 74 2 4 75 2 70 75 11 93 75 1 103 75 1 104 75 1 126 75 1 128 75 2 135 75 5 140 75 1 152 75 1 153 75 1 156 75 3 166 75 1 174 75 1 181 75 58 182 75 18 183 75 2 184 75 23 185 75 8 186 75 2 187 75 3 188 75 7 189 75 15 190 75 2 191 75 8 192 75 3 193 75 1 194 75 1 195 75 4 196 75 2 197 75 14 198 75 1 199 75 4 200 75 2 201 75 2 202 75 3 203 75 4 204 75 8 205 75 1 210 75 2 219 75 1 4 76 1 25 76 1 27 76 1 36 76 1 44 76 1 70 76 30 91 76 1 101 76 2 108 76 1 116 76 1 128 76 1 135 76 1 142 76 2 156 76 17 166 76 1 172 76 2 181 76 54 182 76 28 183 76 15 184 76 62 185 76 29 186 76 7 187 76 9 188 76 23 189 76 6 190 76 42 191 76 7 192 76 11 193 76 6 194 76 14 195 76 3 196 76 9 197 76 32 198 76 10 200 76 3 201 76 6 202 76 8 203 76 1 204 76 1 205 76 2 207 76 20 208 76 25 209 76 26 210 76 1 224 76 1 228 76 1 236 76 1 238 76 1 4 77 1 70 77 8 128 77 1 135 77 3 156 77 3 172 77 3 181 77 66 182 77 11 183 77 2 184 77 9 185 77 3 186 77 3 187 77 3 188 77 12 189 77 4 190 77 2 191 77 3 192 77 6 194 77 2 195 77 4 197 77 2 198 77 37 199 77 1 200 77 1 201 77 2 203 77 4 204 77 2 205 77 1 206 77 1 207 77 2 209 77 1 45 78 1 70 78 5 156 78 6 164 78 1 174 78 1 176 78 1 181 78 34 182 78 13 183 78 1 184 78 14 185 78 6 186 78 1 187 78 3 188 78 6 189 78 3 190 78 1 191 78 2 192 78 5 193 78 4 195 78 20 199 78 3 200 78 2 201 78 2 202 78 13 203 78 1 205 78 2 207 78 2 208 78 3 210 78 1 4 79 2 49 79 1 70 79 9 128 79 2 156 79 4 172 79 4 174 79 1 181 79 30 182 79 16 183 79 3 184 79 6 185 79 5 186 79 1 187 79 4 188 79 11 189 79 5 190 79 2 191 79 6 192 79 3 194 79 4 195 79 5 196 79 1 197 79 8 198 79 2 199 79 5 200 79 4 202 79 2 205 79 3 207 79 1 208 79 1 210 79 1 4 80 7 6 80 1 13 80 1 26 80 1 43 80 1 44 80 1 45 80 1 60 80 1 65 80 1 70 80 2 91 80 2 92 80 7 93 80 22 96 80 14 98 80 6 99 80 2 100 80 3 101 80 3 103 80 1 104 80 10 111 80 3 112 80 1 113 80 3 114 80 1 
115 80 2 116 80 1 117 80 1 120 80 4 121 80 5 123 80 1 124 80 1 125 80 3 126 80 22 128 80 7 129 80 5 132 80 1 133 80 1 134 80 4 135 80 16 136 80 5 140 80 4 141 80 3 142 80 2 146 80 4 148 80 1 151 80 7 152 80 5 153 80 1 154 80 1 155 80 1 156 80 3 158 80 2 159 80 3 160 80 1 163 80 1 164 80 4 166 80 1 168 80 1 171 80 1 172 80 7 174 80 1 178 80 1 179 80 1 181 80 6 182 80 9 183 80 2 185 80 2 186 80 2 187 80 2 188 80 1 189 80 2 190 80 1 193 80 1 194 80 1 198 80 3 199 80 2 204 80 1 208 80 1 209 80 1 210 80 1 231 80 1 Seurat/tests/testdata/barcodes.tsv0000644000176200001440000000252013712563445017021 0ustar liggesusersATGCCAGAACGACT-1 CATGGCCTGTGCAT-1 GAACCTGATGAACC-1 TGACTGGATTCTCA-1 AGTCAGACTGCACA-1 TCTGATACACGTGT-1 TGGTATCTAAACAG-1 GCAGCTCTGTTTCT-1 GATATAACACGCAT-1 AATGTTGACAGTCA-1 AGGTCATGAGTGTC-1 AGAGATGATCTCGC-1 GGGTAACTCTAGTG-1 CATGAGACACGGGA-1 TACGCCACTCCGAA-1 CTAAACCTGTGCAT-1 GTAAGCACTCATTC-1 TTGGTACTGAATCC-1 CATCATACGGAGCA-1 TACATCACGCTAAC-1 TTACCATGAATCGC-1 ATAGGAGAAACAGA-1 GCGCACGACTTTAC-1 ACTCGCACGAAAGT-1 ATTACCTGCCTTAT-1 CCCAACTGCAATCG-1 AAATTCGAATCACG-1 CCATCCGATTCGCC-1 TCCACTCTGAGCTT-1 CATCAGGATGCACA-1 CTAAACCTCTGACA-1 GATAGAGAAGGGTG-1 CTAACGGAACCGAT-1 AGATATACCCGTAA-1 TACTCTGAATCGAC-1 GCGCATCTTGCTCC-1 GTTGACGATATCGG-1 ACAGGTACTGGTGT-1 GGCATATGCTTATC-1 CATTACACCAACTG-1 TAGGGACTGAACTC-1 GCTCCATGAGAAGT-1 TACAATGATGCTAG-1 CTTCATGACCGAAT-1 CTGCCAACAGGAGC-1 TTGCATTGAGCTAC-1 AAGCAAGAGCTTAG-1 CGGCACGAACTCAG-1 GGTGGAGATTACTC-1 GGCCGATGTACTCT-1 CGTAGCCTGTATGC-1 TGAGCTGAATGCTG-1 CCTATAACGAGACG-1 ATAAGTTGGTACGT-1 AAGCGACTTTGACG-1 ACCAGTGAATACCG-1 ATTGCACTTGCTTT-1 CTAGGTGATGGTTG-1 GCACTAGACCTTTA-1 CATGCGCTAGTCAC-1 TTGAGGACTACGCA-1 ATACCACTCTAAGC-1 CATATAGACTAAGC-1 TTTAGCTGTACTCT-1 GACATTCTCCACCT-1 ACGTGATGCCATGA-1 ATTGTAGATTCCCG-1 GATAGAGATCACGA-1 AATGCGTGGACGGA-1 GCGTAAACACGGTT-1 ATTCAGCTCATTGG-1 GGCATATGGGGAGT-1 ATCATCTGACACCA-1 GTCATACTTCGCCT-1 TTACGTACGTTCAG-1 GAGTTGTGGTAGCT-1 GACGCTCTCTCTCG-1 AGTCTTACTTCGGA-1 GGAACACTTCAGAC-1 CTTGATTGATCTTC-1 Seurat/tests/testdata/nbt_small.Rdata0000644000176200001440000133013713712563445017442 0ustar liggesusersVŹ?K[Xzea"xTQJP@ v^b5&7hnM175h$&^b)]3Ϝyg̜s]IV9=e)2zf3WV-ZիUVZwټսV-tij5 Ac߮,lm)5lj^`Y=9ܺ'im|=e nsZ3n'& _Www+0dh|(g #~bb0fӎDP|ri>ޜjwxq gp>㟣MwAuo\ cyz^?_n{|}Fm1^~ׯ+kuy*8"kr/o}p!LsՏ׽5|] D_Ν^9ű_t~~Q"SyqW8Nlnu-X6U-+qopT}P3Ή_5n".-߻{Y?<Wv~Mז&gq:0~}n:>oyb>kVF~ngz[Z>b)2C]o?),uu>o+k yjhݼFk/W~@D~q|g 8SKEa %MK}wj#5uBK^oј~u"/jdBPW?Me }d`x__`~7G\o*zvL>yxPH;>?<{O&P6gN޹){<]r֯ nPeI wG]bXO YWdmŸD~i/lv<:(ާ&:FPu$-U ?ym*~ӊĺ!ZRk[Gfp@bg>a=I7CmvMw3O}gnZ1J:Xq<4~Qg@}/~̫c~5g|o >~S0N8^t?y)(84L GQB+"W$[=-jӂD3c/&/a|.u5nbhij/{ gOXQGU &xGsKq_ɰq1O[M~?Z?T'~\8N} ~՟ytKˈ~(ەlvsgk3?KLu6AC?~nmZo#R>d?gv!>ق} 9wAy<E eLW7; \I<Ƽ+h#E xduȡypP<~v?Ց∏h_0PtU7uO Vq1܂ o!폩ńOucbb:4Tgy/_Zt<1P'Oˤ9Oj]}h.Wdwvu7fӀ\/)>Zc/!-C vL/fx=ǰ籮B}_֝㗾P—" 뾫P&|yZ[IǽQ͓xy%ſTEIuEIqޑz<Ń(5pn8JO܅-OZ.3-|'OyMC`w݉#f9ڿXzA:zxU?+?W'߅5c}}]x]9\C*Ы/p'~ok#Ђ &7 MkpG}e=8dG_Yk1rpa?֟半mꏽ7_ +?p]p-¯vRwg^F<a:q-m[p?|77A?ҽn:WػWq܉[ S׼ % q s'd5fg\mΧ@_:/Aٶ:g$w6< ol2}s$^ux 'G?3-E~nGnWH[ͣ3AFD뺔Rq3])؅;\?bKXﯸ)o$&u68[Kg Wd/'SJxo* +h4:ZW ZQ|'Ӹ1w;:u|5eqbh! ?Q 7J||w5N_{9B=v/])4pǧSCpmC{ǩneg4/ӉC] [C/y웖%;N)|% 0Mo|38^>]{Ot>kZ'e0 pXd^7؎O_ |'{/~̏WS >ů2/TFo|& n6Zp[~ ;[xwG!&E[钧IN=ONNq{Jǐ ,Jw8ad/\lwr/y<~ހ UU$>87`&C+?އM>cZzI=X!Q>Bߛ.]OQo6>SL\WW>I |Vl~ꦣAG_<xiH?6D<ьZ0Tx,uK=9^ͣy.z]L$֙N? 
AKO_8MQS[;UOD6x|'.?jm?# w3 B ǀt~ =U=Ml 91τ^&WuV{ =!?>^?ɤGީx+}TޛẚtPkfz|]|d+bQEMgzgJ2Zͷޑhωc?#K#;b\Dz"SU_L\ ?tZ}>'?0ocf:=: [GPձA=Qphn:~~ V~*P߮ˁp}뚆yqDb)Ηż{ncbj_kGasGVC''w+RS6}yՊ}xgg0};m_5ni_ cvi˔I|w/\ǏvN7ꗓw2^co;L/QoimeN8Js1j0p\<conCD=MWC[O*QI2V,\_ӇuM~#lFד[mq*z_~L,lo48nMjgi2㐮4VuKkhG=iyQ)K^-oLuPp#sbZ dtҡ|U$z=rm&}yQTۿqbaeZS߳:xqw_׊k\|^?jߊJ%:>s>Z#vRM ņ|8J>W\C0ho7tcP%GߦWIU-G 켽-}G9 /GY}6)\ʸAG\HC;($}׷|S]3~ωDg:SNx5u{^iC76"򷬿fsuH~=Oc^[uw.PAgNn*WL+T~_Fw#E"Ajp$$o{^7.. 5r?"42 |5 #xgvEg|E9/`po>ߑR?k*^&0ߥV:2+2+2+/y:28Qfee44,~"5^2X%j}<'˯4_Kkǫclo!򑫯=|T7; }Muֆ~;iCo~ҟ 2 #Rף6ǫJ|wp&xtPKjK⋼ߚ%17o=Y}%z)Z>dCdn *1]}jhӄ{lP]z]}, q<yK{IasuhǥqX޵5ǛG0?Sѝ Ϗ?#|g~2xnVM%CNV? _I<%u__߿yu&K^u .5tryu\ 7oUxzJpU?.p~Us>~UA㈺!vm[14š;K|O  ϑS>bfLw GIMbȳ،OhH;c~nxw 8Q̟=>O_ Λ.9.6w؍e)l2f3G*#֤baCX:S7}ƈq?WSAa9?"_OG%:5 _΃j:޷U@ץՉ[cK[l넪`wbI<|@ٔX~A* %Ǫ }n :|5^kssFC;5&0i X~w`?+ w < XncNG|.-_M*9w0"dž|wFgWAm8k7RZ_7ǟzq͆K5#VGA8o ~y֟¿hXdmLu}ַ穕yKhIG *K.KRIꏭUċ߳>oy7+\\=j;[–bpܸT2Y8OrՋ*6!Æc@O+'_7=-@<' fϷ'g'*a7Xݔ!E}ٯ1NԏRf1;|#? _Ϣ,t_y@?SX>UΆ2s7pL87ECzə3[W ku?b^3ri ;904qX].q}إƳXt6/ 3Ks*@E?5e7ګĺD|B4,4-~-%G/FhS8g] w)YU'zΏ^pρ,88󎼆DgXt_qf}jx/#'3D^׀4钠<G8DlqӏFz~W {xx fIߟy }FηUxt?.t:\ Me?ɐ<+99ReaTw&=vWOq&a~r7aiqxDORu>٭ppr^w"-O }^gX{Fp hdnb76|=EȶƯ̻v EjG`D\]+?{9C~x۪x/ō5R6=#Snx,Aq5O{^_7]=żמ/<+co7R>SٰI=5! _?%?Uuq.pwu;ծQ5o =w/ƒy"tȣ{>jLǝmY߭GP 5r];P;E:5&ľ-y{⾎ =sz])ķ~A}ЀxU .%A_aj#pKD:㋻+%K9ؼ|pP"q׋BJxh4zK*4Ud}#QC(뷜8ԡ;δasd{Ȃ .I} 3A'':-pn ׀e2W5ߒN9v+Z;f!uJ- }e6dD{2贍k0z{%nahs_;MgI]L >Lzh#t1oy;I,vƄC]y_j&h88?z kSU5>p*pIi:J Sk .<%ni";nS19|7Мʢxx69LzquTs4oa}ƂO)mU7pc-:p~`D{;~X?tp }E}GoC;\O\ϦWogǟ=͵|,7DLl?g/8=N؄DUb?QdAؑB Au\W*?QܜDq^t7ݯM=Gp8/VF}EV d㦼qŢWRuOq8¾UcFğ,g>| Y-XoA7K/?+(WC nɋe<bH3ƨxۗfEK?̘:\<_.Cao[=Tlp~bǿ%-G7·ce}E+l:Ts?-:G"o+uXw%,W?b]f)G1T\0e;y⮿>¯,GU}q@벬 ~߉T|!5WY}!-#1@S_Mue[$zl[7λеĮy>bgxwuo:g#>׻^ztLg:*5|7#-}b=n<߆}W0`bk?bX?x x2zil78@-zpq1b^FEޯ__OƢ~Y+I6]uֽ+C- }jY0$4gȋ߿}AϫGAg:_}Y}J:h>Xwׄ>TL\j6IYh.{]rT>W&hM}?dlߌ4~pI?t<}RT`z]wL/LK$sJtDaᥚK'>ofy'ihnklٲ QO]M~ ojCTѫ#˾4hhnV~/<ک/w'ŪwTZZߧyfRcMz' ׏7{~yReF[akFMHexvjOSoN56&E7ˇT'|3Z>uïճU{K$Iqb'sϊ4A>} awiغMMb)O#[/33ԃQ@<.=l:Ow$o33fR+Xϛe_*pk΅C4to>X=`<2?nU+֗M#?f8\1i*< SX_6\\= Ok~^8Ғ:#'ywRGiY@gFoE/$YÎF!{l_S_*S7Lr^qT:n痷Dy~jdPՒ['ǘ}vucPlwxJއ;0'4?#xOVy)u[*7\q¤C)Fw7HY\̎v!"-Njhgz3m-,<|5y1{\x$\=Go^B5uwYl7LJZG{)4o*癸såcu;xhWm )K0?}qr?-$ޚ$q̲>L Y{>1^_ؘ_*cOT? IG1M>[:zǀcjW: } eYe:yBTHG\Z n(z}Duz(ҵ7ċvǬmH~[1\ޭ^k[+WdMm>y* .Ħ(0\&h_t"zȸ oEzvU{9+/DU RfgQ;a>o5?Wty U՛8S4#;^Sߖ bx݄sG|xzp7T _ıl3<*>bGn9]}GE=]Lj7?ތ/k+XvsFX`5ϵHi|w:/F?=/2̍7s1|Zh&|QZ k= =gΜ/XCc~&Eu~&2άwhhWd~| u{Az/L˘!#4+ ~|m?^3bQt7)ɟCηnz]nQOn Mi.3|}mñ/dpk cG}/ʅw7)nOW_$ sKſgy{w"'W H։P0/VR/Q=#2/_=Nw/U|l\-L"=:6XZ֫lCu8?r)/ςŒzR7^WaژG _u^XHLњp 4p3Q9T-L,UeuӒ¶3ev~E7kC^|M@ߡN:}>!x1㻏7:<#bjbwdk4-o.$7bWL#1K y=nڈ<0D紦'-߄>gzjg${cQ7?ϒqShg,"w,ݿj9#yQHxj>[gF߃WĮz?J3>X'Oo,wgՄGgyQ;n\{;'-4h}<} oFd:iK<,$֙|7h42)hns'jx-?,!Td)' l+cx9H<&>Ɏuu|3,Qz?TB_w& 8AӼ\^x@~D\w6S_ɟED'}D䧈&!ZqQ}"ҿ0v K1E8\ ztҿφ]KiDusjOzl_g%نO,yr#=lzU'm=unKz )އkE=UnyWTާ|.j=q&b;4L<^'SHa~߄j%0:yhDх;YG22z~)hbyDRrk#ϩ^E:aėBy.6.ސKД9A-VWO(NdK?'y:M~fX҂i. 
z_qB|~Y鹳KΜ!].u[W+pZЕh=׃T"Vׁ#~{M}Lpn]_wxمnpCE7}CKYl:h_wǫ_y>n6"v^Qzi>?㸺`tYXc^ z~G|3x~-gfA=7 xO2$nD=e/GZ\]x~--u%L7]K8 T>x y>_x3EqDx-TS/"NQ"oߐs+q~coqch:Xy?֯8.p–Tn o otv$>[n!钯}ԷR깠V:B)*O|)_gx]zk{+gWq.q%~΄{RՏ@jg=>/g㍠z1ϣ!f~_z~G}տWG_/aW uϣ='Ky1~B``ZoEcONE2%{0w+X'(Q>Q@󑬗+ߛV] ݣcVc 5$tdxMoۈy y >c\2eXKR #(_"1W)o|_zHjԾE t(f}=NFS,[:Qq~Fӈ߈u?wi]k q,oq#P5|䥻G['5xVXOwد6Nzy_U'ָ͇/?{/z2`.b-: y#W,.{WUI)RpSobE*sjůƋxB_&,E57Wx8:Im;ރ}'+*pD2)Xog MNhYG^0Zu3Bx#q oҚp- }F*Ż^|~=VZ/ y2e(L݆Z]U^zץƙ6xySpZOl#yt˒X{8:^uWuU'ÿC}l0yޗC˅J3r、' BUSO`{wq.n̨ivQPݠh|uk1yoX1LHo8w8K\{Y[*Gn0Za#SQ7;ӆ+U x:,DR?`]OXslg~u:!}rcІ&OT <,X/75ކݛJѢ'>Naz]C6>>qٺ{ļJ:}Omf; Db[GbȗtQh(u7P>SOjYMOīz[M;qǾsbzP]30I`byq~P4u[yE RYKE"+zyu5tsԒ/S,VGN(u)Ȣ+Q0=jny{zghFĶjQo:N { Rq"Mc-rE_}_RkLJ m' t0u>bG8oYRm:[ǯ,F>zN`\donKSPDz]Wz~^}Wz-މast-aG~ߒ2E5zWèّ[a^.iɔwtF0>d[N GOsd%__==\;?z9yɎ#hLO[|>+>UDgu}X,& y#̧cΏyy7bʋ0Ncz0MSC#GT$~؇ N1uޫ}P}3#UR,nW#^Wb}m?wP+nj,p>@51dkHW1ԇVG#g=r4Ǽ6M.O2 <y5ޓqߏb~;4fyWD/ry'ȟ9bk~[>'Z$3My6J̫į@ W l΋:T_a%6zXGO1dS~yb5P,aJ׃}[ kxq=/ʧ<}4^x< V]&ut睡|pA/qC~6o9b.=Wz._,/{}e?-.@Ztf66?ޡgS7F FA4Ӆ]r+U6cE pӟY{yYLoQӉ/YQGpWb~Np 5ڥP υj:wWr2cQ 'Ч4~B}#>K3:郴 ;ԑhwGe=+5> _D{>1 ߑaI4އB7G$>s{mx~;*NU7뷈<:{ | 'n??)]g1 atzeӷ<0ju+D_hf}}]'c\'2đH45/%ZRe x IVŐW-~"o:,_.9~³-x2nC.uN5o2Vj~H'ȡCk$o8#zl{ὐqnmz~ ǃă<+ wn>g'>dgF=qmK }qSfVbM%Svzٸ&̲1Zu/O(W*U]~@qA7,pckѯUx]_Mf"fDGIYG>ޛ3TO,͟j~`?BS,x?-}tM} Fpz]3:bzGzz|OF,_ ph`3_1[3oe?~tOyXtsp𸳁%h8/B;E]bNUU}IK%0nhy@?!2A1TC?qԙ2tP#Gg&\O8~>`e6KI_Р'aڌG S֗jPKЗtp7N>l63IߊTHK<Oi?N~yEL5?oAm玩yս8/M'Qu -2=2jd1z:(esи.xA7;߶kWQ}P^7z\B/6zSxtsKXpFAm];Vlgk;]ؤbO߯xxYc0ט_oZg[w7$5_?=n'贠a4ܮi'du-;Butn+[tj QZXO}2zg6ߌcmiDhT>' Mơz ?׭hm>wgc dw4ejc;U7 z=7{5~+rXxƘM|u{ l7%GB󀍓g7~V /0 +D9HZkvբNȖDP}vC]}dzVOG[9Һ/q -kyʳ:%.Bj ؅h~w<>Ʌj&_~cC(`q:NP_~sSsoS`Uxul;B%>v ,q?eWM}[O-9@k p(ւc+V6~LqoQc-nuS?L;oa+c8p6sy X#uE"ć*[?.}H??0..csyMt/+O;Y[!}>s~,yq\3ˮo^:?6S|ϏNsZ&H1K~<7„2ߒ`JǪ[~}"ևt_ub+84.5(:>LPf_.*Я%cw_(gcOo] Լcr|~m'_&ލw?x25;j(ͯX=UܴΤt̤u'u[N~czy(T G-ZϦܟѐDwΣy2&[ǡ<8& xqWCɔ1+xi|Z4>G4p\jCOfOWwZQOϯat GGDסۧ:Q*ق9&^ɚ6 ;xdYx3n\GgFV8$_bp(V:umunU4)#y EOm8xt׃D5ndQZ^_)EOQ-ϣ)>S}Nծ /fK\yC:GLTŅ7%2Õok;2\1N^ Gu-mWS)4r}uϙި<%dQ݃_ƎFa} NU-xP\1y?V,_`|C-U?m˵t^;D[o@?͒疼kB'(p#߰`L`_=~_%6I:qj\}.I8  bp=WQacuЀwSsmP_Y@TvEOH~>ϯل[SQ !y1Ikf=yUEcf5I'񩎌AˬʬJ0gQVU ,vEڞ/]22!??CZ-Vkgq&CcdzqܾA#ݪA}0:nBwG r.WʎX5<^[ _5nG{Vzׅ;cqwʐCKxQCt~MmՂc1[;Y~)/ ޢ|x]_Ep]2l(5'qƇ/m YݓQ<`T`uB7ACL}g]w3CSߐΔusR[irf&ǥ0%%d;u*1IM\GQL˱Msk>PkXʆ:*EcҝF,I 3#+N&{O3C}5~_>>UMz֭}B#\9I_ߗ7ԙ uzǺѢk/t"ԣoA[7zEQgEZG{EpRZϫod+ :q↻t}Y3QUS?GZ;zSԱ)DŽOG <7&߫]mğEU85=顣Sq|/hޭ{Rҧ D WGӼ"aBx#.8{EuW`G ;j<^6ߞ?W63K}x.ڄ;c᥄Ģ.aAy {Wkm;tMz$6^U73_u/)Cc=?Mb[߿k4_'5?/8_bwN;jḐ!T El+ S|M׻yhcau&.i"Y^78D~E [|*$x24 ?SߍGū$cUqP}_+>6ɔ9oq=%$o647|N<èx=BV|܈p iGް>FXr Z=ĺ>>Z,^vh;ok_j1W粐gOR?xm 0_3m<4ٿ ]D)u#+hۓ>&|+\zt㿐Z\N?&C_/8[炩?!`k*Nk3i y@\x=Z̳VJw^_KhV:}0\`Wy y]uq3Wt6. P3Ft w(Z-y>z-Ubjׯ|0nk-׉utσ|b,Jķ'\..}.ZEgy+#Pt5Ru2?=HuyTI5e@(z Aql\tHw6_H3L)_zD}>/냦!q6_`#CT!36GQr"xVt{)aLQ+ ǫI^X>:SsGu [^_<[FI&K1Mv$C{}y,Xϣõ"\)-CtmRo=h. 
>Or;c~{:cq3^uL*E{>ѴN1891j\KqRJ2>=H\G'Sb|%;[vANVq4@_R'ݦ_hy7ׯjdPzy!@)MH׆A5')g}̬]>?{GN+[tyvJ^4Ǐ~Icolާhqb$;_#֏xszN|a-jaQJ$/麿 yI}<_knQR`u(5@5L(>qxGJ_a!7FxmAGKǬN*Znp_.L7+ױԢhCbн}j#th8\#ҕxǛ*vNpM/1*7 .h&u)/::H'7ʓNN?__V)vSSC» ,,0w䑾pM5~auoyiYao,[[oo3YYYYYYYYYYYYYYYYYY_NiaEEi<#p6~)7Wt\XPHw8C=S[It<ۿ_;/VUg +" "A<],yFUxD>eq}# Nwyu|uwz}ThEN^LsJӂwon&DQm w}6/%G/d{EtuC/AڻL,:V?5O^Z~>cX\^+e:ن>ha}eŒR_53@}yV>&%b p{ᠺ>?_Kï\+ }G&(iǾ+;)zvU&w)3W8~J!s͌Qh[_:ܤÎnzw~YLWzAwh21W=6}bSvwjбPk8[oCy?d޻"Z;p޹S{tyo:|Q=C2H'1Lߣ~qR7M<ʋy]!\D5lF)g /B}Q y1v8(_3=}7fY,o֔+'M?_&%zO 7Wd%t_^WFj?nXK}7awUpVտG>?,:n5;|?jwJ}]/ Ͽq۩cRw u^?1d\LNm@맢)ICCEGl׉|[!휐Y2}'4_GD]/`n2M}ug:WϲRw$ ;>}D%j4_` @ΎUXtj,U]@]Ww{^";b~c:/أ餳ẞ蕬 X?l޻%54q2~C"b`ϠKg<{3wW4tz8#)Uw_,*7:IO_}?n^x~>/= ]66u;.C!LeR'mx9KGCo|X4gu?iғkk>p q;K;@^x~,NH]Y­B w62m#4lVz7+Y"&o'e|Mu/#ygX]kV7>gxn;no?=}} 󗺿M#3y~ݪV8nzE?=GUIW uc}XϏ^{Z;zn<RZǵ;|l~N|]$&^A43~{Z|S*&;W{y/û ?o/:7yG~W^%~> t{чu>PŠ*oiy3oF;o 0am|`^y'-Y8X1SS:O^W^K+__bϞs‰^>~MMex[xФmCp %{,pRu=5{8[W/ö}':U7_n=digw>ަrSLy(Rull$>7-=Blw͑h{S| m\0N*uyǮ&\'L.rc~dM_Y-A uפK`c^\yׅ8^K!B|#ϑP8˧_ C>Ң}k (.| cCԡ!"_fu $);]obUT_% ϏqWEIdtwȠ#"L婑Eyҽ1=iď豱2mr0^Ww۱x8n|z=(yu5- ,]g|'AWN|VO7\[|!\6!W#{? 3 b;y;ujޫ:z9qw锌;~m1F{AxtǕݮə=%EQ{PzB[:Qc=nB{ ͯ7;>K^2S3,4t]@9_> ?z&Gdsu~3ӣc8PĦ7ߪ\N҃IJplJ~|Gw"Ⓖ<=Klxi8X?tD>P_zagb_i2̟݇|}zM9sx3:ߖaZG+/\?rKEM Z}jMu֑$Az>#LS-;Pvq%nx: <_)~ǹjH,D':Z'ӏq@) z끳8o 8w8_u;pX.3Y"T U6FoQІuy`L_:_=! z5w~W1qfz _P6ץܦb}IqMgo~Rkx*cK%e[p$.]7~A=FtD{GT(X\53K}lzϮ&xAf1Eudu{%.:ӆ3xܪ<7meq`xRLq:&.З4r+u,>)ܯOm|?ռ1QY#ς?-RWN\:]_\h|$|qE:)M="/к*[u_ZY7ѯϹqvyZJZw}_͟>?w`tWj[7.>dJ,0JC1S':ʿw;qt޻;yd;׸Oǡ:`]"nGqoOߊvUiiArTϙ() >JGEjQ՗ϽIN8'z5:w7bKE;9\ 4#W^c&qF%ϿG}}>ȿOL\o|WI:]5Lܟuq82ZJ“_TP[qP$!s% yt?9tG]`{穿oW3‼?a+ 74ċoEmh7V  |ud[ EcNrJ`8(^Ÿ PߛRLX^0P>t:5rj`$ B \ {۶xIxǍ}/>NԦӐl/y?c}C!'Wqx&}|'"ODz*n!~nǟB\Cg h­|I{;<>aRڈ8YyGἠdxU^| W ؂L1 'wLF⳱H n 9 ]N1γz<ѐeLlj>_+uhůPU8xo$u ,xס*au^vKyB[|)z,󿸢Wy cJOa嗆=V%I,~ .mS9f O Ypb~4̿BA4O`DE5~\AI,1H=5JRE1ϡ!݇:V!^X4t^@C\M8#$%s gn񸟳zGw_ih ?nxnV!xF\p?bZɦ )4'D?*K_&ɧcz=شgz 튣눝 ~l}Op82,%zqY~ *u&bȺjZq8l{ukx~~|8/9Od6"qW8}K},>b|؟x2! 
/CO:,sY͢m?RAQ>燋(}㮟CVL]\P$qtf-3=u&Lxn4kc}ziC8V_I8U1y' j#ZG:FV#zM-\5,#=ʬʬʬd WfzC9^3Rw:|`yfXE8[k_$.~\G|"YKO:M`7%\3<[0qY;炱;wO.@*;Ƈ6]{1>4} +#0:~,<~K[ulǡgWC~4_ޚE&P1%=9;q]X޹_'ӆ obrI"'.Cz@τS1PS x&\&z"\%sޟ*?0lo-w#U'- d|/ܯ.ݞ#G2{oPK\o/h/TuGs;B7x.(^RPPQW̻)QL(UD{6`s!AQ'w6f:s\ 5iVs<=8Zǡt5"ݹ3ߥӯ73=z_ݱ˝rC_p0 b|Gxx+4uN,?bn W>ϡ3Sq\;EEgs?~ce^78h.ϊ\Єxq;Z0}sO[늖Mh ^wۨ^#kX4@<^T=yEC6 O"\A7:[ W>.]]~^ϟօOҟ֤ckhDy yKDEc::A+F߮KۢQ͐["АcBx }BBT7J2ϯA7fmx㶡N(n#۱vKjo^BT{4WำZ:Vg[֙2fVIx)p>~ _l'yo;uJʧɴQGuEM)Q/3|CEOSR3kIM}ln; ]θ= x37G~iXs~R/Ms7\,@|q S[灉 N<_}Le9^o=!.LGvͳq}Gc_TiF} :~_Z\u_EZW{1\A9Vg^H YGGz0JOyrQ]Tz},ߜ۵c9äq>28^?^5;3c}C[n'{]K}3;{)|]bsQ~>:kpFcwT\*tOe<W1-_?ٗt<ax7~CRׄ<\WQź$p> Y-lnxw|%HYCw|5zxnZ,kcz=%|~1?c~>X#˜خ>OjuBOEAtI _ǛIWE:@:z$=q_OϒZ\gU,<T;\tO͛clE1ߴOxnR_nTnX;7݀չ"TI/s]7rN1>}q7Om~1lȷQWt$T:Q-SfN<7Xg3(&kf_lC)ߋق0F!/{sᅵW.-'Cnq_VT"Xn?HOʫn-lWB.@ͨq#N@]8_,j =y?yzm<`D<_g~?aZw/"h|GzCOEc?iϗ*^ %+|WxbĊTxQ TwM CjSi_sͿ W/~B_8_ ˀ E[7CŧPܼcJܵ#_Ӈ'[aȃᛸ!~!R1gb~3%g[ҍȢ38@8o4p|B>^gjDo10^ ;cʏ??tXaV>[cEnQC٨d68/SyLG._CuoԺ;gޡMhgT]>4fk:6Wq[5c,iyB&\ vqޮck=^[U;3_%n.p(KЇbRP>oG{Rk WG!1\5KmuԢbo8.o.-KW(ձ8?xN~!:Ψt =  ߤ.q>F;뫫ˮwc/D3~Png ao!t)z}#~/Nj&fDcbX/<ώ"U;ǹGBf$R%wЅ]jVG%7YxR:?;?Ʀy:3g %|?)Q`WC{MxC#AMkFu _QHT!᧚>"n|/U/bQGŔgr-DgAĈ ?TtApO>xn_[Qwoz4/7ݬ bI/Q`$zkxʤiۿlW5#+SYWQŻh>{&9{1u/^,3PS-!~жD|€kI n W }_w:>WCtF5G K_(,< ]~ 3Zk&>Q׳?ƕpNQ:Kx|QqIҚ<ۛd3YJM~06\X;l,_~[@=ĸ=YP[^bZQeYk\,L9sxi?+j8??8?G?ҁ)yH[te<+QłV%Hrf\g6f6^ zLp0ot*gKy|_KZ :uY^АAUkN: B뇕hi#М⳪ C|DQ[Ko-#9ROWF?l}JwϓpX|hhSo,i aۺw:K` {LtZ ׫r|m|[$v< %~{}}urh~ = <)2]ͧϯ+K\7{NJu(_ٔslN/ܗCտ~fW/f y}qwzW5;]|&&}zѼ\TSœc~oe?V C}P֙_9)Wn+ lo ̄ۓyr~^4awq,rGG鬠#uc}=R߁ʃp($C?Ks\mwiOWBQG6e|rDL&' qOߦXh8?٠}?\Z c]n.Y6~+g;0pWP޺]t6O;?U9~EE59t%_Ȼ4ML&^;n7߯A7]E:Tdk` 8>XyFRtіrV5 -oy] 'Ohl0]xx)s~9/Ĉ'{D=)-u/tu7̬|԰qvAҖW<{ӑYn!.u:ݬV7 {RbZt2ywבW?/>cz=i+9y}M_K3Kcpj}9Ow{:1u>T_zGͨYE/1Э^ļ墟Tf M, :< `ү>gG%*M2i^IZ,O3p7UtWSY9?~#C笻Ϻ)ሗ"cqE5)Hzo2^{/:Sh@㣖?NA~Ï=O&xYHNc<^aƏko;k[.zPw_/Ӻ#byO\tS}oj<˳wJ:_TWw5l[7S-}~ڮ*+9.LϷd}rpT!km?O|t#K>av-:b]¯3ֽxsG^&:<\-{{Mϯ9n&*sa/d M- 921RulV& ]$ɍ-z ?͗q|߅/,dG&ܲ>xo5J_ҩ`!7|N'jlp&>2Lm?3G8*FG[ċσIT^5Eѿ(dyΜ+ RHϓC"9/1_ >)櫓øwh}[f=MN}x]#_r Fc"OL 94ducⵥY"z9>?>q%>5if[ X|q$?CHPڷH~OKQ7Kh)sp 5(;@o4~BQ묘yQ<0Vq>&1XG.~钡U<^-__13.4ӿYh w*"vt"OoåtYKb:m wsV\Ϗ 𷻌rTٺ"occFx>d?8_9x9|^Cu]4 {#)e]=\t9Fs/h^NچL?J봎g{`ןtNE0Ts[p.?8^(gGF>u.TC= =&^tT/ Wr[~t/x|`'ѵln>D yK>{1OɳW6ezI4u_ἬKQ;Bp<6U|Tp6hH8z'W8{_*y83`*mȣqͣhQay1\:OXt?,/GgxO{ҁ@{r4wU>`b᩼4o'ov/BC~OCoǚO DyЅ|Rˉ6bP5_԰;"W8ʟ@?Q $-^5~1 ehXJy<}~~?tSuD'K?fus? E\>tL7 zdDŽ|@nyTW罣Is}QSE{~< ]O1crL_p>U?le_P?gꗅ*}l'yN#Jx}G=7 $(QU?fZTܴpq F#xٖzOYd#ؖ~퓌Gbwߨ!Z61/юg*t,zkB֑"G|4c0Jy-̂/z!YܟLři2eVg\T9iqľ)/~i޹ CL/Ϻ;F[!)@Q:4cApGd1R$Z]'T9b37'֪yb}w}NE¿^e6jq hnѽUϻz/:yy먆zQL =uˬt +Z$Zo Hj6%ϳ IWC;sݪO%o+kI>`S4L?=`ڑgʑ΋u/L?lS{:+{quwS۽ϓ_(ݬS#pstv㴏Wd/^6S 0]xʆ%˸/ f(@~F6Vftwwy ݸ_WXhAqcAx~\]m`Qcs8yT/&/Y?]#EvԹGz}UJ[_'ĵ|ҴLh\mx{7H3´3}T^`"Ψcඵ7IfY2C3GHĵϩwAFC6+ > :KjR/-}:w5q:w6Y]յnwtv^/OJqj].0GR}O$E#y݆ίE=!=Y9!zd4}αNhq<zgt#=6/ h§cQxXp~5?DW55P]\gѯhϨCc(?ʗbÁ yS oѪDI3?驡4?T_S߷ xM>/e='"?6%gL4op?YɑGg_G#uچBnqz@(Rgxy4I9X hNFy >[ڥ3?dPƒ7ݾVed^A^- wuUnAn4P-_4mj<^[* 񥟟_.E>!`yd}8ok?ubtf'W9\ *xQokB0غSgQ'?aqZbj x Nc1&=+*&:y>n$HI:G7B磪޳zJh\asE>OyCM; KL,y>}qp3V}}+ O,}OK} l:'2O}d^CW!&{~L>mxyی-q/Kn%w]XȻOzR{ C{;A[uW1qT*n&KDB_tI3?8^UGzLRG~=K>- |2{ӍcG]Tn d{giQFcV"Р7G+mhL'd~MQlߞczX$+T>3dT^;J>IyL|t^D #ݣ^fyeNՓ<}Azş)q%>|H`Sz'8^?>/tR jh9݇%ЎA>Z2],Q_׸Ӥ|1[{~%ˀrW1z8,5? 
QuTRx0UΌiOXׅtf|<;փ0l<7̤ DN8 Ls;WM{eS2aU 'e =c&|4yEyP ;-DCVX9烶n.MusսESKiC~CCX>jEuI{yϵ-U~bzvtnqz??s{E*|B3tv}zN J<㔧D +YVt[nHߥ=n8꩒g) ΘQ$[D<0U䱇 8o/=Ü~H%}e:2_I#:jFrx,YkCb~Kߐ)3 aѡxxޔtLY9zS;spb8r//H[gBE }o;=̎6_a8ᰙ ,ו|㛸6Q1hƮOuTa=YnޭZoE9|^L6߱_fG(3y=4*#W _:xESP?]>n{E>; NOwvvǿY7~S]Gm|QEǕ"[: ^El2# j{slb߀O2n!B}:vY7ۅgXpa%Y2wg$֎ Û4/[=z~4vig:tQߤv۴t,U ?zsu[z KO'>KEfgBg{ZG+qH+ח*zGqgC)q5}D}xz'Osb~LťO-l'&~jb:ɔ_ƍf0hxN>i~&[,>>"}6_3y=˼rvt}ٟ pކ'x/. yįA3%5T뫱 ۙ(~ƥ* r)z|tc_Q}UCɾ}C%?-Iw4z\l!!Q׭=dP~o 'wL bdEԳ ,J>.y:MYBȓv^-bz nշS9)ǩ~/H 'a ):ik3S볩m XWU φ| a3ےūgj3r5{v]WC=g߰Icc::!ާTd}///*W4{= M8N;pMAo)xSi̫0^_Oy ??H:QOS?߆z{dpw&.뺭n-x7^0K߮:Lrx\Ìt$Sgo}.7VׇAb+O _o+$̭ix>{G ȡ%y>??'׊[!wE0T-Q|f>=GzX(&$Ë귡";z 7mODOƫDh[8E^.DsMoW[=ҍ0:[h/ )+Iu.L:Hmَxn Ni[vyޛ߿ x>ʬ̾<_'0#1 W}Ҽy`c~Ml6gFGř?#=Pysߩ'}~GnPD)Mk<̔aXq/.PhGzܜqᔏܑO0h[:UKj%e!Y{̀gP4ssT>}pzǣp ' Z&q/c'^7MW7EF8v>nwp?=]'<' ooWSWz"خV[F{٭~YޤI5&C 8:}~OcGA~T\u\KC%ԍnH\5F>.T[t G7|ȿo2֗7'Zyt9/6oddYm}#rW%p~e`.G,!JGp^d\tH{'Ƒ: ;>LrwxӧPՕ1z&<*ƋP> 7ޫcm޴,3=ċ;nυ\wCuIN-Z?kW31߿+$#֝iMtkexfϯͷh byP#/^]#q5hA<,}Veh̿YWs<˅f{Cu_~oY|L3wf7], )֎s~؞`*ljjdݾV< G΋JY{3Ŝhޠg8#߾c|ypp&CGTi*ڪ_}ǫz8Iidkqm9' | O$ܯA_oDd:r6?Xw $UߋJ]5u%!4fD;& Ȉ+#3"TS`FBbHbB #K c<?sggg0]c1`d1#"c׊sj{_w[{ޱ;޸`{qG>o}mֺ#!W%yS.>$;4ǯܙwyo>9')WOs/xkjŏvj)~l獿KO|O];bN]g'8+vNxg}OxW?i ?>#y~%#~I6*$_;\vo};;.־u=9sSosNS`g~OuJ ۺuszesYo~tWߘ<>wa+[nݟ5u/Ww{Ys3#gkޓ v +~sw+?6[NpuQ}Oܕ{n!JϗګK.wn\]$ҟ6'>y{lmN~臶WfvbP:}~)޿t'=S/o'O3PZwۼ qo󶷷Ƀ?'K뫿}| a<<^m֗~1{oO/7J~Jb;+ ~rgrn7 _G|fiGg{k]ywo:MO>w_?ujrqx_Qߟ2>&?/`g;{ks޳'|^s?g=_pO=OޓÙx|ʋ߁xxyo 4ɍ?^d{{hی[p=ގ[/o^~vysZ,e:5/=׾w'ͽmUKϚ:ſw>;O?3WC'_zΧ}iw—흖߱u{{[?ߴ7_zzQ@Oҙk?wCqێ}Dww_g? /oC>#OG~|u[>:h1{H_\uGѝ|,I%9y.{Qzߣ~}sَ3?f.;Lサuzw~ޝ ~o}{\Frzj=w+7N^yWuo9Yߚ95O_u|7|Q7N>o?{9_Kq-l:~^|w~:~i9}}Ojznwxe}W\jΥK%/`?wGm~߱O__L:׺Jq΃>ᷟk"yQKǽOgΞ<]?qsv3|~[9y|{;y{^kEu{<÷wO^ﹿ9?ѿNlH+sd~'m;I=r<}|OCo~s絟l_qyǜr7︧;޾NM}۾m7oIO&vNUݿn/yoNru䞓糓6ͼiKe=#pɃi_y)s/)hw;K̗_y󾄯O5䃯y7q|zߧ%ϻ;j܃W?W|;{N^g|ϡ/>LY?xo9[8c]_Zgv_p;;kծ_fWosټ]Mk~L oAMng>|v}^g |ޡ{_sKp|CWuoK;;Mm[].~~Vv'}Xߝ%?|ܞ>=K>)|@3~)uv?&WvG׳O~/$KkϷ?0Mk}B=N~=g'8x-yWޕb_7EuSbXo{goGztSϜ/4d\Ɵ4Xκ}:iw/yۃ'Zo|w3t[weهyf{ݾ<K>8w_*e>w ˋ||7>ͼvÓ#I{ȫۧSͿh{?}7e=hO<;&om+I='3 y$κOH/;> 5H%o1 ⯝\~WJwk{Ol]lz}|w9~fe?=yvM~>O {>|uy}{37;9џJ.|AgR]E=ߺK ߣ{{;47=߾۵y]Nmvo: OXcǾk~;6^z]AqOsv{> ߛ-6 u;]N?ql'ϭ풷b]MO?N܇ySީs aǽ>Qa{ɓ/Z:Kݫ;g%1sirlX/If?]J]ȻFg;?z3<~M{|k/r?;%޵.߱c?_ŶgΜ=vfvG|T3~zX}s|-|yɷ~ss/{''7y!yizGm#L/6&%On 4#lSKoVɫ^];$spoc>_7y8w/?G=ռ~q]Ͻه=6ћ˓}\f;M]~6f]I5 Ifu܇zWC[s \~>=e+*P=Ϫz4枰jkm5m֑VO4t$zk?r=^z O k{o߇W~ߏ#~_ϩJZkw?cu_|styas/7$}i/ub>&yƎyi=_W͹5S~ϏWy~6e#rZ߭_]o_ϩuz=/9|?W|-SXU'G||w%amy]/F~)yxL#͹;_zXra;͸/f}~|>6O9ur?wOͼԳNy {䛟Zs/9y>oNs+~~6yxr{D~F{̇f>"y'kHr_*ϋܱp|QxW/N5}|OavI;q?8gܷca cs?G~zỹ9Ys$ut39~ݾ|rޓ<.I?I?luY}?8|s_J۱>:L3.y'y{_tr|_<ܝc:<.ϾA旼}' ګ>[ >]/mgɻ|Ζ}Z~?笒<>Y{qOcf[s{'9>аW|YZ#O=_j OByϾ|W࿝/W\c7IdܬߴO:_>kb~Gyw7ʇ8]Y{ܭ_2;Kjai>35mm~.|-^:S?c׺Dk^ahwuKdc}T3+G>O 91irk7]f_'wɢ'mi>Ykl&_?;T"/]Ͻ쇹u1}|sĮ{tz:s=8;.q{v))>l>ccNwx'$muNGg>㬿9M?c@|mϛ+kE9a'{n33Ss`3vu>߯ҜGͿ%~UfՅqϝ:wLLeGͿ߹Q5u{}tp%o?y?߫{wBo7*߲ݟ~3GvT73CzOG~}G~}ߟ_G:G%z>;]}}Ok湿ݷ\=Ϸoǹz_TbOz?-y앞g'7㱖?{}s{y$}R=U3xǷmȯ:/WqFs~}Yl=y;'o|џ7lO}ɺ[?yOzjto)F|Eb˛{Yو})࿬YVy{oK^w龰v|;>_.},~c`Yr~wzS>no?u߇m?F_'N]W~5BN9~{㇃5/&wdjO~S~ww6.&~䗚wM~ct峽+|RݽNܗܷ_;|'6X^nᆱG?~wߣooB:¼`ZIrgz6:S8zjÔ~<|>>Gށ߿\΃"I~P'AkNѽ1'o꧶?t?/ݝOos^j.m^cooܝ^Ͼ_z|*ߗmwԜ=c松K'CǷ{~+"g3+>@ nw'ӿ$o~R=O}u󝘃?j1xƝ9j]_lߋ&8䩢⾁/w|ٮ{dG=q)zH~Kζ/yPt}ɕ!y ާ|}ߧy9wY/IͽDgzVoo9|7}g=юFt,lǸ5-)ߩs|tڽ~]z:|LO=f_9澹OG7PM|Z>x0y9urkwƹ9MVRn~>߷{w{kR}8|W& g7.> ž}|7'yu3ȯ%=:_)8w+u=7-1"w)ɻDqN|;|H/H>li$5〃3xaO'_{}9yM<_/I^{g_n}z";}yv?yl?<}RXg}oii=X)Ib~'~Mm/oK^q_3N[{ߌ/m" CϾ3/s;uNٛScqi;X9O^N|7إnľ%wvˮ_Z>x[oOn=Mr?ެl~)׽m^ vԱ:^ jk:%g2O>ugy+vwoא#x|rξ[w}I/ѓ0{of7%/o~|+?}-}|o|5?>9;{G;^c~Əɟ;y8(q`bq'QO}s|mxO彆=#?Tms_'[ 
9sQ~W&懪~_^r{%ϸ/JwtGޟ}~iI;ιl{&O:&</ܝ{qloQ|)-$=w]{)6}|Or%sO$UH,>~,'grno^6ۉU|y֝$ڭ63'Svh}~?uqhr;zϮW뷹%^K~=yr3zaw_Nl]'wz'{;՗=؜#zYs7]_/>1'W2|~>nWӚ~_אl+9(>Β㳤}u^ܾw~񔷷һޘ3ljqyTz)oKk=]\4烒/A|{u[Jnw^Ϋt:3wr &ך:<;E1~6b~;;qWmAIgG9M=}=~v0qv5,mj볉:e js {I?8Uq\~bS6'(|6{yݎtƍOec߫Y]]O5zZ5W_uS?ؿCCw?km%h_jϳcme~{^^ӯv~l_~>>i;Gފ_G |\9oo_kS1Kwf? Y|؏r1;o)|ן'4c3I{<1>{_xs>MVgr^^ѕJTRJAn^OJH~Wtބ2@+O :di.ByAF' `!1@FC'H[ =F5ABT+Jfώ=nfbF0>BI[P ԚB7iA:9!sӹLjmܻO@Au}ZVNPd:YAD1B0GXs+MR_}-0X5K!CX0>BRh2jR:o?O6 同a!0MV&CҘ.(7e1iWeK/k}8)F17 B"DG:}/ BjJԛF~̗Px2Ft ¾1f"sgAt|h^k}H55oXvT;( An[X}CBy9nP-0 €% SXץFszcrMmݰLg萏 Dh %bw7)5eH,mL^5Vm5N c}ܦ;h>,PsЀLj)F>Q(F#j 'tK 4fh#F#@"c0fT-W,{`-COx~F[s4}ǐcqD%IK}TH*h ,)F2m] r]`wKTE 4AhPuqL01:!D eǔW,R P >`L`NFV&>PC5 ! STԴXޠO>JR yF9?H/Y`0,>xam\LYxFϮE9!w,GF NQhIL*(%%,@\37\堦UCWr.))a b %ZF@죑jkPS:fRF4ͧ4|QFbT}h^x4A!B82ƞ`4`86nܦ1us ϩ>)F&Wk6v}Fh丠1LҎa;.o?^ ^sx]2(> G4'k4Wr|$[CdǠs{MXR'X0IIYcJj jJiOQ VQgKM)ژ{9hyjBp EL!i:v{%$3궴]IJ =F\O1Ҍ t  EB3^ͦ0=4 قZA3JG=>͘L1Ҍ :" br&!ː <Rֳ?h5r4"ϩ#gyv 6QkXpx7>PYdzFhNv}ǐH|#Ρj*RbF09:}|j j#Poʲ?Ԁt1R4#g0 A0rs r@ݼ|ykA\Xb}f}-3~:T-dѪVrPN)簌lJ 5>q99[۟ PSh)TD%|!  @Ok>EtQ-5FZRC ad\{54 NzsHPB(4fz--H[2-̹U(t5G#ͧU(8{P3@ ;qsWp*8<Cn:Gׯ4<(`1S̸mH i㎖7vDI:Jq ǀ!iF #1KH )o9g_󌣅|H;s v" O.34nOex fGr> [go|}bMeVˆl䎄0+V0Sn"~qVWNj,#PC$ ݶWki˵Q)рmXCG<:*#W91zA>:oBW;DwGBr#*W,!Ӫ#XLZ23Y}K:F(9Qg:+C Y(Zc*EG:e1Ԥ8`+/">->LNiBLYYEh]`;m6^p TQi(BF^_A5te˒F$E>5Nt8bއY,)aoW )nȫ~e*LHOx0)ؒѻ@MY -iCh4W2cm.7Y!@7CCB7V#X˵<_B?Ī Ch -3p>|v1G#.xR_aAB 4A!B##H326>?XӵqQWZAlO "Âg >iU`rЙvpms>x_P]^H NF6Y;%מAmw:d>!ɝr5\M@=uPH s%t 㸆pi捎Dٞ:=VZ\9Uj7 tRЂk2T J\S A#!@"5̀u&X!*(U̘Aȏ`v|1l[a.r5i'DTVEmjјb4)Ԓ y }œ7_Cp,,`|V #UP &15 \Cw٣zX]`we@y֐xiZCZa995Y E.##>u֧=Sn-ka 1zDEڪSРYnsץ(mccָg{~cxY`^B,@ASb!C}8C1[k)u(9Z~_v|Jmr D?c>c&=fa6#ܬV9>: [*Zb%A^Tc=|Pṙك{]`.- &SVƴCHiX`sb&$ap-]8@M׊5^`e92]K~^(8UV}rcYrqS4v@8?CLDEBz aŝpm"q0o`pJqcLsW1 C+++}Q4#Q g7VEӂ՚IKjMK(S(%v^ 0F #+(G ҨM9ءš*?pXBoQw0X1)5ktRzsJ]ߛ9If1YQuG){ PM T#0nMQMZj.VS= >l ]pl+{"H-3& #Q(d1! ]NH0lZ0 jxV9 >ǀc;NcYu+~_~voO5FW#]Z{UY fN\H֭ZZڀu!5P} -A" -DV } oԬi #~#C(tGp?ZO$Xu t-NL'1h9v^| vK.ʲkn)KB!@Yy<>`7ZTDa,Fm*[55V 谿}NhXI4A`‚BT ʚ:ھ5Um=sjp*m}LGp 8o]HNj6a'ahtH[k iFȄx5q u԰=]IW/0-3t|!D四jS_tt|;「KΪ9Ģ=gTQ,^̗mB}k%],Q5Gñ]Cl+JWc%YFit.wsq!`J5Z@C< R7aVEhn9HᄤS#gPQRcݒm;cw)Me%%%]}k)jGn*`eN:}K3-+HbLPct#ň #Sf,"MTdQ1͆N ȩSNѩ OKx+F34ZMEAnG{%d9jrp堜B[Yh2YBY@: ]^ nJF0]3*+ګp!m7 X%5ԫl0^1G}.j=F\/`2BB!xiF40tmNǨV֠6Gj.ͰQHQ 5#Hd L'B."Z9u !@SlԬ?^QlZ3{aT{_K17KA(p-F )(a@ɀH b!2tĢ5{|5ѳ1%l݆C9 =_D/6#^$">p/cJdϥ:i{#7pVB_ p¼4 %Xg"1g__p?@P7 _({o$܋{tO|HħͮMv"8N,%c *欕`_t96\ۈ3Ҿ/={_x }[ ? X*P">??SK4BER @H+JmT2a(2QO d@ItF#:,cFEq`8>VFh/jC1g|?9)*VThf}*|EM㋚b_H_D@ Ы0d:8f.c"u:H@Fős&8:VV+ EpCP'p?FH cX"cªI'J ) H b!dp/yȄH4 "O)O WJWR?,+Қ7d=>C҅~5r(Tw*vn7 "!7q7qbT̝j=u\rihiH7_AO 6\+Fj!ױ/P6JE<)PRFp Xxi}jǜ8c (rPŊkC3 X{PPmXx ?ypъ?k25SͿk>g%W,#1g-Z7¼5"|m|_h/ rҨET2QGU Xk*/})3b1c)Ѡh8##{ [h{߀;(<T0OO/'/[^js}= xbG0vcŬZ>jUDgOUU },iC\k¹@x5 ^UWWJ0*B x+ylAnpխքjaNߓRs 8 a i+j6ܰϣsm}kak)O{pր;p'Y= !w^"jOۣVܙU 2''⟧vr\Z=P ^[g3#^b"^b:OB7fp !G]x<" = bV_0G\s 8 Q;-܇EyVK":"0Y7އ=FlYbiB-5qv s>7sPļQgT Cnzw &'c%c<\: soFj855 9HIU{G5o4Gath6CBRFlFd}8SSMwÞJg1>BBK F6"ZlA/r7r YI"#ES 898scM8N9CEw.5o٪x-3ciFޘ3{cfoF!hvt¯ %{46U}vb`i<>Fz`%)}<[fKO0 BZX*ɜb1y95sN(G:y %T!|0`4F*J1{wNs{y`f ; )nlT!B +9@5$) e7b=F\O1Ҍ :4 BXVE_'`+h[,)F[mĚ icNM۷oԪT4k ,)UAQO,C-kA!h! w[Ūu tU!#RoCR$n wʲt3PРe U Gc1qbAQ8y<@B;x({ ,8jMlRIx4< D C(7˕9wSi7I$Ӕϣs>OP//\I cMˆ瀧P}d̓ ^[-'9UI !O'R)%اPO# j',#<#V7Y xX:Ū=cy3/|\Y|݌1X]  <6`3p, '3p,}tb>MHoRD,! nȂ˴"܈ydXF,Y#11,#V"^;qXXEgY4c͘E3fY7f!Y[I̬e31K,φZ ؑK!+49G}B! m7 b0 `hFahTks|0sA ~WF/T:)tay`42E2@+$=dkotc45FSPO ]9h@]`.]`2/FSk4޻ y[K>JX]Jp&Ҁ>B Y]bl:ra;ಌي]29 ~5o %d*[0]_G}nATrl@ *ʀ:e\93hP@23(a:u7K12(]:Pr>1G!F* H)e[&+! 
ě!zF.Z%ݣ'p*r<88Hs!=/5 !*:5$!A8t&MA0  逪@ 1 ,S5A jߴ!`!m|`ي)$q)z mrA2^QczbP7:gk+dݼV KmXnrs%'q ip ^Թ` 8s 5oܻp;E9>ǀcȑ!RZa0FEcaiA9Q p`H hGߐ>0 `-oW(#A!h#P4 gUz#p.φ8_2\YI'bƬ F)uB0GX , kT݂VG+k(B>M؂\~$x1tF轊t1"mGk!G[FB]SaD~Z1@CqxX#K w3JIC%ށA4B9#*R`@[:@c) ֧F}!Q3rl4QU<qDzvWG]awuDU֔3V Fm =;#}lk /d>))Ʃjj!Ǹ1|L5xqR D٤_fƀrzR9儞&lgwE=iF(`2^AL/Xw bPCbad[9 K\-*M ԐS\XqR9LsOad)4n2?7J:Vɾ ل*2}F)bzci2 3h#0+4NaB< nl ֐ɲi,ks>TX5^cRѭv෵DI9}_%4AUr٥zLS1Ҍ #h(d##_K1#iF(`!DflL1Y3G0+b a mfkŚ㴿\2~Z6MQ@AM2T\a!ENTAMsȡNl i4RB:҂Bȭ&9j!i)FLamoևu 9j̨N=WZ6_ϩjfG3xh/ʟmavEmv!9<:dMe24BX0P] ³QXpk"bdF!QcӌXP`%`!DbhR-&|y?ȡQ Lp 9FS톦c9H)7`#ifO3{VfhĈV*ly3F!#2mO,K>=4ALqJ{|rxMOCf  V]5,o_TrE P[0/|P@MMwXA/۽jtP"5x '#:(TSQ+n(5֯5Vh5&5q b@=h5q#?λ̡1'hga=oS.fYy1c@!  T]Ϗ4'+ȽE bP,,FT-,ƐFSŔV4zNs;Rq/@bȂB` :K8衖B̅]r ĥߢ(ۻ((@?/ iub^afF;5 s \۞˕ r+hW8Sm8?%5T-Re_/cHɩ%sTc']F%9]ñ!ZNl29ѓ Gc1x"VĂ EAhTh*MxR<"*bP4##hޫ5P(-a3 |=G(`&f2` k}F BW׎)=P|3b#g0 Eh&{ =| /aMwY@Sl Kb `@#"W`0(E- -Hlu2_ހԏ7~ o`}Y¥(r CIG) I10wn)@x| aRR+\Cxy9`y+``|!D E`byd0[r Xmrj ez)@Tr!s2" q;pOFEϭZڀ5aIZ*-r[dl1mA!h]"?b4T@go!)`M2`|!Dh_w1Ć}ަ7DA&6b5butA-ô @g1 8Rܤdq;9E*a@3.d0(ឱ(e@JL]gj%8% ˔]JXI(3% ڊAQ6e &X,#[#pdA-7pNN#rXǢ'.qYr 3Z8R"aa%MpfĭoeX[ŀJ f.4eF3` X!dB }UD@kܑai_j͌$M5Q%\"R¸<*Sh)F)e&Ii xaav< -1-Э=I{U\ZPWpŪ!sVhVCذ54KXug9 Wո/Tߑ*5y¯<ƃ|k}[\jAj1QiF͐`hj?h_oD,,b UN\Q A|] Փ-BօvF8 k`@:u1xCl?X`0XX$:hP[68+H1Ҍ #Q38 ]ŬrҌ #@(b#Ai Wrm5gD`3p|^̶aD amRYSJlNa7:} K0zYVMSbh=ҷ)F%SY7>_?)1xS^9a,}Zr3p )Ll; . jw{p]B{}u`~5#PA=55ft@YaF,zԷH^'+ . 1!_B`ӡ4t8}5ѷ8J0G9լ9l%)!i^*5}C8t3pKgpL1:fȋV&pґ [QP8R!%sBS4#(`w Gb' /UP0=tve%TO%r.Ы#a7[0Go` GG CC/Q!w > Nqý kL^_0way|\&w8w;Urh>?^@2[}݁fȑB*^4#ÈV;m#at€qzE؞;:M*UTq1+9wp!qSb jEA ujMlt\e*pNcƐAp ju0 #s(7*.\ EFqu dW=juߠfh3$׃5]d2K9TþyّbpU`50L㙛#L,s,rJ"mmi6U.BV x=a4h795+T/IҁNftAΓW`>>SS8l!(5꬙n'8#l v2./A>]lVDzmtҀY`r4V@h$5p8:"-| Ҡ6ׄvuR8H鴹SSRN[GiޖK5) mJlF܊$>Q#W1jگafN*65*4#s}+5ADb@9J >YyɌ9ܳ=}6:y95! r0VЎw"CP&֐TJ8 A#Ԛ* uT5GÑj] h[UOPas,&}*s@`A%YX2Z:TgT š*D>!ЧvJJ=8c S*C!R00 #4A900t;%BY1$9`PBp0pܹ샆)7>z7>ga|L EO& ګ0 b{UCClRt5US#W/VI`|+6r6B}_B?~(ۃcWj;,'+jT5Gp8 ǑKj5`( ͣy4+85=Gvhv٩Y 4=yF/ chaxNw]%PLJQ DQBJ^58 ۟!^G@ ۚ5q= @gS T3!@`D1d-`RjMtYrj8j%gLI4`2$՚,j%2x8{YU5Gp 9FyJ ]vdc˶5AV +Q{L}'=V㘙e5c5g2PÓO1+5咕`lc[!CZ[LP#EjX\e(0 xp*9uŜ,u^5]TQʈE1uE=SÞ[҅Fh{yh]eN GB 5K',֒jCR4^4˹EZU{ubx1X#?!JjiفBҩ* .:u(9!tɠ.c!#Ph_!w0\AQ۪[/ *1" J>mlZ wRs(kHMԬ/^(zpSEaDIt^@~+qGI<@΀msV@Am@M>c rL\&4U¢ 4uj+Jz*W+V[ցA"[%% IV$+h`Z[QjbT?Y5!1M=5ps胚 l?<`@ZàhݎÞԅwiw꣘,IJuxN%ZbF(9ឤኞ4A1#iF(`11(^QռL= ͮM[8.gt"4^tgp u]ÇG/nmGjM| vfWV[i3S+PjCPLM~ !om'wlBN59A"mGg. iCU[hma6@M͆rR5CyѪREY' DVB{5E=}UviBFҪJ Sp0Ӹܦ 87S4!] ԔEGhGa]۵p+RHtE& h2aSpSSXF謚;pE ( Ǘjqf.)FSfgք3^$dJ5Q&}$ Meh:Y`|Hс(ua Rׇy-QdP9hWk߆*1@9c9 *~%U%cBh ` /U@=!L a'^5ҫ&zdoք pBSJSEi1L!Xu )M;u19G82,` ,,Ga0F L!5\: a(+R0/T4Uio\ +4L U͘jN2ZA.PЦa9P'-&H3œC1c=|)v*B `{aѽ/NܝQ-$!: CP G&?>|diZkD32|F#&V iT@ӒUT|U0) 99w8\54@Am@:uԛho u0z!c+_ |4 ] 4so@iNԪ! ϡ,UӣzXI)nW. 
iro%oџzNjh%$1!McL}.&K1;bIUܴeR b ,-YP][\X@c4/ E{I L֤ Wv[Mjqg0 QI\SZ/I6 .U&cg@őZ ۑo̤ =WT3N h\NMBAc+=44A!q'ER'EЁ#t8wpf0gdZW-SO7Z% : k5Jȣ+کQTtEۑb0uDٰJZWT>K/U@jjp:!ԐW-gz:ݑ$qIؑtRTvS I")ѝН0˺L_ t|73x~e~NLNLA'Aǯrq'""|baӰ>qVܵՉE܉E,cuʲדN<wS޽Ny:nz$wTݱ)^.t£;)^zr=Sr=N9N,:%\SNNNA'^AǯW<@!h#PA& uffra^m4:)S8RQPn@ +H: JYƞ5A0Qrj`Ѐe bCㆲM!l75ꞕ7G#}o·6MAZ+^2@tspO+XZе E,P®7Q-ޑ;;r:VЋ^(*>=0V5ySiYo&!7ij7f֊oBpsJEͩ>-ޤ@,M<hjjf8y(ZGp G6T9HQ+D cC{.cfdF!3Gsxü#(GpC1,b ޢ=VM5ͭ͠s.*D<9fʹ-F}VIstkT~ [UGMOS95NujBv>p{0- C !DVMHF>՝>| 4L9*J)Q:'7sѫ5 =q]u\^pӆs]U5QCn9p*8k1  ݌!ONcw7qDϽ nS nu#_ioq.3Rܰ57a xR<P⩥xjP`xvQ13yм@iI@{nDVF4O-SK3O<Oӑ@n8.GC֎ 7_ +IʈydbVe8eyBtyB<}>ϥ>ϥ>ϥ>ϖ~rKue9)@Og܀'_/I$x< ϘϘOې珰ǒ/xCby1Kjm !Oggg^S/ nƂ; yj<5CO͈L7@kWWē3xrF<9@ 7( E,D12Q̳LԦXU xq ^e{#`e&I$y Z 7̓,I$yy m V}89f)}N`TNg&*H2aEܰ.y +4a$f.~a0,M|`IoX% 7遚) HÛJCMee6fY5,+So"kg#z^! 6u]`P@sBLrnrK^bJbOz;s!hHvT G\w)d=TOV,y?4iа~E췄>s>K>ǀc0eܣpw9dy*dy 5^ľ?0DRZVa_*\/c4`4d4b4f4Abe/b~Dڢ #zRr`C#ױ6Xy㖈%tL WlTn*1$Xq[cw1w1w1wq0O,mbVSZ0vqԧVRuљ%GCa#-^a}'|hbGHZ^Q.\-@c=Kph蓣kj jXBZ!q}O:|aRO1UrK9‚BBZX5JEQGԴt~iF(`2{jb<;*XPBk B\z=F#Q(dKؒEeIHkbNb(b^[j'<,pck_R~Zyάaa aA1, KKKKf'fa0>̀zA c`BG n" #hU5oVigh#SdF3fS`8}!ѐ$tȯ"Q&V! ]l nH1>.&j^[mֽRk5BWCI~iU+PkPPoS1˴c %oL;y*`: =[5xQ!BCoATb YF$+e$ujy#3nLMB VaEsŀphL}9|𶂱7E =gn!MҖN֠JjDWt4@kNۋ-P~JߌtUXbJO :1^@}iqDM!oOYHSӳY 7U #&ZjZXp稪޳f=޳ !B8GvGhdƊJlMcސa #c#˩YŖOw芃Cn{5Gk=PRR̼z! KmYa{Ʊ0>3=ʋ ccBa{|bq0Vgn}(3gë{,=t/bS{c@1<澵k,?X<ƁfbC13XZ7|az̾_}uc_?{|DǠjyxb:fps!7=/6Uq?)>Cf> z31buױfqgL_ctʯe-|ʟ1̛ݹyy{1ܼʣp~`+p9@pbVĽЧ`NZu jZ͗յ@-p^Nu |tjlQ~Z9 :9iu}V狜P ghƀeP> q٩YD-_bh/Pt=h}x9jQ'i':G)d|!Bq\f}]`.-4]aFkkf~ ,WO ~euڡ tIǔ*I0tFQsv}x3 10s@uqaLѯh:fhlO;G4@Ie KSRBKZ*У=* S2]Gt>RG4=PWތwkbVCV֬ -Jjʾ׃F%Mt@+bSL2U6MCwaTH>g>XQvAaS`To 1@^z{V0KY+Q--:5Mە4R[+X*RʠԬ2X6[AB[ud[,F5V h@3gkYCct)%Zz0Zd:+F W_+,(du#}zt9XfLY ԇ8dG*VpI ؇&78l=Lc<\P@:uLjQS{atDsGPiA \8CY瘊1r;^aJΪ{@Am@:u4ukܣJc0֐hN4 ~AN@Ava_[*4HlJ ڢgiݚ(VjM؍$ {fdK hn37̍cIV̍>>'!mm08Z+C9dI3C#cSY43Jek#!¦B7#eN!^Qq*bj*Z|5_P,U ]Qs4aH3&}FQ(f<9kn8H,(0&vkRSe_E?%34#UhoX촯b9X౒(a/!+RLj˫vΩq ^kJz yhM㥊T#|5Pd+L hM/{]ܦtUSRx}trZ &~u_ڀujD>Uoܼg܎E2t?SS`2]g iT:F+\(rLεkV=5?#330hdžgKlűAkQYBl[uA^;JZ3/<zor o8u`]VfPI= Mň<(`6.VI7dF!v&(NYɪcd4-T _hvf<@htJJGehNtU+PS^gkP`Ǻ>Ǻ>/!CU#~֏6:9 #]||h6YK nt f9|!2 m_9i=KȳB|8sqr`s;#UV#@VdnEe>JAMEj΋%hsA[iA!hLс):E|A6 XM.;5{,`%Fn~A5Ӡo94ۂzc[hhӤU:ub ]` s5$P~LjI|(4(0ޢO4se=&(ahA!j`m!TTc!F3aD3Wm)nj ~=a:"b'䁍xT"1#:#\[n*X)~*e>GbUK9hREז C9| 6˲q,+WHqa W,ǭJl/㴏˜z5m*cEu`fl/Qj#M)\*titqP[VMܪYm6DY (`1~\@RS/XQ@1Yנb:b| GAjxHfL85|,dU8"$:P)Fa3 A>-99EA375sS3W4s3f6F2,a 0 a6,܆% Ͼ<>0} Z,Spl ,p!K.di)b!X# :mC+!@Nrf zrs^ ]1r>gp kA@T`sR!Zg)#9B#t-ŒwT2Zn˸zJ2F)FaS EAt X:*P>A{d#(Ge>KjL9RB9T4# ro UX c1 ˔2C9pvgqw渜@.teF,(`ڞÕ G?+r:C3 03 Yvy(e1 if;c^lh9C5dԔKX)ah3UsZ5띖sc~BI}I:~iCXv!50%We%'eh>i"dO: px4y+\C[QEf nRX&nRX&Y j7:sLs`vpښY0p[7cf#0@tqDm1Y0[+9l8nWicXr)yku gnL*7jVX,`jv٩qLm#s>BJ`j-L>ێ$RgR@鰽j19'qFn;t-NR*KM;P)쀼T@-r!PhGi!@)pc&TФ\i q6jVmHMQ'YI5U@"p6 Otb=FT B!PО`^ha2hE}Gt#i TSʝ֫aՑb+0qSf6`kL7k5|!,m8>l[g2:UbDi1= t5U 7@5`[Es6i~.]nSLb>H}T;.^͊h8cG#rBY!Pi W(נ6AJ%k5 Y8<}YGtmMcpO$##pthgQ')-VH"mLTZ5unlI6Tu9-眨B4P^V 9'*;oӴ tT`fi;iQI&v[tE`:k@L!h* B]Nb}y2] ;8VRI@ R0L̤ M |F@ !3^*2z^:T7Me|STƷڶ,"h{=Z-rOǂ's6"|Fψ>#gD3"|Fo&xI:VGQ@)Y̤JI%,x/ėU*J($ R-ЈG'ӛC:R~XRnU\Y:* BN:SK$qXЋz5oмceix.p"PuW`0A@^4"ה'ƨ3 ᜚uc} fcxo"syP_LG.Aоdq f%1XZBhkAAa:g+ ')$1s @` )hfߍ6 X5aBa30 .F=J70}19n54YHF@íƈoj+}o:"M;01&]ZêQ33ͬ99!KCˇMt0aQ/ad9#h\BgۂBh_,9%ZeɊ`\aZ03 b@MܟZѵ@CmtX|!D!ct&h,T)*e:9J1TiA H.8Sca2 U0T$À X)`!P<a( 0 X{)=֤P.+ESm]⥸*X/^ 0pr ͗yXcHw=L5tHJQWk\_00@":h4јfSM4=={PJp܁BSz͖cJ5ܠ`Ça&\:9 FbdvWqGCNulj~X#Ev9g[5%Ҽqռˊh `jR] p*44XdЩ]dU! 
Ϊ)f1\ڀ' 4WZ E纘BDoS_Qk+K5]{RN10vQ7h*J|s!fHPﴽRU^ިMm5{(-VЧ`|f tZF #hv!BZf>$Mc)Zu]dCsu!Pq*:A:u{X` 3BE(o}>8sYz!XmMZG1MqN&VEndTnaʷ"ň>͍0)FAnRUR~8]X!hJ.Q?J5|Xab9XԠ6A:uj%87H  NY5#Q]a&^e0Xe1[-]K@.wjSEʡWAS7_-3ltS+j"WUVSJhv--M#XPװ٥# *Ԣ%GA#!B# 9&U@ei 3Qv@VaSq1N1ƴP8EjA!h#MV R*Li+ޚ0)BP\]U` Z3X]a4W29 eڿXy}Zg>L;CuH֤>UV̪1CuϬzN Z7ޘm'/^-;6X4tld>̽4^} B)ZLVPhթQAݮ ]o\N[N۷o/\?lGVz@Fzbև1CnrK A#ZZ3㊑pъᬿjAJ5M55Z**4>]B"D3Cxۨ"T2Ql/(QQGN;zCj'eT E]  ٠$1ͅΆ4[uaF!ƣG'@t9R{%Ke vqd6cNVI˳iya#fqn6Bp{!oñs]ػA%" jY{yh{}b^TpS<& Jfwy ~H9w1%dwb%eLqk,+b,1n6Ɯە+ι9=;7,X2wOy=\*-4/ܼ̇)ei<ruSĈ4{(reřZ[U@K,d'oxxDxܽ@$x <"z/ ZZڐirح(`|!?!\#:5rUK5bF>sݾ`0⺴Bcr b=F\O1Ҍ #Q(Bϙy犛d+b;EX:0'5 fA,(cA,(Et0>BОެ J.rXZgXfQ @aAAAᙡy(<3(<3(<3*X@u/(1K8[c%ځ ( \b #uƨCw ͳEF\&4Y"QA A#!@hЬhGVnWftFYBXJEeVfP9j r YY&4A YS#s$DgJ\@F,(>+ B]@uh11PY!Ñ{r=5ڦs)vB|RvM8ObnXh UAŖ>B! #)tMk ]hGv4!0耏 .t#rx&.NCP>XA#P`9/,IcY+g2?@ݪ F:`nO?\rjFQ3i@".V@m #x^3C'm X0G^+thWs0"BmtS)uGVkJG9 }t@Uhۃ(9\ t/ѥGpl;o53صˊffEk̛0eK5$1`R ӉgV֠6/H]AM "z!P9NLF(V #HPnn%܆aDб|-ՄpY5C1[uj*-vB>xD@VZ"0F#͈H)s3en͔27if0`j <Ȗjt>D4X1ML_|t.VhAW!DwA B0t7t i6:Vǽ^mI4Nȵ@ B!ƬF cBKt@ԓH DyAO dHh CBi"q e#i"!U()@DvVmʓ @LR Ҋ0[#Cɜ|iB~J%?_N76q#Fv2P~P&Q$ݐB2zECKoc!tϓ-F |) ̇ZZZZ?HoVIGtTf/Z:e\dVK_dҲiY鋑#NQY@Jt4?rF~9#ndFVKFij|FVKFVKF0F&- dC鋬lLK ،،FtF@1EFVFVFօlO}Ye>eetTfm_p6ٗ5/k_V |_]—mȠ }}}}q}q}OHTB_B_}-}D.e> d> d)@699@loٻ d^d5ȼQP ;l x1RIӽSV^!0+~1SP_ sP۫f-sLOcJ@ }OoUB-Zi ,Hy]tܩ)tft1U|tS\ڀ%L$ZP4u:_9 45)E]vj<(а2zn,F퓂ilBIYᅍ,0nVq/YЕuyA=o]7R[SFVAt㦷ô%juϗɶ3 ߈)a7,AMݸY#Y~DQ7~:~= sNĽnn>=EMXy] iNt5[D]?K nލ-io 3#@Ž@wvm1wx[ݸ[ެqkWGwe]ݑymCuu5+?+M@_V4vEw3Y d|G+S^Щ, |˰Sg[}%9ͣ.і4:ɺ~ĝrd]<wl+봍뙎9t-N^7nVIS[kfA֩4ٖN}ee[ka尿t:+CVCޖuNсVzA^؉uYyuZTNTJu˪Rp䮹PLpzQ'aVnQOֱNֵy|u'+xD#MӨ3^drCweݼa##ꖙh7~7^:#uFu{aeJw =b-qgM6^7/'G >qgP8Jّ}=mE:WP{2-2O[:de\X|7K+(+S2#b' --v9䧠Z/ˑ'3q)BwdΈ2s2Y/5'm+?Aɏ`e,N&+J֍-oLVHN&'*Gu-tLV N%9ݲn7xT-ved[Fdd[nIP6=-iEr`Sɶؕ>'JF,bWNVn-a|xK>L(Un-7AOvLv<*Y'nvE&J sЙr2dt*Y7,ltnt:ZfO6j}ON6VUؖu}ĉɁui}ecddޒ.d:"rm \Pt29@d]̖0-lI$Nm[ȉ +mmziˉh+LT:ٖt_(J'Jn +iߙ4p- *?ɶ|m s%]:ENt*R䤕A[%bb ;VuP+KΰuYNԖ4L~T '+b۾۾jKnŨ^gu +Y_ݭɶON֭l-ݒL[tۙl閷pKnG[=*@$Iv'B}$+ 9Rfnߢ;Vɶk.Uax/D]ٞzqLN&'L]+夋rnc1Id[s(դ2~qdc&n \q24cvyB=[AwW6;=!PɾJGB*Aľc+tO q/G.)nETh?H܊ GEDB 7xuR ^,L(O 4[l>1" ߈pÏ|)V%Λ_':A@X =Q\$A Ty! o;@i!yL(@!VRi񡌎x"r2 _|^F7YN D>5V@Zc%''DEHX&poa܈O"!P=0_f x%٪O0҄֗n"}m}?&DA@R .k?ʐX()&"bmJ+"1nDv (Iٜ^$Dpx1O@/P d t MQ 2 mk1V9+V;hVja}'ƬҶi`#Sw1';mc:hi%"f|2.HSQR t4 qb!ٰvXz3XV#x= 7/LH<06(⎚dSPe=!`a}KmFa0w.E1+Y "FWTeuY@"\z X8yJd|2DcD;%0q+0"」5V )[<=h܋xb R r dFZ_vU}] .N>6  =KJȀl%@D.2rA$Y1uXXXrLoc}@xJ bHF G' Vxdzʓᨎ&A7҄//xn܅SU "`xK-DH+D2 yX DHchV(io[N 𔈋,nLd W2)Osoc!&!XlDvz\+i]a J Lzv`Zd'` ;2Y dKӠR !P "&ie'1] >6ŵ-F-KZ4 @0,@/k< f;Mq{cku;V` H0ˀ]"u9@8HGC-p4ꉀE2 ch`BJ"tyl+Hih,E4!tD>UO <%" J+V~, [ @ ba%6=F "A$M=a")1G_.+"/܈E!71O( r{kQ'Z|OۺB[E+_:W|db!VpCutBeQ[~ cX6&o| YB['D<9D„B +u`@ N`Dt A($QXzIpzRK?8>j |-I% !J(m{R vEF2q(GJq{'_ +V0b 1„/K"=bjlVRJp4%=}>v@Ŝn-k,+Vۣǚ+x"Ns nB,<[ >6nGatQT TlhYfC6ӛ1+LL{+cXQϯbMkՔ&JO0]>k-=[| yU ܅<[`6XBuF>E޶t4A6EF'դiFh.a).(KRn.dpϣ\`%')*ᅸ_尠4%R-r`4A`‹V}8#@g:fXխs{c=ROy˪St|,°j ZY4F9uu~A)Kx&rjJakL^⪐ Eb B-zF"0yj9`*9 z,ܚT*t&=o[.7%2B|) gӂDžP۵}Vx认+-0 K,˰i؇4CLÂsɐ|>g,0kl5~R,d,!K1xyb  2"M  f{9C֢\0d9xEդiFOW}x<\s7uJ[kPwVUk5xϩ}PCn{5UUYQop"mz!_Sb7$OMٺ@FEFfV刔t:=l7ɷ,@148x&jWYoA,(@'8Jq0SFRi )ݎہU tqD$FB~q$5183tn?. 
F0>RԊ@+wmT}9`|j @֤iFT!B"- -kY0nM)/Z[W*`#[=m;B/IM+oV0nop bOoP8.DvV d}]0 v77(5SFc>Xc x؇pilZ熄TL}<Ԍ #Q(b3/bExƀ#s7eIYXS֔.1W2ʀ0mٽTՐY2#7aDEʈ^F}JWDC+hr!XN Rgaޟ|~$+o[9MYkb!96 BO8Wh=F )eze )fOL/0/)5D!b:+U<%|Â1qѹ '747 ][ĻP3ӌ x`6}FHϠVq(tGp?fdq{#Q .İ烋`e0X,)FlcfoPS4#è]hJG`֯FVд>kS C%_9P9)FTGTs;0cB!@C` "e6[Q`aU_fK̠[Xb(gPQ+X*oذԇǶy@KE F0kb544RT A#@ %DPf;H3N@I"Jjn#ň4[ݸ݂#{l/vDEth6  8IF7r(a: I%c 4/&yڠ7d)Y_P bk"'9yk72MnPlV Gț3q֣@ m[w1L/&#;[Kg8U,o>dؚ]#r 262mJHͬ6Q@?\X$qdvY, [hn}1n-ҷNy0/můhmvmؼ_Y~&Q<B$PGs QG 5pQO h$Kڢݶ% 2B١KQԾMpLlwo&Qc?]%yqn&m6[ ln&1PQ'ly{AXכrkS 5d#&C2Xi;߲ͫ|Rv4F?C5h<їDYZW QP,+Az9'>^G<~i5Ә8\ޱBA( i kVB7!Ԅΰ۠X8N4 |9KذeYFow Mm0,f4g݃jz  }(N4* a=rď'28ePej,(Z}7ςىqL;'ʞ;8=l'3;WV=3GAT$^r{wu_TƯ}IC튣Wf.Nl){O~?ta={9?GMm{CMWad139!k3>dBaؼBlt%ۋg}E.msLx_g,$cxbis Z5紿MF]3IɑP':;aY-xa^/k5 v^ mj:q-8Ww\)фay ?ĥE2NbPpWRՆNpsKvYEP5F?jCCC٭﨩UjąE7ZwT%IH #(U@`C*C*ʅ[})bբ_IB{GU*ճjN͂7]-X]NRVs_bQ*:O(WR3JiuDQujE\dV Phm*SVQҵ6lXÆ>l}|jVfOzUϵV-֪ZXkrU*5t -~l1+ŨX($W-Vja:T' t½RWJVTmj;MZmSTG:j8^t ۠1rPT>K)\."<-w EZBOЈKBP QB]O)4AiO ڧDv"A;HN$h'k5t?~tO~Pʹ# ڑHv$A{ c.}RjXi*V-$Ӯ"hWkUC;AX/(֊\oIV(8黹WE'Q24>j'kD1RݵhF"n9 r$YFI{st\.,< 4R@G'vQ}L>Jz<@mRaAtn~P,+EIBBB"VA EP FHO?*ڹZ]еkuX$v=v=tZYʞ] <ty.B$VŴo*o*o廄\Z_еk}ͧư(׋I] vLt%0?1j?Uh?rA.ʅBBg8p ککک:)uSj/Uj/U9LsYYKXRRRRRRv!v!K:)))u_ꤤfffffffffffffffff CRRRRRRRReT / /uvQj^j^dF z z z zYʐ,?UTjX/UYrUj^j^j^j^j^j^j^j^j^j^j^j^j^j^j^j^j^j^j^j^i^i^Um|%;ldJ͢[2A2]W\AZtA*ʋ戅¾v'u 8F`%Gsd-%zV\)rP'I{$,W!R!Q/$"ha\KEPJu+ :-GzaYh5"kv˺H '<9YBbT,+ VzR_>RJ r-(s((ߙFgEEEEC-%T)d]<~J y *! _V2*`UBu-[b%; Ok2*dhUЪ Il,|!g c҃$~~! 95_HM %Ůàvd*kJc Io lwAĿХSE!F$Ja{DK )Rv-KSHjԧRjB)+n[)VJOIR._)ϵՉ$*iu*i+i+IJRVֹJZJj-ëSSQkSFVZ&6j-mE+L$Ax  Rb 6[_$ʵL+͂ ͋K\N?T1+./͂ۅ"R-> ǁ]`X|ɝ# ?o%.~9b$|@O+ܟpދ]kc&?wfHk˷_efhOd&dD4Fp=OuHɿN+$^W^^B J: >A}2v Q< 1Q]%(E-F&Rb)A etLPmZn(tpjD7'9M iI^I—2;[tsCd0ǫuf u5BA(KGW di4 ЖK ˈ'tv[˗`%p㑋 {GӰp@25É#O|#gqKY a#kzEp!=FIFD=! ,]0o*#F#Wm-=q}:A>C*f*Hy_z{?/, .l+ca3ꎨPLWXAS 7hq" 8'A%KϖIIo=P`^{Z~Iۋ֥ } ݭDBIwߴ9l=M6P5טu8뽶yYx2:Zݒ |<\7sܽ+HBRm޷7!}![Ox@L#t _£AFwf0IL #;6(ؠ RУvt_Wei;' O ݯgi#?&%C%.+q&lb!-aGp<É4(F[_eגNJ:Yd26 c~E\k8u:C'CwW4"=lx?xc;a>N`kt:O.{Yؔy! Gk&^L*@H|M0/$s`<ۚH2i4NeGWs3ǝ/e'$(l/9&Bٟ^݇pg90#R~ٜlZ/s7\myrWFk 'rBșKy0[\C y  NYjLN-ݰ!f+'bgc d`@ 1wCDzW#GspoH7Tȷ\‚#Zv׶rsedw&G呋 ?$yl}sK~<`Dj|9s&C] bRJ%o#64,f^0m3W:# Ɇ %^#w{4l09B$L |zA/xA'1bOc7/|4Vp9ťnVG@PzD#S#QPHݛޟu /klK[nQY۝y _.sH>^l/Ɂe@WGˀfpO6}ٱ=+.LY9IFB֢N|(:ja8#L&N_$jɫZҩV$jIzMj$y=4c`UAnuu' &YIek#mm-㶱R 'Iش˶mWe>"ssۉK9Ï p2O` l`$`[2]r\AFtOnOT>/ +rU[-OO_WhZC}`_N脼P+f?:ŅBpnvoC߭l8ml+5fD&Od'Y'BVfv[K$7ͤf{kP(nfR䥞=~P- OD(nwJv<;<\vgV-oY}[?-?;=%ԣdR3/d%!;_.4m۱"=peH㣝 - ZK2 `Y]&5B̝Ų;k68"7d/+!5BQ/$k8[yyi)["CEc-ZtN\^ C$Pf^(EBy K 3kNK` '7o>Hf@hM!Og!5sB^cܔjtbM(%"x9[ymwyk*O鏃!e: #r 6sjߠ b\lY dպkc>xBv;!%K|[wM0t+ #r Ðz 8¾\͠V[~L  WTk l͵$ ֥bU:y5o hkWWz' !G1IfkMmoȈRn֚\ovnz뷘uu̅SAru5ՒyQ$HYdpn ;KG3uVLv;*{ӳ .@LP: l_&\0xNW 3Rkmt^:@66FՐmJbǼ葯=]OmP g౞؍wvh렺7wM|3#DBacIg436Eg@8"مb]MkS/=i`47m|lc{ߴUY~:t{ [TH5UƟ=Aom_m*ˍ h놁uז=zheu؝J"(̩p͹&ҟ_=" a'yGTL vX$`2REjv~5f>AzRY~wEe2m$ȓ]fp[hYAdoMV28'`; ;Z›~Ep(ku_2Iܛ8/]/u\):E 庵g)߳,$l5 6vQl&lh8mX9ZH6=B8Έ@ٮn} BQ7\& ^K{ yP%&$pAbGl _Bl7I7; 8 !6BHJA% 1 (c&MBˁo%I/%äv43HumIChim>1AlfK7\PўQzUx'Ѝ}E kmXzE0Y!sG.fQ^I{kp+jDNlNkU{RޱX⬆f͎tgzp$n_\m̜wL,0gzw.RGV&R3qZ"bJA (&̪5ӽ1ިofwZG@& Ff䄼PB2F,"Pg-:Ҋ(a,V $vVB @wTC^(E! a g0/qM~1}+PjJR-6kYGW[%s'f;1ۋ^̎bv~ BI:8NRI*9I'$%Vmݶ"ė)Bt/bElJ9I3wQw"ҋ; bQBA~Q1"(e|}JĆ=n$Xn$Hټj6ЄD)-,ENJc*lB,+[/"OQJA|%Mj=r9./b80؂F3{Dt!SUlkc#6UTa$W**WQjԮ8#ثa4@ŹZvzʢU k[3r뽘Y\ެY׿`QLlY6nt 毸9ra2h3s]x|j7?^-&#oybF'䅢P!T UBqC3'M԰Z}̀t`"gbIù7 v!v^teg~b}n ߠ%y##xxC,l+#gdO-M ,gդR+:IY!pSaֶ~f}7\̒{7ۭمx&pOlO液g&sQ yg!5sB^&a. 
j؉n6[6_}{G&G?|db/a$]D;4nLOhjLg6Ñad\ZyٴZ V]?yBkiOζGl.\dguG @77$}m6"4'H7CI]OZhg-Dkd"h@P*nLQJߢn%IFH>x"d G`\)O&R3'P:Mnên00oBM@Q,`dk7nD7'`~& 5<&lUt7k$tl[T XheC2Q#oƾH2['w2M t|,K}j+Mk|Tow]:(T UBk3$TI';8]gŅST$5 Bv:i"I$]3Q5~-ƫ^-LW QP,Gh'&V%jIZ$%jIZlo'yKsbT,KJQתFjԪFjԪƜvU'ƍzݨ|BtͶU>Җ u~{~' e6G6 r]qsLUpojV \Т^62KjpqEA!vrB^zj̙tjvM ^} !" 0atȲuNjLv~}J't&x iѪ})"*\[5OچޝZ|"ֵdFgr1 H5/@7&AL[h- m+_o%}hRd:m5! 2!Z% RYWl5dhP wɚcֱAbF{ۃ䄼Bh*[ fێuȶɫ!v`q"qhm@LoϭX+=2~S$T5m.#ȿnL-N [O5ٷN,Pa{: 8܇gيblk$`ack_w[qL4(- GLО ݻ 6_ #f_pqm[Wݮ#!"݋, ',v/'gȖ |0f]({6;ȲEe38B MG7`K Z j}!DX;ߨWX,=ߒ|uwt^ive&' e'{ EmNf7{#L24M:EhQ>lns!,:ɶߟdnptH,* tgt'כ!r0=ըB:W1MKdo=*B/f2Mmf3Oh809* `5Xcp \ؙd\+!K=.nu$א;{/3 +/=ߐoX p|"'dMKƿmkŽo'BОⶊ  DuǓMl +4el5ಏD/ttw0NqKP߱VleQ?, ?1z]{IQdc)8;ݼ|A2Nɿ 01x^SD{ <-#o\<R6r>OD='%'I(m/-9%jȲc.Nr ϗGͳ"K. Nu< ipV\X(eANN*nm>ыaP4 'h8Q*I'rbD#G`4j*kOJ*_upxZS' B }ÅWhmKk{sɁ&L ũIɤfN-e]9Byi@s'{7[:O0A1*G 񎝆\/Pit@WT4kǽ4ZةuV.u֮pǹeB/9;btĢى(OG%@@,ɘXsr O/i~hK]\ٲ\YБ0:}2z8q39!;ޝJw2eR3/H]-$ {^r繱9usd>om0^;ϵgTb-λ g"18c qF&'`/lf$fLeBA 5Q-1.'+0ɉ=P}[``3֔8XK$LAԾb%'1+~wB䄼PBP)d.m\tKk+nni9]kkyIN0aqa{ż/%u^h+ y=_nJAK[hz'6ስ =|ٖniNj( 3 _jFڒ'8ũ/;A(2ϼW E$t&y(z(z(=U{ K(:5ΫoO>~XZK/T 4_X-R>ړ"9RX0b]WtRΚkaIn.i՚LQq!ԄaO8P&_WX\_?J 'A^lz6Qy^F8ɒ[ ѧ(Gk?p~1|V)nbi:+֎#G7wuMcZVR/~&vօt,IO<{x55bc e_YkcJTI`YdR3/d}m pB3+߲| VqS#P$itl![O ;?ux3'BO&K3| jBKv=@B ΄ `ڡTB.wG'IƄX}pF/ 9, a\9.Džba^RR(.6LFhX(CX귏(y8x|*nw{$9J!M׵fytRV]C}qSӹlxF zΫw>4 Q}zTMjK y; ̈́ zF$8! "}(v]AB4)ߒvbF6Iv+U/xV~o;_Fl`o,z 5zK`*!n~aqłW ^B]P9ubT,KJq4ƌ);Hdr fj}c y   $T$[ 7t$`7C~{:bgB3$îg-vo\o83W{}?\q͞_ddǑM ?HEY#bV:LО_vO$3 Z ; kA8{׿`?pմ=ᖰ'G/Dk nt3=xe-|zX'|: 2]zlm秌i( BIV;ǎ)U+zkD==^^P/d]h {8ʥ+#æ-pS›jn3;, xjX]&fx(C| G;ێ Tr੫f<9!q2=x'EE1*a G_bfVMA'p/{|$},?m~1Ч1K9(G=P-DْlIm0#9 ۯ~n~L ߒ + ;yN'TТ9q =k{aBqP!T Ui'Q4$Pg$ $~F3ڱb `b Dۧ4=wkyrH2ai0`tuwN 82ܰ! C3`lgqA}\@.!W?n8 w^VdxaI $T 3< Q:ԡ5uCMja hh@qF|L:4ԡ-cJlە9!/Z2]ZFӏ?&x0&T˄jP[&ԖlŖZf/@4x>-;8<ɲ\F;1 i9nG2``8rkN#tyvl#Ov&XHn}ۛFgrB^DLV<}OM?0O{`IK ֯6+h83JJrezyy05?;0va䄐]ln>7mXMh/<1v~Y)9!/*z=oINt B$`=AMhml:-aG/3Cd%"0^[G#cԳjl\ta ‡q?ےB] #m_w_Wb@!+$S{7%,'oV},@=,6f}hwGټc(Po\dYĭbݬxz9A0K%Q7,i7h {{l-;>Ͻ:R~MzkhHg.6s.e>k暘ؗ\U<^"J/܌ 3zh`=P`FN?"cO?~CX#xB Pք 5VHQG}iL>! jH޼:o *D3Sr^[kk a>t|YNȋUBh 0poS|4fC@ ӭ;:v@;vyξj)zŏ2;jM?E:&dC@o³\Cޛ}eBmfجb NР6l烸1B>BÊrCΦ͂#[}l;5^1@8=8\X ԔM`dDasBꅂP*JJhm%ʼnP?_ɖ ~$67G,?vϜ  BIl(^ZMjpDN*K2|oj;I7v8Ͽ78/ 8;80T8,wbs:w/<f|]$C`9F"@@\@k $@- X7ڤ|_ah1=->q- mvB0g/Msw"ա< #qz,qNy -x?X4bbIac!5-ViM%-B=aPF7~EpXcڼ+_Ia1xd68!=-b)bOv$LUF^m|9I$ۍdY"ĥÀa-0X`-k،ɗ>[ŝ`dp?p-8^1(FnST"iBUpi1(5jQW\Wz5%9 B΢ eRyrL]G Wۈ6&뾝C$3T\+Vl45d,_l$>r_'?cRg6u>pIA.6 %9<ïio-^: 5ݺS5HV_f)._g:\`~`>~Z&i"O\ܕa G?5 ëukՏ-~۹wKIWX^HŪ݈gx ;vIw=J2.B̾{Gc;B)IjxW$$ n n0ܗ2Z#zrB dhvgT1&kn1ґy NIr#՛ip/{jwfw;nI>XH2LԉYfա@ƬE޾@ZZ*08dkۯ mmϯ sLԘӮ}).\ŨXPBt 2u0{4T 5$/6lDG4չ Z_$Ђ_1bY):-V)P88X%'x4b̽=P(N+;A\ogg'gWH= %ofa&Vh+ړ$ ! !Ag? 
1+L+$A &kj4 6N:i߭qޓ6r H7d+vlUV[wfpg P[xw…W (-=KpՏ~^Ɇ[~.9,8.XprШ^\/u:KEKEEEE{?x'!킷K^][-7.L|y;j/K8\5i{Ծ G`:c:b,'B$P ϛf^ǛП EpoM@#:őXuQ ^IdRz; d[Z $ DNȃ65q&G=;- ZjBCh n( x?JfBBCc$XO.`yg6p*Er%`t6Cۛ:IFۑ$ʆ'L5ɉWjHAA+ vPAMc&'" }$S6%;dk/x,>yȜ }A=(sE6&R/&wsk |:0/;`EC|lp؃NFkDvvcp8L^(EBPĐI+ܫU$Y NTNĿSa>T?7)$c7#X/plܟ2>yKsw.; ?*bְJ⳽L񅲃l#$HJhey#WV2}g} 29Zl {#'20${a(C$bOpJx' G6x/8yXSdq[zu<2ޜ'z) $ch˜$Z=/xi\,\G@ BaP-rQ[X+5 DžbX)9ms9NVŅE-?x.w uB]P-u uB]P-u u~_~_ߜxn/ B?,KaT#+y~p9z CvnǞ'v^1PbST\(S*VRguڳ:YN{V=ig鴳t;:ziU?L-Apa儼Mb Rbng)gY:`:`:`:`:`Ryyy]Kt){)%VjVjQmմ][eVcUk~4-jիzW R 3m}^^['k++əWj)zŅQP,+E;kUsPsCn~Aw^qa9*bh˨ eؠsZSA[YAۖmKKAi my7L)y8c# 5o?g3^&HΦnz&SE@l_Vk m;WqJ5IO漇6#ۄdBr/j]Y5cX3:|d7@< jSԾ9ۻY LP2K^y(bqgxB X1 `%Ug(&^%'$!`P& o>{-Q4w S QP,+ŵ UFM*٨S+ډýwb de>WLjvtʓ,Ç-&.Ԛ Bj"=@k$J^/Z˵G8ѳ"cz͂o;|kոXX/KJq-4NSũ+ŨX(VNr 6j5^c5~Vc5"}Q/QE=Pɯ%RIHTHTXx4[4[fKTjUZE*VQUԔ͂5e0m$5BcXh a1,4ưX1,4 MY[> n.x݂ʫ͂-ZrnujhEVjBP&T U.JsvCVճjUiWA`ەxpɫTvEXp\8K傫2HDj2?|////ŧ_O9?/矊?>9>r>/?}9>s|,CS JLTb§O,uP\2? e(~.CsP\2? e(~.CsP\2? e(~*CsVů?9> >'|Ox9>aYW:?չOu.~sŧ:W|s:W|s:W|s:W|s:W|s:W|s:W|s:W|s:W|s:W|s:W|s\e\|*ŧQ|*ŧQ̥cбլ1bVa ^\\pn7 ^DP\^-\WЪF/Rk9(.LeLUVWZbTUVWZ]Ehue:n`.^K__ kb]^/vxKX%,+mиE\".!B*u^x(uAKɨ->o~D .H DbbfW\dW\Xi_`&.J}bJ*EmYVW)BVZ[ҥf.RPB A[.V%KUR +URmJV~`MJTJUT- Ph+{ku9'jXFzQO֚7kM}xCKb/r*?㞑mb)4[.x&K j[D p`$JT&E͖# v}e42RqO\z+}~wfk +7xu[=^YW+xeZaZ{Q1bd9B. G2,TgKVkC|Mvm^1V;ۑMAn䅂u]bX"Ȁ܄tǃě@=  xRgjymhLEIm tB[qS. t^1(Fįo`7'*{y"`BgObX)qaa>bTQ..BV ź+߯`=E P :;1Bygۦf"KGW=>T95kZNu V H)M @(<wmfR3T Poks7"ƚ= BM"'ðՑnrc-[)~ #Z؞okܓ޷Qd"N6&τ-Vd9۩foų>~o`kCONh z?[_ k@IQJkNХ%p(0g`px0\^x2O?xB Dn>BjL! BIHlQǕ2,M:$b&??ewJZ;ؓAwl/7 nQ![dƣGUJn,a  .d p} B \B%ٞLD.,?6~O&t^1(FE R0r/َKK-6qKNW|8!8pZ„N+ŨhMʸ#qqpGNF؜ܔa^a-V*Lk#DdAB% BP%FlFFmdfٗL5-D+Cz~4nco[2RoDU/d딣mGG#o<P(NfȞUiՖ%ŻP-Й ՉFkhd%2/$gLB3nO ׳s=c gsẂNuJulķJT*k7B5-<+P%/A W6 2rdOt^rcxcOG\qM4 NŪ:p{{G>O6|}JΆ@`(E25d&߶tp&u}acv{oD#Gro.0@Q ebIsvw;E~$sflx]PB.L{k/e,a䄼P0ʀO¿M?]Z? &p*?/s@]|Cg +wv +뿳֗O,# <: 'EBA(At>FHTI3#r !?,?J(}(ƍ1#xB$01qy{ 4=e:#@E{q:C/@7s%ܸW,`SjZ;eK#xB$l5!AJ%Rtgq zO]A% *kXW7{}oSM .NYy>ĥ@z]^"8@K= F#Tۥ!}[#}l 5_B$ê_ =+zŠX(J 'xb^1xǵJ)Ov{U㊇Q z\htJJf0Ln?sPfFnVṽsw<bPb55;@=rZW-jTKZM\+֢UiJyMw+~Lvi uNE fƺhj+$ɒlB7س|v%f%n^4Vkr>~/ X a }vPLUcYշI8뙛ye}Oч `K[]  R"DAԬεPB1 V*=Ⱥk< -[0kv2R)VBT],clY -hlK~]u$[d)fxVsaHpc``xOxFpCGo-F-FYH,zB$ eޙ5#x¼0n6,ۦ7M /Xݶ:·_l˸"8'h"C'-9'dv.?Դ/91V lN%Э~HGEu5`֓`%7#eČisC˰={y!蝳739BaJ U56GF+L_YqZjl p[6Epv[5d;,`v|E h2\ygoUΚ6u&c3:'ϣf@[ y qJdww=_7M{,( GdU쯶pL?v"b<`Dt] (VP>t1WaMkBj[;þn,MeG#=m8t{thy0dzăxB$.`G$CoXbzk`{~lrq<%+n~GE[N3#,`ؘу,[F)ɒ]rB)JF t>z~xcZ5D$t#TbA&&¶@fL[- COoa Ew,!. 
xǕrPH8`_[F,+ER];>_[ z1NɄ*cwM谅Z; !jڹbYLUq.5Of.{z͂o{^Sk<#h<&hp4ܠg7=P;h [\{|g~UGM 5g X\!yjZjr@Q+TBo)nETYY.jW]BjaZX׆&HWZ+ݳ@Y*,\^ƤUi-oZ;>[j \k \/JݿZ֚Et׋Vו^d JHYK_z~i?鞽bPbX)Jr;JX9}N>i;Z9J;Bs͂%mkP^Үikr59 =ӞiO崧rAҞiO傪 FթOQ}-$Q:x/4JjM&wXmծD;~sN|в7aQ}.8?KNDMO0L$RuuU?ݶꛨ#>2…okE):ERH,Dע:P:dAZL%TM%)REITR¬WB!iZ4_K[K% ֒kVa_:f K\tcY;_[G,=̫߳uBa>"el]ۏMƟ?ϙ~-/Jf*hO RY<8-QƚvMnI?rg g1֝n(jY~j˥1#[_RXe9bmgB w[>?OCs9tfCY.~X-X-pK~i۬CXpjo\+#RWɝR\qe;U{8(FEkZ3!5ю Jl\^eXgbX<T_l;c\49M2 6F:\j۪|0 Bv= -]nO^2uFW,'g&:XġPJEdv|Ij̽ډ j ^Z+k'hԤge?[![q5$I~(qDGutG+QIfWhr;4ݙ2:#ξ[%c!_Fc]n>,;5vZqҢ?&7ȐŚ51ڝnUC#/:LdOEy\i9$^Q6eQ槝 ^痾M`>VC/}s8՟ la,s<= H(63LR?2' M^]=qz\ď:{;>/xiԹScgS/A1o؁xp+5}{1yk'-?[7L4H;1 biȳ&痱RfbG쏵#XOG2PB6H( cULBwбDTEUUE/xQe^0L| le;Q-D ݞ~Ph N$20rhb|ܓىߨDg1BA( BP%krB9]~J1**:QY/ yζzX{OmxFQ9fG∫ ^qDibdO:_WQX0\ۃjedY#Ԃ식Y[ri] 6FcsQ ?`߃$Xcw&X[:b6^luUu /[k!__;([w;t ,݋@ڀΖhhz<`Pk7_TG&S&Ѽv3 eFnoy8׬Vtk }78J#펧dpm!{6]:(nϰ?l|lz1qOH([2=)zŠկbWXQ 5ҁtLtpbQwPhD 5`_@eAIqXTKS{ό TSyz| * T P@H"+RM+0{0w}_^kl1߈KET# rzⱣl\,~Vh=fﯾY[doQsGq %am.v؃m3oKd6 "j9rgv&,ã,snQ67hpco%Y< INrKPmOa^kU-.P8_9hbY?c&ɿwM0v=L>6ɖdzoivM<]왒o!oY[ ԌŔ,_h GmhKAu ӹnwv,#|ԏ d -<KML M< ذ8C D)+Nl&6AyS4!iE3 8dgN8οl0rA-g%mԫIE5Mlw5JKy;k;jgyyv>-Y`ۣٳ$~U6eeoۏQ{G?&jov fBP+43y<пp9Q6U/y/n*moގh&yg_o/q9B. +k7*Ouϒnݥ/)vK\BM!>B>Pv7_dV`0\=hT;f9_nMDx{\)ۍ->hN:O g dH(^I_A컷-qܽ۞a.t_`9Nvjfᦾ&l {0w;8D+,`?0ʿly˟͋ͣ*Wy蟺w9v0O'05PXP"19Bbq%|W_vB4W3ҾZ3q$a HA'HQ,bA6'R%6n؉lqiܡ/X+Z ?"Dt-Vl[ (4OC2yB tdTbEl;:?M94L+Ņe RJRF-R(TBݦGc~+Y3i@bT,KJq-JivUG:"2z!5BP)4!?Ȧ%?'=˜0wS3{nc|{Zz ϲt#\;؎kO(i/;8 zAk"p6vd+N+Ezhk Š@k ^&k4/]੹'c:3(H R傌+ Ԋ Jj)ݤ 5$H i$M&tAX5hX5 _$. (IWO ؔzC!YYC!(H2 R]A"T2 ւ zԽ u/H݋2ҍ2;ҋŸBdBZDQĸx;\-^(IXTmW(C((>J2< DT(U=JR(u6JRgOZx T%tQQFQFQ8?vNVVDi+tQXhx̜5&Dipb)I2@2@(VJIKee2l:l6lx*{xnBQNڱ(XEE6.J[3BZBXXLgSZBZBBB)4_ L i \2/dRHSVMٿgHBBBBF$҄҄҄҄2(A+ + +*dB_HUHUDN@s=6j#v$`i~ i~ s,   s3AirQ00000002*d_UCM/N -MBbn\zJ\x*]'^(Ui֊^|*JJ&e\(.-])m[)SRZRZIJ9Y{-y*y*zʜ/eU$V),KG]V)Q)C2nZ'^@6M)IY/X!#-Z"))5(5BږXrdWZRZRZRZR&e _JiQJiQJiQ9+-r$V"-f5*&؍B)dgJ]U؉GAZ,4V2Vp($SVK!&k%ʳŨ?WR+,HɩdR( jS W(6UdR`F5W)RS:JJnŪlY?SQH"p?;¶f~L`v> y!;DL 'F/xe*t+EG?vAv+Glu'WOJ>Hc>~q2K'#&zVmv b%^;1tS²W\ yFюO䄼PB5ysx]̭@!.}LiHlE#Fr:I[P4$Bf4|h^v^fp؋)71IO,􅭬5Y+`yln%3wUؾG,XpWi!\PZtϘ8]F$>Uw12~nzF^b[yχY㎊ܠϊàd-4;qq[A(zT͂:Vx4$b5K 7I"vʓXF$Ԣ|ͅabP.D(Q*1j1"}%b%8z~/#$ ό-)؜ʾUкcf5$nt'|!}^De 09B9I,ƪGJ6T+#s|aꨈ`BT}Yi@̻hvQ BM~iQ{F 2@ 8mt<Ё2pN+C&ALJ~ 4L L@ X)Ex|jT(#H ~@> "Vr݂ڤ4^5GH8* /$X'Igq-"OC$2W$>~h*t'm;!Iـнϙ1\)%'xx *r|$g W[m ֧烊0 RLKFj>w_pjȒS}$K< bqDOG\ASJc|ūZ2k0dK2 )r4 &$g%sGACwA~ %p?4[!I=ܡZNYI'e F fd+DK$.Y{ #Dt,3I' 9SLmJPBpHD6'6hs(LAHNsTF:i-<@Ap `AZSﯣX3HPCU)s-ZM;ߒͤů`IP>=ԆӀTXzp# D%!uUu]_ʪ*KyeT@z*AҖe{oZ$/;XKۑ~bj@> T[,)aC!!dj&hMŐA{T }lqPWl2ųWMWNU-fN"[(xv@xRBe W#YT]vFKK؍bL 辈&I~q v6A"P.ɳ$x=2ղO @{A&ZuQeI QQó LE#WzhfPjZv֫_1ҺL(xvVJ+5{}]+=Kό-C}X/-o0X 5ʙ!ALlljg5 |M~(r$yw$f\P8A0x:28 I0ii4QOS$j :%DJ[Cn RH}@Y]aav9*X!=Ξ'`y,+ @&6 ^P͒m+1 ;,*XFm2Pr~S$k=Jkf1E󈘐!^q4ԤxNc*A \;etZҞֺ\QD$:"iRδl LۅE)E䊠BTk8Q[ol`&v !(Ui=늴.%Tx-{xtqU' N gQa(9RU,W$FJ;BImY7xCby}!(Dٝ( f u1Aײ,y#YyYR3ڐWpS5$ƇF4AtS?K4ROh? Z WCL%3Uo)H@\h`k)+e@ ww"yTg\~ F,Zn5k&Lb@HaiW)\ˊjrYxqW50S3BbP3['mszK2̊L>"h)AW909 "I$rNYC!ᘇ* /XT9"mըhE d'H^ԡRji7mJ'b*!5FFA19H"sԶj ;Hz4 0NrD 2"a6NibOlQI. k # M6C3r \·^tKyqF QZ(&Kx9I(NWrDJzn߉v4'sTP/VWy8@}e(RoZʐv F$.DFvx &u6=8H'J0F,ɓiB;6V">1U &&PxKr V q, Ԋ J]n͵I&m>QmH^XQ%MՊ' -Hc'Q.wID'&$AN$+ءlstU!B`(RB qq!ElrsbhJ3D! 
pd Ņ=yIwPqtvgW QJ0}X +UK_^3Bui(}qE4IQ4#Cc:ҪEv6ؓneBDtW&29nC<߄vvK-8:qNaܦW%*A$/BS+6Ⓓ"EC Xu(js J3d7;E>:TE4_܎>Z=7 +t 9%eDΩXLnhn-ÓNxUxa֙PbݚgY'G'2q2z %{w Խ}5R Jh_W9IEAՔ 7-,x2I$fuOf%"x3V hR@=պeJ8' 1j ؑ<ĘHىrF/Q؂T(C3;x (WV)حD߻z\y(҆A :T[ZE 9I?m"ja!=ߟo] #8\GM<NÜʑd 1`|i9S;MzCro@*1llIfBo 75:Ѻz@w̓p02!A cd= ͭ] 3NcUdXEpf2Z&DV0B*hIlgdJa΄RtNlǽImU9(#.ay"-i-&O Htm& Zq)MDQ旅a`T0a4,S'ƑO|'!UDhh_Q 4Y]WZCV?=ҟ8-5r(q~(;Yמ0"cPϖb'qbօXk(A2o3urIU m<@OCQ =A+v E:JX%Tj}7N󝋳-҄kP)D"AL-{BJ!`)c>[+@(t,ϸ5 %# Y-X uh?SҏYpΈ@`$\[ne]58a(ZBu| L#Io2Ԥ(r %U3!lb7(,@ Ra 쎰$ "S).`9b㠐/NJQp6!UrmsLL ؃U"NS[ mW ͦG 0fY$Pb 1H3fJe%4ڐXN0iufYQ4dP@M Kм[2 bMnkvֈ"B)yhQ@cYri"]DFJ|':?Y5\LȪ[(T?P8Cv L*jB*ws%" \ږv4(ҒʂKtHL0ԟyҲxB5czdȉSYۦy@#MR%tuR^JX5IP=P&@^"g8V*K@WՒFNʝJ!RK^a:*Bb{[ٯ AtRp0Qk33,ԣBu[P_4ه뮌y3*Pz,XŰzd`ٚD*EQ74RjP*sNؐ@3dqt1n3aIz' >e,&R@:wdK IKEptjr)ȃvXS%=h6t@2$-hRaEeAIx;1q e5\tqy RySf 5D^-FHJ Aՠ)9 يnlΉaA(:3hd=@@72bԱ0X ?ڞEٍ,s>acaeBK\)A,`bɋ@:)Z< cIEcO`"!8XͺU+DcWCYB\ $A!hIC9ԵQ$AT }@%'ck$f}豒f(PdD|xKxbw&QDRp5 u=TK:.j'D<0T< 5?ф4c:JGP9Wol͇cH€Q1s!: f3`jdl2X87P 91S$T[J@+$E )\*}LrHxkjR K7hFfeucDC!~Xҩ=-l7Uj)Ѫjb6w32Q,l;J_-xF7'QCvRvϼEr1Kⱸ9mSۭSA̋ok^z-ӑЖۢJ&EY?GͿݨJڴ+d*p@a,w-~:\D͐ܐ^5hBjQP) S{2'!^X\h&_MCF _+drG"t5DPs1g``w";IPjBvRH~4G2_#>} _gs\I'$j"\r]?R O\uMIAOՋAWla~}R؇LR.*].T,OIsYuF,[cKa+A@VVxP?^vݣ.'yYb $ax[5- Ee !Ez{a9YJᯬϣzP 8;S+B'0R_=&w8MǬ]9н5{300Lo1U %Ge K;H}moe$v@i˓H E^%@P/+z! IdJcQʍ)I>¯eUtIBt#)) iHjx\yP9mT k;"k13j xnem $$e*IGոYX<_@% : 겊 8LWC4gNl4wтNX+ a\(PDԖJElH ,kʬijhNp&лXH-D &I:AݶK!b%0@xO`4 s9ORkUiuUcWaˏT3HpP9)TXl/#͛ HN A-J$ZL%la>Ij 8 ؟cNr^GҝX4j7` %.A#R ,ҎE*R$##ſ2ŰM ,5C2 G)-Z(}Q݅2t("#@V#H`#bHȎO{"(Cq m^= DDPzD(Y6 uĜS+bssALtlIF'@N; yHx*S#WHdD$Jp6b ]M U}1Dj_EiO,.%񊈱OHPr_^X}ehRn㶉7TlJcX0 HGRD7V6KYh υ.,/I3R2`R) u)}0UBَ4XCB#9DŽ>Ns=7 K{>S h)kS 18ʼnG&=Y' !ƈ FCNJM"3չ8 Ē '\Ki)@= ݴ6-HJT :z`y%Ed@m!< A1AQQhY"`iub8L#Qb [~=HIHuK CnX&tQ0A*)eR=Z)֥(oLzI]꼵8Lq!_H]˵QL킩d5~’Rk?HvqQP`t*Z%HR}bk$yL<uR~!eE@gnN)Z5?t1*rb0RXa{jz><'~`,i:Hj>S5ړ?1xNya?{ A!q]+!$c1rɛ(n5qݪQNRL4ՀIk !`W,LUKz4 в8MvX'6߄̍/v# +(ƌ %uŊW7s7t;`@_&I9_, GƥF \]"<+լyY`ιrpUS>4`F%( wf§'g>bCD&kxHpფ߼:D$e2@r1Er X @ q38~bce?nZMy $2Hc  O!!TTϿP$I L?j(74j| AJZym)\aS+uHg3Q(ؠ0FE8%Ǝ:)l|&ZHI"#>is$lK7F +$!^IiP`SzйGR*?Gr.JuP0ϧA}4eEQtzb]2{kA;h@DSUY76zA6a`-+Tɑ o~u(N#-o%QS4w ǽyn? Nȓ)J`RHKo0? rA!b fr.l8kT IFmu{E5/Kjͤo1 C*S\e"Se I6Id2):o:Y8w>-^04ݎ06T^wlVKuqnZ2SU.g:4@sh0 C|]GX s^jER Ibxх>q]:`kv@;ߍճ&pX.V0 oJꅲ-!xV)Jqla7ʮ&*?X ,( U] ZeY sB@%Q!h P=.ZaMPL#"9QN, '"09P I_Ax .Fbc #Hi_๗*29l`IOPl]$+Lzح3oOD29}|Ė JƚrRN3!ń $:qNJb?-ǟOȋ)_>ی˖nni%΅;>Sv)Ċ@qbZWԈaPub*>dI 0)K& x[t7(lUa CJLyzU7L{#$n:S#"! :tyZ7-$6mQX(= !^^ iPUk~n<*?TycVVeUYUiv`ŤU yX8%L &"a|ׯ~Z+yBȀiO ~AhlbEKK2!%ǫ _[ >FthR 1i 3xԩ*cv$9!Cq`=)~|=Ϧ\MQX;ۈhBv,E`6R'r"qְPPU# 68B%"XcgayׂHPA8{0xd>`,{ O.c7 uD$ 3~% ʃT]6LvŢ?ϟfYIʲ5 3ͫæۘeqpw痸yR^C\,^<0@rvʎlMD2oAU!0V(M'ZO5͊ lHRxG 1W&2&QD,2'%dHMpAHTM i:i J( R f.k]|r #5Z@"B\d8=O7^q[ZVADN1JQkѝd+ ^`mr{~ѨWU؂|4}:乼7Y*w6#rƾ/3hkCF-%;GH ;{|'u^+ b)z`uM9x-YU]זB50VL O%"K"0QͰDfDžn,kY /$Ťlg޴TGr|16ѥ$TJAtfO\{+/L}aR%4 )V٥bHỳɠl riP⋼#sgԲY%ٞvҙb>-2"xQ!:)bsUHn㾩wl:6`xBR2̜_tX8iRtwsߥ Uטy^Nם\=DƈXʪa/a5a9E#%eY> ٘?_0ڀ%@:Z+˲gdє@O$uQ.°4CkҰP` Y8פc &`6cHi5u2QMJ`ߠ9,8X$>g+[Ó޵c+u-q u(K1٩#! qdA$%ms.V$ PbdJRD%p|%$L- h[ :?=DC70!+bAQDȇtJ:'Q4붮5LM>@@x(ǻP+Nǭ&ΝNvyLhS5M x%LBX!إD;0Pk(<' y-s\2 kȱ\ː+ ժ$ 5t̚,,Ƹ*}p:7fyހ?z}L|qjk0["7'׏q6w@S Ҍ,  NT ~9cLdMeU?ֶ@TU/aVBAt(~ssq? 
_:UOdz"8zc6J[w7Ӣ}ëƘzg#Uut8oN^|Bm~z|j?9܉dzɈu N>^ݎ&}DbmϿ; >ll >Nuldzo竛˻<^|b8eY}zi>_M6/rZ?WyQi狪(ojsg"?.67'u}wyG5mlEf/ods,B㜜ӧh24hÝ{Ti6 z~kQ&U<=xkc0sp/7ý]7 ڵ On~wǓ{zG@xWzj eT,zdc=_ɲMbޞ *Mɪ+˄ODd?o??>|q0ޞ\Mƽp@䗓霎_@;aDMӛ..OGA4F[ ,˻4<ϟf^Хlq`qw?bc{3C=>Yubo<6F_>~pW-۳矮#|'~ӻ//n/oENN~O矮jkw~#r?p~ƃ ϳg?xrx|{o |~翞\01U1M.O^_.//vEX?d7woY&qyuzql˯Cfp{;N.zE>qQ?7|?)X>s'.p0ޜ^?ߗ\hkFg{-/KFlqwq{˾1DX-f>:P󓋋.dw58578w6jh׳UV?۟lm^p4Ovs rņ.(.>_QYnlrU{ZO{+ |ίaIxuvx79V96y庄&[;Z?ΞgN"vӟ~]|*vJSӧ?vks^1\|Oa_I 3nOO^mlў ýx|lpAPCkJ8Pb-ܮLsbY73 U+pa>^S ma+%~X.k^s߾:i+ nv7'n19_rzYXQ{dc8}sk$jl4z٠_ln\7\lQ?3&WsRz`gy|9>1ْw/?#F藟>Nw}ѧwgoy,wo><|?b~u;?ޭ0gNAw[ۣ`4]ھn[qcsuwBa"nbxsk˥+Wwۇhp[>xpgoЏ fhwcs5no]Hj?w7SUqĤhcc}tpr;<>ؙ{M cEE3[;UYUr?b8/ۛ7mIYrU0|8l{̧wvv6_~!gg;{{'|ҁo|ӧ}jo_o8񼃏˻/ヽ?ɵCo>>?:zyxq:7xoyiU̸U|jE,2eqOOѰ/z|0VH!LJdsc8/WB5>b6^/EvNkOV6v\١l:nԵEu'vEWed^eZu"(zEUUYu<AmG'?NN:!UʲE7,[+j uU m .{kwsz7].˲q\~xZ5U\~ޝYbx>ڜ̧eUWexmnow3{l,Fft ]vgUY&vw}o&Bi,^Nu=/7櫪-ʪZÁEURY6 "yֆvE-c_TT.@}x-Ve\VӻֆT&[t(^lc{~4}xlz&FN#VX4/G~~l^ ԧpX4ln2?-Cn6v6bml &b^>Fw.2ۛ^8O@n6=7;) e;ݐE~XnnD5eqgB mv ggYanز(=8:7P US{"kb1W1պiН 3ߐb}Kˑ2;u䩳<] 5):a/O ᠪWuZ5,,+ʢ!@r{6eMDJNhu3 sv# <V09/ӣ[M1+r)9jUzfge)5꫺7eY֨›i~# j6yb vL7֨aGם( \27\lCDyժ*2DHU}$ma6tRZ>DU[3D>vHi;@ 1ϟZxv0MϿ{Q+,z}Ȱ=<9{.O__옖 }.ɋ' Vi^]~MWΟs<{;'矮>ou٭+هO/jNzr8}TR67}|ywx~r{.o^ݜLn/NN_}M`ŧYŋ_ F˃O۫ճo_dYYC<vvh;/r gW7C7zz:~+Ofv=q(?Oo?/}.^WgNGNN^aj|1_=>1p l5Z_ojۣۧYbv0r5{j1/gwӝɖ]^Nn,bѿhX$Kow۳!}ߜ]Mw7nP;?Fzg#^noO?<+ ;΋g'7'织ۃGpcgk>}|-qq-geZVȻѓyW̟z ForX=FN6dzղ^.ǻXRYyZO;gx5/ժϚG^ܝ5;}9sVZiF5l_~{tϏwm6/w7wge\A `7t>.6,W^Lν/Wvڻ[c nUS)yUyk~8 x$×gu]<<,qm/Vf#}o/ z|T$4K/qs{߿qw=yzu~o';|ߎ<ÿn8x8/ojUūmn;VXwNO.0')pM)^|{<B .;Y-d%@>tMD{[Ͽ9>&/_\D7ƅn/o /fk\^==7'{G{ |,qY6Gӻp2$8/_r8,oG~vt}yWװ9v!lˏWFpwB`F[$޿9i==؛ۧ[-;1a3;&xs{}}~e7G@"H C j0q!xuz^CȜoД-zDF@>S#p ª,и&f4Qnj䪍We,vYwz6N֒sx$8H~sx7m DsVfR$ ": 5Zf90:ZeIRȗ9lDN7?db SS;yQ#!% u$ Nı]L腡ȯ KE?9B93́.Z`ci LJDzv6 t3~ zq0nq)ˑbuwy[<;(#*x`*nGd2@dek*j8S FV\SM6 O'fZ6-TBn#^K m M.+ɆH9zw8 yԻF1r+1IbTZj-#U!"`MGF@de]AuUt,&_U// \UJn+U]gp` ֯bhGT?Y3T@Q_\~v:H$W,7|ZUu/~;7wTכ[߂WTeϿLOy!IaE9ףɸG;6tiӕOWw/^z/^r5&ceYO6F7]r yC'm}΋%Z"dzzr篟K]T-?tu,a:6nH28x2 A[|J|IjmG=!u-*ƧVDߺ㬸OΗrF[UcwHY]]6ʁ+럐Ox$Mvں.v^ꎈw{> WmC[;g1RTʲtA58軤Nf4B\Mvv]Ma8OUUw#ɕMϕnC4K[Gg!9dr?x$6d] /Mčuq9Gçλ߾ΫpzEq]ɓ;ؓ9FA/(%8U|`vC.T 𢫙=+$Λ3>Jgx: aL~7ڽVTmu{$SRk-R}7&OeߵiVq UOXOv)ۧްmqvWT$>`\[/t/;Yq?=^ D ?>4t)+!I \Z}xj` }4|5lZ ^O潋f޹ziyN^5L=$_?/^+xtz 2 ϣaz|B_VS_mַcx&A~,5MyԿ蚦)e?篭A:lq<:JZ0jB/>_nwVc0 Z?r_] ܽ\f`9Nz|.'Kǵ+"8/嗇b5uO13/ 2rʞӗfLiҢbu=/-P lUstYఘ,~nYng|h:Zgΰ[$_&r76LRo$@x ǐ0A˱m._><`'lտihR0֪iQ?|3n\Ů+n}$ ~gj;1ONvx bY׳U2Q@r'`'cyyFQ5F//xOJ\ka>>_ P=ERi5+m,0ǏN%Ѯ9Mrǻ!Oϳۏ]0{W^^{ݯOlڽ"@)/fNADs-mϳj$aޔһ_f/ {BO_7j//ba;fQ[֕o. {_>jU"٣pT*~tKGQBJfb2rCJRP)`y]0k1ʟ#XQA.jXg<>6O阗:(,ò HX6[o٫DjuefųfN44Y훝Zm:Hd?՛^ŵ*tg9*uy[[hfKյ b04FͲM˶{8 xecU[fOPHWu۶t/X~6jwivF8 E|Fd0ݡo㘙):ͮVQJsӋˇ,Pޜ5Ikjhwp-hyA Sg>FΠY`Oqj%3I%pvjnl>ybƒvڵj oWf&=6ep_Oy#x;8 6NGEANa7;-7nsجtLU׳jmW(ZCM wFzq56|v1eMwWn6evz !bNۍϯ^)ME?p=fŽ.v}8Ov o2oPpZ7}ԭzEvvl1|53|5̖tj_9^/;i^}WಋUmT[i^2ݿf58)E^VtY&+T1טfFBzn"B$ePƣaiơ"q F)ȝ~\4ZSɅ'b܀0&B !W$18M6O0tI}j2? ;ڨ8BH3'009lz^Va (wmoSN|>TFI%1/+<W9,7b4|W0ot؟ֳR/<թoW;ߏ[? zA7gDJtE9t 7ફz=>M׋l4=1>C̱ݪA'US2 v(YǙ(גCiiK-'rf~~/śA3@/[9j>FfZ5 ]\v/gG=V %w,Amg@h2-W0"b>7GMyYmL;/gmOM2Πgak4*o~x%?tlϳ'|C7D+}̷1_a[?FsN0c_OiMBiw>0[`fl{WYi@f96eo݋0mqpܮ64;-FUo=;ϵ~?n}.*P\Ygx ;ҽ讧bDm[xv*ÿpIF*$ioulN]I_0a|$y1b<86Ұ}c(,x=_~$t5] ى#Tjǯ`YA/'w}c$9&8=C8WLIBnn܊|76; xku1DaY`XG?כMvzmEoMQA.l돟ˆ~oD$tu=Le7]V}XMGzTyŴL隝rw/_& +|;:l9C]fr9&y7f L! 
u;N@!#R&2e1(W[j5g5Q@Foמ3%j~a>13rPs)=T ( 3x!"H P+]̸1;UKɉB}ȢTN˅7}Tkcs$VX83M~Zlg-reZ6[VRj*Z,FXotTW_BA45n̻D_l.@j?}8' P9%]81)()8LHRNWYDkBLF@"v.p=?&q %50QY]>;W`H۵ߏ?_ƻ@!˅ t>.O*l=shEBXzBA + JF/+- ʪMKk}{&AA?B169R>wpS"8?F\5ޡ5}>˳ݛn{@ϵ.ukon&wxwow]m?x@}>aݣ m+9YM纞\>UǼ`<*!q].&BA.*#!4[`K1+wn;oO7ic ]݅\>9ԧmspeouaޘU_4DIj,L#gCrͫ|YC]S,h%6{2*\/75YYĺXҸăL#]s+S)fL 7M316:i="L~1')L@Y2DE!\,c3O Oc'Vzoi}MS!KlRɞ&rS(t &;fd<'vQCe!FحFLi!PR WJ$$м=nHN{g<vCLKF2S3QQ˼L]EPXQ( DRK€(u5!.TdEaet&ȕUZ dׂf50, bJRXeGrR2,,c ʐj%%Rf;[Ԫa0=Ugo/ޟ/oP1T0l7/w߼?Y1zy=J{~vt63NK]1\̱#0"_oykl'&CH4oA c*k-%#08R&#Ɛzu+\9< OotQVfVg:X)BRTاpMCYc238vo#c;ݥ{~hE ?VJK;'j6+z,13z!{?#y"AK&U1 ljk߼ٽgȒBetI#$'_.X$w+q OD%'F6W''4$Ć^lr/g |~nOVfM%߾u.δXH3pN֕j^VNz3bV٧gR6׻t27'g\4/0(/]w{W6dίyVWo7߿x#xUZ-W&ebcj9rd۽[,QZV諻f_jB |˂0ѧ\g )QUd(o12Baۡ1(xB$QrF\sA&.]anvXiPJl^U]VJ)seUD)ӕB}R%xjX$kt4R)ȖOf >^4ǩjϏRoqʣʏwc9 M"rN9B{zC+MDO=OYx'zFU``~X?ar"χ??YK? ?;^6ׇܢqSݥ\֤ssn+yy7 e1SR|}}v;whc7Ȅ9|Ll;ՋY(W'BvP0 h˷7~LybL8]?~߿Ok쏗?\W|'E{ԶLzL\7O.V>y%ݸɂ}٢9;WK(׍8ݞRL>Bg.g&AU8z6w~(7wb@mXQiv5b!(Pp?PŅLΡbHC]+XFW8ӹ!Pr)eJ|2|J+VF1~pxhl!HT(+iV3G{/dfg~ A!`z y{<ѢRuA&'|]7ޥֈLluC[`mELM nnX|1a}Њ>0kxGatPьr<5"C (ApѪJn~ T?%En 4JRw~[Y c9 i^17>㩅b)C:D@g BS$j9R)؇dim6ܗHE1!Z0h=QIcRș'/L9CׯKS#5JO mТ\sYa.*SR샔~,hk*H+X6j')plqDD(e"@12o\PwZE4:n@p]JF3n!m744rC(B+68(onwP0-٫ۻeղ 9 UcI(e}Qǹ=JB%_ !‹EÈw˓yd\0RaJ.NV9X sulnZi3ubov後4;Gpy)-pz@ =@#׏*p{2_կL&cTQ?}N$쾌 5|>)Mg|c=}8`:'dwrU;C$"-q8qܧ&k7K_˲Mvn,c̃Pp!ǰlR2)eM9U?t^J2QW!!⏗tJ믖zzѢK?1;z7WE8?fOO)6 Z‹e-4KKܥ_~;Zvo1*.w .6Ϟ)4O+@Uc*!ixyt1q@"^}qn^ET]_~'sDxxIGŢoO?t9xko}!Rr{4 Co__ Zn-kȚJHt]Ūvu+O֠7p~ެU] b56jW!K^3̍z=4Ro+R,xucLw{d5|0\t?z_8ndw1vk[jFu7W gi-|l1_xyu?4/cZ#CQ琗dZiSMO!TRJRT qDIm!xƜJ/(9!\Wa`$AR0HLO8"*au? b%(&XIqY.ireҪ>}[cc")HH$-ʼ2cD K`P2W1mP\ĉ@k\DH0YaB@ ei 9F A`JΊKAx ZW׽bntO|Vk!CdCPOd,je3Hġ,RbF2Yr74J] >Y4, o\;qrδ 0q@Yk3d'sveɅ%xRb)_&h 0~׍9(VKu\0B!XiL|5xv$fR1:>RPx2=XjՑ]pcUb#۸A2cZ6QNer.=* #H rX :I)^*AFz6a/wS dцBdYtbҠ]tR3d?8JAǒs#w~ %qYe@ {SPƘ&FBԵ=}cnWF\`PųѹslDC., ΀D[]JG! QC6wQt9yx0_(e9͍Q%)UL׊6!wa}Q-)P$}2w[|ٴ5H;X crn+[iݓgvʇ~/gg|\ݬ?5~` q.?/9J1^g?KyǖvX0_}ݘRɓi[?fm5XY"-6{B.8":/NB?.߃|lն_ d)9n~M9,jٚ]I nt׻d}7F_^nFt5ӊW݋SV^f}3{lYZ3۹OOW;yDwwm+g'ϗzFT'1HonC\VwׇOۉI6"DL5f#'e` O%KQ+QiއFOgJ8n1Y(@9ɛ#|pRۑO!}kX1b(.C@2q1^!ܪJpP3믶Lrx''崇5n$KGLT0q`3M "\D 1,jSXzsU#sLFØ11Xއ)R8ⲅRa <v$P1\g*|J%VHX%+Dɠ $~GYc!FtJYJ~<3!+-Cpy֥쎒)-@ (I&_ 58(H܈2) 1&׏92MCʄJiF1G+Jvcz+Iss|iΠ4[)kQ Fn %V#-g:Rt#e[| C"S1Q=о_aPcʈͷf63V1>'5táUcC!ŋm7/uk}g|/,T_ 0xtΥtq;\=YEC7ۋ|z3ŗ'|Yi%Ζ0_.v~z B#1H]$Jȷ끧pr$gwՋyc[b/1j۽lv.e|IebQ}~_^{-zhbΫb?wmx Ϛjaub=LϾ;g Z5//' മ߿(WRFjvq @!ƐZJa,FZ!3Bx BmExƂ >ЍL*aPժS>霪yR)7+Xd, e~^s_C"%# V^2~?X+N.&^Y >q|~8_=Fǣ|5&%Ea?Q oYbVLkÝps΅+ 'gZYT3oƾχ9((Bfb}8kkr6PWZtjYa&dy~q~pᤶ?}>gv1j} Ofq~jً]\hc_n{) I/~>;n߿z3;y0c _^TRyg@$wp\H"G,8j 6UaP5`K-Ws9r:3y"sL[aLb1( <1d,2`CO$@"occai Ɂr|=@%aKZV:%4Lӱ4̗\Հ 6ʀd`2o/OOml&q?riYyJ.Ϝs yqZ+v.N[@>_R> g'/D_}|z1SOo8ϔ4B^%-olv/ϋ!j_|jktCvNfVʬ)Bҕ!2Y([΍1?[>Uv^ ˙&O)'C^2k 7W`PW!B{e/Tf(!F}sI VccTJL6|+ZF%fS~7\(.퇺KrP-ryJȅ$` B1˨|M%>.p#gj 9hϯ'\6ɥxsׇ GWkrゥeL8E9J(%frE`ci# 2yX9w.1٩ưRKNpQI#4>EbLx|>3LymlƔY1@8ülfj^wb0XJ1$ƅ. Dפ&S3?L5 XVVxe̫me yz:WYIJ(~tavlՂHU%W~K@Lc]H.}IAjMP|"a8 5#Nfk P K>&Ժb1ŵVcSc\UP} TG,I3]JQUYj% GC ,o{;3=%Rt4%g wϞI1Q5ϟbQ+fb AŲHdy> u ]su8) ( 8mE:_\կ̇6c;bUg~?ż_)Zs{)G᜝??-+=[VϿ;mBӧj"2S|և~ ?s>cwZ?>lPx . dѕ_П_Tř4"'0J\r4CD\fZi6ɞY!%' I%8Ørnre5D> *髧omO[Q_5 Il8/.?8g5_*|Wjz}q^ǽ6CFI˅y_n*!+EoBo_Os]Y-.K~nsR,MDw4B x GM_J63@~XuS ~ vo{ﯯ|V?m]h`bJ(+eBw7{khSpb9X^pV(62w,(%EI4 LI$^V*.ǐ.vH{in碡a: fHYcT-/$S"h݈Bpbn7}㊕ڠL o\L.gd yaVy)&5.8an@Yp޶AO.\Hv\Bd.hJJ&rk`OVRr>ƨ$S|fFKIAFЦ s&Z >J6OǺ9VӔr8J ȪBD!Aʐ I^GyU|=[.{?F$G9Xhg2]F J! 
V"M?K=_j0 2Dd.w!X(G\YO0;FeKypK+ȩ!& FcO%rRC]ޏe\trhV!#jql`60z7*>W~RyE~`\υlFSZK6"qCTȉyyH)x)'ʹ(h^J_ծ*-IuB&6feR >}?_z\?>5x\VfcPO}*>ǿ}Z~\eg=USV3e )c=ퟝ.C~,x;~cPzgN9qnnͫA1>= m(3.Bs!&H Zza`k#6w7}J-g*#mшh4l{umwwhw.1Ͽ}6]dFktaK98y~fJԧ=÷?]/_v}]8Ynz֌C:_Ts6jݯS@ڥ߽^ e ྺbȠO)$;}Hu 9z{UcX+~WH1Qe_rQD6{mQ]$ 2k,K`cHlz]ƨBCt C֏)KYH!<bbS(Bq!k`)cff#D h i|J֤* VH>gB;ď)kɒP H 8}pO096|;.&%YplNS&zp2\9Yr)K"1Eǐ|ȅF0uqft *Ksׇ3z"׷>fUY2vq=#;Xq̶Cu-{~1OgH9YXJ9FbOg90z9b23}'/Y,?~rç;#cOX };6(b grp㧊1$?g}M?_)'16=}gnpGwFM2M)ϖ gB8tb8s'!pg2pO/>f0)iInZ Ԭ1MCqU)JQ\6 4m$TVZ%Qr 42vQu]DƔ,ηZhҁwtSVFWQkCJ|Ly6᢮֭{'Ƙl*V nK"YWi%5!ZeJ˛V V\(Rעw#$,̬4ʂsccb ij?t_}ם4\1tltaLY2c>rLAu4 Rtv$hWB`>4,h4>~Ui=:}|ϛSydӱG=uaXst]{tCD䐢˩ pt̄<˒&&b q`32)EMDY$+]ܻQW<O|QJ9d!)@SdLXgF8n ).6Kb"> a@ ~\fԕ$ƓOCm##qv M~\ sru!wCw=s1k&!E ȆѲ1.Xh,?(lrMo^*1_mFŜa&=RmHuK%l[OqQ]ʔ":CQäJh"`33y;d|Uk_GXmt]ͻam`zkca9+˛mm))s&M[0qޔAɑ;4ţGKݵym&RgJSXs,uB\98uBmk}\@+NfN9I E<|zR{!9^}LUA4vϏ=?eSjɥzv~6Ot1y\.LǮ/w{uQ>Л?Z΄~{WC'gqq2\7}֓GpX_^J)LK.6)JF폕HG1W\_Offui4ZX_gE<ٶepv w./N!w'(|n6{ۏ?g9Y>lv#f?Zw㏗L}(B\UQ;|ϴDPPC7~6~y|aVo'Db 2.4oT܈_ߺsag!N7z7I8cq)c߆Jw邏}f;WjLĜbY B6Flw($A߿  )|!QcO)2!iK  dl8'QGGIWkY餘!!&y  o;[+$>s ( 8D`UCH.֪0 eqGqB]DXZ2qw, ]q@ )X/7hmt*e.X'>6F;R`)yB9c11Fl"d)ؾg-Us'֛8.!cBS6:,9JY*oJ(bm(b]0 X!Q2<NUYH;W !U HlCS)i!Ðŕqm.֎V|!.KJ>6̑4t0jdq+ I1C'sY:w,]F̠.TJ膕GoCN230ضXAfLB=vrgeQF FR \88*CL>R^B\²|CͲV5O]le &`{Ǹ,tp#p XȦu!uS`3j,rz8xurJzLe6Ay\'<#z >㻝n|}8_FXtc31_wkYRq" 1GoW'"fY =>2ܧiy2?9؇ ǻ۲)ޛ)`OߖF^ 5opkލɧHz[zUBS'g'nL!*9xg˅)UYoZXUgLmv VS՚1:JbSz_>)NR~sYg3W'b߽>Đˋ}Ow!Ζz^k[U(q*oatRra^_bBQ [Ɲ/ԮMovFY3OeJ(O°"F$ЊE$T&H D6R3b0S} Oխm*`\} cftʑOԓCML0pF#5삷vI\I3涏=b) < dUɕxX7YSQ52N|NyGkQb,C^~"uתG]b)U8E<.a^431eRrC yQ,%0ڔ2L6vvHȺֶe;D1f}Lj!j*P7S 0{efL OEa N}HQjՌ6KO!8:<>qggؐ0<1^i.0 ZpܬSuQ ~İug.7e69Ң0FȲ_*;۟48Fơ&~yYКm~mUE$;@Kh..qP)\n-Cc6J!Y+%~R}o2l-1_O W>0w׸c ̟gIb7oY}lcoUSQx\? `1{?֊Բbc/סU\4~,j5E'1!QMۃ+fJ)~wř.&% mWo3 ? 7-Dh0ufV_ddZ͐ `i~q|u];NLSjlvi"./*mdnk_ݏJII1_~yZJb͓B b xRASϮ/gs]ʔƣh'M9\vH|^޻e).ӴjٳBJ"ŽpBs:t v߹y) @!]BI\UHLvg%R Jģ`{zzs=[;h> Jb)G%-X&4LD$I08׊zO9qrL0jo-!ZZ0?zȬd*z>cο!H㭟" R٘BoS‰u q4\ґw -@B58]j,2Y@V t<10X޺` ,J$bq~AJs0H K)L"$&*"H-4ךzČF%FF@pR`',Kq\XV$pu1s~Brɢeض8dyd! CQHD92U q*Y@LLr))U%YM G3xq`}k EZ{~Y˺# " c8Хu;4zNJesmxzTB)Dmŵ~ʇ;&UoÓŲh4RR ĉOdʜˇRj!ս/DFWW};Dz}ֻ[?.Mbk-ꄟ- uoLW/N8y>'bu ks avۯV)D(SV/̫!}9?HJV y:Xɷ>D~_Oj?v՗u i,H)9e9"L21Q (%j1},I!f}4X|۾|ZiJU2kcdȑ1uA Ø3$)pY .uκr`ewZ,UTƖ |9bf#TB)L!SdoG'S 2Ϛ qLlR$,S47~J&,*>#) .p &91DYO-<(dBVL5B\ H]riDoCЏReL }j&3*erTK~4GHP0*wcN.)SƬ%ua0CBA!d<r{.ֵgGIu7jB zltRJ1QSx>~1\bQ.dr 1Zs A3NAxJo{B 3e$Mԍ0^fΙ$]u>ӄg yLQUjXnd;İs`;o`b$慪{؍-xfЃ?=|l'|h wҟZ|`H)GO2@عyy]o7T].z'E ?{5vů/N9Tc"~z19C:~> ^$Z|f'˳ǥŗ,/Οk iU_h裂sße>U{}3S)n<+{ee!"1SO$NOo exYr`^H&2n8j~z.B_O7R狲oϞ~jgssu۟/iC.M]ռ/os]iRӛݲ4OΊkgӼ{Ox}k/&,Qb;x>q=]&:9Ϟ0b:idx{wS~ZU-[w.Jdf۝-1cM=͕i(bQ]&N'fta 93\omYJF+v\zGSa9)!F}d2r 1Q) !b2!J qukŇ6`HDS\<ꡦ xޏt( xJ3,aL(@T =zg9!$jLvJr&D&`H&Ӧ}(X/7]- ~13Z;wQF|M㶋r@9WH 3aLP2Q vZRgqOhDzgnL \L^w|̙qyKä 1jY(<Oet00:{[D~(xBe)]3B3L>Sa~~lw6Ϗ<{? ޝя0?Z8@ sB.y]EIww((ӱܭMe1o׭(q~X]4L.Lbd,2&o֝`S?m( B)l!܎ƳEllwoXUd 7blԟ~-"S]cB:LNtu=[Ifۿ8/~}yROJZM`*|>_jhr.z>fc~널36S`y׹uOj`]rD+~Z)Gs"D,w_r׹8%1P䑶X7q6o۫//_dU1#`߯e-"M~SR~&! 
*b9A!H;"()cz7Jbvd n^)+eb$XHɄ v 002g&^ nSY_H( %Q܍S}/ۥn?\ ŷ,DMG6nO 3BF]Z>ۥ8fYFuJifXCѧH1R쬕55놄´aL¬2l9_(S>63-LHCBcFD/v.lnɳ,$O%zGb&КNIb:g(H@VRR R`-.V};Ɯ3?[U]}tUHOכȕYb}׈`I2`F#$ !!=e~QfѨUYow݄A2̚ `\wqJ69c\޺ؾ9.Ĕv&:[0u/z#c9cYI}J AfDb(g99<[έY2Y&$2KQ!4yINO|)2v[BH(kٶ,!0eYkh2@% &ZWe3:OiLPͅ!^  `1IȵbpSSi<E,.oBR@ҩd{]N9{!RBB&Brq%dI xsə)/@<Q?FNsdB l 3M!.IƚM)ڜOo]ۮAͦOpw'2)RO^7Ջʷ7u+Fo^.?~ۦs咐UY*Џ$DljCTzV2 rIRL^__}ZgQ;qb}Ќ#B-e?ġg+9hwy~zm>xy}y7*~+SN3gs!#ÌE\"x>2M9< =eZB&D(8*)J؛NOԐcC݅H-!O,>2<#n2,G>SP?16>㛭=g)1RRRDlp,*B$O轛G[%4FQ X>Q˗e傗K>Dc$g3xwT> f8*c ZҲ>ow$jԸ() MP> `ܷjcl{+-˼ޗl7tc8qx?ԂFG'Բ?X8S%1J/8[i1qQnB蘍brWJ 7/+)e,Z2xRR :* %!DۍaYU1Kg:8cԌT"~QwmԥK1e Cxw1<_J9fJ7HNql^ чT\ĘrOb(2(Mm41=+#iR!Y)v LFɘd%Ct1cuUHҦ`BN!@Lsv4WwTPpXճ۱->\"d)4 H5ssF)P!3rR__Dj2Ԛ%L3X.`@Dβ<1'sfg~Ea>0BGC4?C |,w_~wǵ( >"j^>z/:l' c&z#x}!ı}66朄Sl;/4Mcf1I&iUUGuO޺ѦUA%m32[*9#t1X{5HMC2XCg޴B X/L\Ɯc|YcINH'uqirF.Ҙ3Yz@‰_\ 4G8hR\Jq1*fgc 9R3!I E=$e 3QKƜE]8E]8R c4h !еa|m_͘Geab{Ff&ᑚ GsO(\+͛듧dP!w?@HG`j}''8~J¤PAIIL>bQ``I]a|jTšw[I1mw 6ox'h*p;t'_?ե7nYTm{9s.~|~6W!w?/1q?i1Uv|~"Ǜjvӏ#<[k*]ӭUL`Lgr;Mꋥ#̙5 vg"2K2Lܷs(g%˄ %7^s!?Y>߿>pJwm`:}V=1*a#ۮ(٪V4ͮ["4+xYKN BZ8}ܢF)1D|rw^P$@ S>!6[j6",(HvYU kU ҈K@~ykAY1B(DB>8bv,Nż?VӜ-uńíI]01/(L0MFGѦc+y&)8L3r%ce@ fObH`>5\$~pBч @wa4h6J:S^7'8>UD~ň4"*)h1`h\`4c]㳑" q#-J7 Fc$ZBZ޻u*"cZW",G5ю2',DAp;gwUŤPdb)i!-4G3aŽ0-0v)g1B3nhcWV 93n6HnRѢ>;<8dU #~, ƊUc>\;]rB2Jy$cYV7w!v/fOj>|6c" N% poƱ iK(gs 9u!8.q8Lc@s$_uQ$\F;K9Lu¿ .|B %Sf2L/JDM㖈Vc<{7O'txX|9GK3E-k~^HLQ+Ca~8 XT k7'LJOI'I?=`@{۶olP; }uIUzH"/kDڌ(~|s(vd h/}"D70psNVЍe]|q6ۙѻЌI l7:|sZ~o_P^oKAΪjD?ᶱW;@j|ԙ}tݷi\-TDii^*cîOWe3ݯ_5 f<̏t) 4K_ͅ3L{Ķr 1/ʻǷz^͍w0a̫o_>[\M&ƙ[;Lp2B {uxܙ(#"cuǞe)('INZƐ135;#0k)p*ixsSI1CwPoIfocˊq}#R&ׇS<2dL(NkSR\(qK0H\M_pH7F^ 264+&*m$`,GhGڵ~H"&z,'D1fD0nv}$H9P8 Ne l[WB$PSs.%5z#oE%8 2b1@ۙN̙hKLb&1y\LR1H$f)hYUou|bvΌcѼS1O~vػCk's?|p7ƟG_oQDU-^>_jp8Xz3z6SctQp&$E wsJOl^0#1J\lgbNRppZ})HI8Ws99"xR3$Wι\)@ޟ.ԗē!&Dc1iHH6C4nQH},LѤuIU"e`,THTmlp*oU%"KCNT  p5Ӆ`S]{܍G%"8]lք!2/kSc,9_> r4X[vIE AHtHdt1u=cH@@ID&1|wc 9`| @K1]rd"gD.IV,%ʣޔQH!ţbbZ3*d$`HѸXi!Y`ׄJլ1lzeR$s = 3I-zǨ)R 4xɦ锎IR6SuL#kvCÀ5}pd!"$"I&5.aDޕQGgHh`]wȎHPϤKnֆ0ƺޞ63tVLM CBLfL@|?"&xތ9iVhGkgc$J;osY$% 7c![41& #IZ0!)0W<`|8;o昋=%Rh!drp`Es,h[E|pLJ2x 즳'xb"wXgMS]a/˧zY!fh{axhϏx#bCFc9=3_.7>RAJ~rLk~y:_V@/?̍n cvwtUnA< _f<H ,__Q]Qt/ =0()%ǮzNn*g_焚 -ΗE]11ތ߽ލ}vބIx}3h)חͶS[?5G-]ۚ s[2aiAͰY; (Ab9yg-Rmh֚TP+u`sECFw?g:8{roJQzK9!ٍ\"`n9mś+B3)x9ǜ!PD̉쀤c@hG!ь14a`Zay^sZ)Sqr m&Ro`Nɓ>#b7;$; fB 3bD3,i)s؇R̲<℞I4=hcgH*ƪR$%u9RG-k(.дNP~Ls69c@%wmb"|>h z<-ʄI -*0JwX]UuX ઒3FN10#R1Hc&TI2igLD\K弟"r=\ǐ/$4M78޽*@+rj% t=YVƩ#9 bv}Hm?(bJZ.9RO4`3&z,3qXi9H@$JZq:;LIZQG0xքbbUf3|цUm:NTNY 2BH$dM0-+9c3$ъ't!Dqs!z^Yz9j); fB1XBJzimJE,K]>x}GTGq9ϸ'G>R|L\'vEOZ=}sҏ@~Y^ol?=]>{s,?Y8M^n덹kl6q7 t!tSsź +9-$(̙tS᝗C6'3}+m E]3]1aKfD+94@R0LEŏHDFI@JT.McvIr&a4ӋR1z9#d3IL9c,e0EG~Ȃ<"b9SIP69DՒs9،OThsMдmZCHJXDJ($n^HJ dbL eL'9-*$dR Zv*O窋>ؤJ*F N4퇡Dh<-DDrAiDEUmk9)KE10M,KN ލPƢdi"9Jb wd90 D1bty9`Xua\7ɻAʊ0 XءRy)w{; '9#,$W /f2ɂ yYRaNgu! 
MY0nNZ˩fk-Q!!Ia4{ۉ\J TR~LәȢuIBvM9 ᐯͶ[+t#puBf" 7{H"$moシ!c 2 6mW$ XE̽7J=o}QK`_b9ДXBw:/7틗Ba& '͸kcUU HR؋P(vth?귇~˕Rm#qToz0Rl} ˯΋NWtUCy5 XTP W)BL$'M#z{Ï;`yӅjZBws1傒狙`RL`پu~0w/5&-lT;cfUźwf)@M?_ͻv^K-LDǾmLul-̐2MwOfLj5JK".!͘0.4 4`T@¡u $4Yp3<2`@LL"HJ9cN&pd滍/$|_o\JA{IRDTsBJ"$4§iMY"8F_* 9+m/PKDg b&:%(fܦ4n6dKħH10BeZ RQ$HBXrߏbfZKJ2t<[ֻ4DJ91nH6j9Mq\ϔwaN67D)WZcX*B>6kY, Ԃ7WFK.R` $lz$2& @JAj ( .1c=t{6(}'ҞAWvbELnWh]I}ܥ@Xx;/B%Ya0)PD k!&O=2mROhʈw!!bߍp5Bzm.YU<˩ #@]pBQWևټօ!4q llb!Fp6 @֧L&e0w2F+f*Eu.Fy!n P3)9=&ޓ PoYL.y?P|j3hc/G41IOA>Ӆ>l=Z}|ꟐOc8RӭJ?!|ty?(7/8&>zhnk\AO/ $.ջm}u~ sh,ALhY VU<#:_2)zk ,=J͠ibS˗j&!ځx C 3_X%BR-];a=uI`օ]; 9pFf|w=lyjX˻C1Ul9t&n5߽`g#dU|nԌ;='4+7v]32Ь5 H?-u3$l!wKP{sbD2ȘG]Id}kK36LRR0}g8ID՘1H1SJn?&!ICYg<=rDS Ɯ,ͤ؆TrI*Ř m"F%^jfC@`TNsqY*i }?6!tQl j$HspHj6  `.h]f* ̓'ReJ3'4xX]*`t yQ霈 Et=3ƫcggkJTR.I1({7޶V͆E!pN&~?Xb(ψPo7*Xz$$S+JI%Ů3YI{lUIsȕ5!l.O) F.eWq}K1C4Yq ];XtfArE9hnn]U`w)KJTُ૒ Z*p@U8YJHq6 LrBn,S8UX!Xק˽菹,.8dr״ " #+wat1blvf\t>\b)< cL3r U%IߚvWW'rO0|z19‹0PJYK™jMv ^ 5ˣR-v>u|0_8-ހ}"\LY xY?F>5=# 0wS?>fC}=HM@P>|Li #9.H)W+A\C/L3E(zYhv!m4fR's)eEh8i^8/xN*8@nzx?vDL=tō=ᘲ4,)`0c,!j 2#M`s#"WX¥@8+6#0/}wZӰdgD[ F1|7q#fG(ބm*M"% eL3l YbɤF!mf5GF%R!&lXf8zkEA)gw}YҔIeK`ad=ђ>!`'J\MV/˂2 Z8dV)\w*g8D\2Rx;5#`@d!Llc`cNcu#_v*g%O9g )8dUB 1f 8Uڡ+ cB8?0k±''m "-KZ`3 mJ+"8bBb8X~67 X1/NYyH&/˃k{K⛱Y*z 5ێͩ\ϫM%ì}o~BEm0c)CEDM##f&Ou,c":[W2bVL$e@dR'=q/G+*xxGˀ0||o6ϗ?2,JYlӫ߿zz}<]P>$>]х8'?o{˄O2}ݿoMy~=6*k>] $ui?`rӌ'kb:trVYJm_=?]e].e}B I48=4mK Tz~w0(+P}w߿.\u)Zk-l;cc~u5O<]N*xUKe/W>]=ӯ쟬ku{](5ך9G*OgbvgsX\|5XUѫMgb ԅJ6eJ.+}tյNJGsnCcfh"'1⮳ݐ~|E'!!]Z3q^I-0G۱,8M$l}jd!˂Qn7S|ʐ0 SfHum1cph38)\J!ŅIAJ<ΧʀCoLR p,ءtF0@4H]q(tΐ|9BE}C*N[6RpȂѹzCNfzޖ O(hXxwO29"8qMQ]O炱M&D3%+{m]{d. IRtxq ]ED2+ M̓@d8ktZ@lɄ~q;4-@cDXkhc&o3KzKHEB3LdwaUD<K F35c A6L- \Wl^ o`2GnH)((_ja)}#+^@[qNŏZ-xo1ɊR}d{]|zZkBv>[> L1}.^^n77ç ΟA' eG_k.ԧxFө !1'Ģ9tvѴgn3ڐxRfBo\v?!uӛ](8f%^\n'dYbZz8˧Z >M9hzb=ݞ^.owwoGr1ޛpz&&J [8l ߽k{ t}$fmkb ъRo-gz[:xBZ}BI1LW żO< $yBb;ZC16cJݻMu_XQЙinRbM3p9 %;.1RCz0:Wi=ز݌uʏ~JIIş/̢ɧhTz)3d.8M IP~ZGdg3!XBHԈ)~L뽶$K+1;JR4rqqy#_HAԡ\^ep]f[-9=*ƴ9g<E8+Ř vv|Lo@ꑞ%)0;ЌUqfBC]RwR:fZ>NT2G? 4!%+J!ΦhY1uAH2LhC2%il^wY3$J4 .3hʲ]ߏrCGo6.8rV,c]eiӢՄAaT>Ou SWum`)!A33Rp1I(,Oj'sW(yԋ|i$߾eLf\=z/JNFq|qb/c$Uіpx%_Fc|?o<ӧ8(S_gSG䜓o7^3s@,0vwmb!_}]dn )b$Fa?QF) \Q]|hvw7asϿ97< w"«Wxޘ( H0M:{u?\EÿwW߿KN>J3"m$*˴rU -d~ ˥dB^Ұg{}k`>t!tB,/j1M=zZL7ۢT!墠<5]r݋KybƲ6}\ƋU>:Xb]Ui4gMTw\|ȱC. }&@"0XSYS .zTTь%5RECegQpjx& ZI7IN*YL D^4.q!8py̒̏?) C )9e!iDĞD$Yr CT0r3I &K8u>Y? [7e?&~?U5CB\ĈTS8/<qs*#RqTF9Ӽj)A4BJpޔ1YFERrQĢK.R$*=Hq`@@rCᨸw^+$ǒ.I&%G" +]tT4g8 ,,ćC< !l mce3F&;Y'6TQhхpVPQKj,Gīu#8u[c.ۦO|]VF*!pBVǜ/(/5bJ.U '4ơ`2fS1x <*]{ޘ#@ )z~zw7RkDb9Η@,>L^^|p%ELx.I#CXӨ;vI!?!ӓ n4Ѵ˲>:?L!^_u^&%XJ6- yus^ʺxC?h}`eI*J5Z3~9{+ޏi˿v!+{5ws6Z%$iJg Ud˚ESB 6_MfoFQj!WWX > <3r]b90V(ךd" bt$ )%~L6ArSRh>yB.jZ5cfd)2 j"!7G CACLe)ε E R@3pJ!rxZ· Yj%/rwT|?SJ*9di^a|$QeQ8a]9y:QFP5MZg,;d5 G!>Z&O\ZOVvs]r;9uɱ?Mhu߹ѻA !fBYV:LDm^e/<Ū4H!.Dx&nYN*P$v}h ]5g| 2 ki.5Rbk"§NLжB}q>JF8N)6\(C-c$f.h^- `[JI*ޜC#gs9!b&Y"tv.s+# !Q YrOqd-'yr!=;Œd.KDΔeʚ1c ,A(Df (`Ld(@UpGTriʂ>UB0On+qR`1DOh!,!!afHJtQ@(,%AcT"AmƺRɳiMRuS&-*]nBm- T7{0" jN||P $s\Scb&-K'> S |aݖ`s]d-Jq? v>%WW*T!̈́P !cŕ֫im^fY-UDr* 33-a,nnwZkQ߾\-c 竪s4)Z-WugS[\@{Q]TESDlU|68M=v7~jAc"%{ynlJ"We`d}+ f3H^^\snB-P |~VtCs1I) #24Z%>v__՜fL.<;+Η S.%4vH`*F@\?MѯVXƓ"yb}$+ U!%aS* =g<)͔4J(#4)%X9s6$dTHFInmc(+׆FJqȩ1:zzB]ڈ!y|n®@T1rh[(IK!+. 
J 0A#!cA)fJrH2E +ٻb *ɤ .i.W۽K^^\Q=\RJ#:C$U!"pOpvjT*CLJDDч$3a1Ť%%]0VZ2iDFSr Ԫ 4FXT19 4LPdti,A1<=# B4 $1Fy4u|b02X+YS}֍RX) ";R )%hfӉ]2؏ĆY=8Z ca<)܄KEqw랭I\Q9ܢ54bN,VvJìE7Vj@pE$WoZq?ᮕie!uej q7 \R0}w(gP |Ps}D$i?C$GVυK\5b%"ݴY1Sv%[T8ed13Uyn>ypX'Sg]"0%_=1#}*f94󳪮@ɟ_R*NOݧ+eYQvU}W7E)?Z}B }l9&1<oֿpXMn߇:x/@xuYXn ߴg;Dxv֬*â ;g85r"_ƄQ߾G1ptĢR2^mT6cE!B(.p>ޏ֦4 !5ۆkJSގxڮLV06D(.d6yIƈ.&E)9 cĔNCn30mxŃs>(($K3An?, SfF.$ .L ~3>HW3jdɗL3!en֥5'.FM)o P]uAbwcZj=N, x'Sc.q@A4r xw)|vu`BR1qA"K8R,J5MiD!HY WL~hD9!UYn6<Ύu$c0L@ rd1dˊ38LMzgM6@> wM]RFA21Z 2OH<\Ls>?R0d1(rNm٭j"n$)Z{&tDblRf'ZQ}R۝=[PRvTJܗܐL5\$GCVdyKd 4E˄!!E#m/WirREu"h#DIJalZ8$~uݒKyY.ŶjY! Ɖرާ19!4zF)1"˔df%?ZӋ*lTeYժǠQI4 UkxURNs"p +'Oc/Y/Su'rSt%ٟͣڕ—ϺFB|~>=LH /0¹NJVsLk?3 @JYH6E4;؋ge٨͸Y,+ՐEr]ҌZF)]+I&E?,R7v˶/^7EYa+R0ӏڒeYXLbPfzwmeY ]2`a]qӯ׽#o7.)7Z;K~/[^mz{rY{k4;c.f~r-N)ۛ.fBh2Z\gJ\5bY] !2Gw0u#~/cc"JS!}@ <>O$R7r{^\}nE%~4KOE!· ͅdZ}-IJG4-XN̎6+J3)H*#v?(%Z7V &k\ҙrN,gU!Ɣ1GWF 傀ٻL6' oFFH#tsBQFz$Sj2⃳ÊKٱ dV$yaQyFc&D,UO$kIPxHD!s)OkJ}LnB9fٌMmⴢf819h] 6ctq4!-6&%f O%d⫹ FAH16|i$G( q)-J%ԂB"B8ZP%%3[Py? Ә}1DJa:<*m?r.E禺T7.Ƙǔ%3sᛦUE V8rFtH8y2a4"apUA0*81ZϾNQxB$9+Q䙜_'@]u&F5.d*Qĺ1n#Ӊ,pwKU+6p;F ,C Bsͨ SHъ佛)Jå0N\0{tY!  kϋY?~H?E>I?CABI|a|٩$Ũ|V#4~ylfGggs~0 8>҇a\-0BBp D@ J9W~} Wՙ4*Km;R 1X(|b3fEzݝnoϗ g̲R۟VEi4e5 !<ɪϗ%!o }lD9uc^Z.K쪑Z}e_\3'@WWYy9E_^稵׺`G^}Zբz];׹83`<-|Jamm6rۘ$|oFS|6GYi,ɔ\LdJ.)*f) i' g3c Gm?ֺX>-b;Ăq4s6QKeyJ6n62Hl q(l vtdBUNm}&FҐp8u$SSkdH\:2BnWDP''dbiQ69 AR1/*\ їl0ec4%E,0Ե!˂qfJ`}ۘ3+r lgfij!LލN\v " (~I"JmXbLUC0Mj#Q&4g0(s0f.\jsDjg-INgOZO>S/Pqf/@ zk[sGD}~?/y,<K}QVڟ(sr]Х9U5{)^(s# 咒 d$]?o }WlK1fl[!<|P/MmZhI]3í];~ՒD;-!Y/ttr;ǯ^ZسV&O-5`E7!64Jt6)us(ޅ$ݯ~c'ۿ_~by<+% ~P]z۵Z-1n~?WE'^^E)R$ z7Wk3tuVBDLmg")a?J!źH9!L)(67H! ]U rC#bΐ柌'IB1 CO,9jsHpa"LF &y)9DIsj+..Kq?D`H`">vebj cgŕ)pB!~7VFr%P #6H8+ w!cBw֔RxГV5δHD Nk2tHsp- PLsMߓM?&OFE)Uc ;RJq@2aQ3]ss&vL cL) w29YKG2eMo=x4$kJ2Gf Qqnɓ/s×_&9 G/ Ӈ#bwb<f7EY.v˳h?n^>WM)'_0oAC>kc ٰHBJU!um ?P;H,U12r4lE})aɨ̋vVRL4gV vR9EǀQ.ZRrQW>zFsNYV<[N0μwqkg$'.R^f(u%ܐ$ɇ肝ldbY*iy;x!gd2 *0N0r"%&NS4@lE4ym H$G0q 2^(˜ӃCXb㫫R~h+@fxh;gt.jp\lD,,/1VChxQ/y"|h;>n? 磏Y$'/ xʈQ O۔!G_}!9Ɇ>}ODxL`C=5Ek`4L:/cLۗ\t*>~캽n#߿>B6kscd` fO:ЏqEKN~ %BLX*Ĕq}JI`4tYK~q*$BheZ1a[R>~w..L3&~a;FSx?d;z?nf_honGQª1+/E#.Zyr߿>L@WdVU KY)XVEd|b1?n{Rz>ySw"YZP>LlQX>쨄VG6GkJ9Ikc< c4n49e-f*5.Q`A Br!:H) vnǜ#)*IDATjwGn-A Yoc l34M x9}Ԋ*2i8e/ p߅t Qb͘Yd~RTf$ZH!)x*K0 KOfb:%c ,Zk KzsHr",8'Bs|ZI$݁XKF(0F,xO|QFFf%h4Q)H%x8bSSHH 鮳^nJqd\ 5Si*[(U?:Q01s4$\!̘KbU.KI$%@hnj+9RJ4`6)*!)uQ9;ѹ@H[T7*f(gwJ-rYCYL"!ɅOG:f!Ȱne7$"w'܇ֺTr֖UNhԶzօy[3Cں, Fo 0D)ˊkrdؙ Yٲ~hץ(`D Ȓ]ǹ+i$z7 %QA3 ڞ:șns u(N_:P#x>hz'aǮ͢<$ׅzxR xM g2bID88Qł=(cC_>O|Q0'g Bɹ)ԇu '"zNX\'%m ΛˠlNº- QZRE (#)uW!ZW5a*dn<7UaB*2 o;]Q3>3Jt$ ^' lU['K^Q( H&~QEHDPPDJEBtR|6䦐,(k7ZE9&M3le%ZtњE84'-[dɒ^kef<1?`8s5| Π<W .4 Ab@#K2k.uiGU [XP3scm 62rJBRRꎽ|S{h_WC>xr>P;k3]~۴iuLszDض}F"bfooDC,4L(=(>],Ziߏ^w{\L T@0 "J%!Dl}.ݱq}<_9%8vK'lAKJ a)m+W7Ø\8&ׇyu_j3!-bGy\pHٳ竻m_r.yd6gl&1ʵ?L*\s8S`fcD78W.L3q̵+B\,tIYwndË |&ε&h!](ʠ.H|ugGO毶͓֢TXP&4Cc&Fi(')Qxby)J Њ)Xg1bzn_. µO)! H`|BI@0>S@$T@=Bt LC9d]<%9@KE)'68R]އ~QX!U\T8E ጦ߄"]]^Yd>9).R J_zcgsrv91@6٢!۶c2x^raC`\'5ۦ]E;`F峛[kDK6NȁDGs9+B&É & r6n4mQe1'8$eR 3#+gXЕߦ8vm(oHz^_~=|ϝ)&zvm34Ѳ{yT<[88c~\еƹx۟?YUny"|ϲ͹X缏N/- ' ׃)yy .&ͦ]"7u5Ͼ5GVud 8J c+>lP, 3X.(PeП~Rpq~CN\|d0OfP}p},ۏ/)q=׽V),z^ĄmV(.zs0ͣT .m.fbJǔo?Bnl.?~yg x09Z%&iCZ 2ɐrBofK˯Mp,0$|CNd]RRŔ2#,NaRP=<'L"!"I9>qAn iQ2F~ɂ:">Y81#!@1;|{K-d,"|ӌc 9At+Q|pTcDobp龡3zA Zb(6GSHYd" bRnY)m#TDرiR(viQp8$׵ڷ^2.*))k8%R4} v&R6(QHwշ7e)1bfZ6[ՊA)!Cc.j}S`p8e|!fK7g@9Sj'j;dl R'4g,ѯ ):|Qe8%r m̈.x/(;t㫫RTźعiv{:g+y8 D71cR8b*`GʧW. 
v sʧuHws9 q֧.SrzZjʨrS2ӊ2BqR}?xPU38PkNLD~#O-c{{4;Z֧v6/3:fR*qx]΋gPI(@xGv9c(^|ti;o9g&< G feEWjBSem<ڌ.fp9\(Pt\"QD8˗ `|֯j HM3/`P(5'R9SdG mvu]q ]-D;z`Qjގ_RA/lԥb6ǫ_{WuGOWgkL I=\m7WMqv8gWYݴ;=b,:v >$(;8]BBG A3zԫu!qӶ>լ(є H t<ֆyu;gap!*-R q{(`4d$YSLJZԟ'oOgukN0F1EɌ#NNR1#'FI 12;L1 0KXrk}JX`PrνK R%Y(0![_|ZՀx"E$I)q;) u1)GT 8a@F&f01tb'VC* 6%C4A=u.t; &1>C 0,qž0C,JJ068P@9_jMȮ2OFdD1cG|9wf#LJB #.$N3xD JO|9!LRR'BNPD6f:-D*8Nu͏CsD=1wndomxfNj %a9$Q$) @2s,4%9`f]1ȖEu{DsLkFR;ME VKu܍3Jdv`}W5>b2MhF7+$ʹ=2$2M})E{쨇qgxF9gQ9U#1v_mڴ&>řzW>L7[gLyZ?[w)-Z|hV 7oaJ=C>$7o5P__כCH)Z3F$p;h_<6!A{|o:' esN11M߸Zϖ_=28|YB0p ⊏ɌN,gt^Ь;"³dvw{c};Ggr9!1()xa SOۉG&Bs!]kͤ$DzrQ̻ΙS'Z"2|^W>!3͓R?=f*˫ֆ! B~c1YC OOp 7L`Gٲ~L_Gc;<ێomx׎l߻h1v գn{PD (ż$\ЦubzJeNw˺h:;&E3f*-F+Bdi,e,q.%vf?<|O>mQntzPF(s8#A+b89Ǔm!98i`\ d/ϔtNL24HmL1Ss? yͩJ Q qUi‚ R3 bD$DHuޏb.y#&`ǰ(wTEED#6%G,;$ŀp0h$xZџt%’@ LpJ140lQW`|NElΟ!g,e* R1*6QKF},4pI)`Gձ3!ֽsZn~?O/ H !)6bৡLc9!q i?xL"cf8Mǁ>(K>`"zjpCv}X!쀧N'OvY5ƌH`H8 Pȡ )g:->1Rc/wwVgݱ _gm[ST0!7 Œcm0}δ"&p,9;k~ԌvG{w<aT5JsiS(0ge7('8B;?dߌSH;{Ϥ7q쉸m"y!?@7~[3ͼ|rsy;p?o>tR][˿k|= |;M )to;>/ p7bz|PB0NQ8c XHHQ*DvY7 x(Y-JLiiC{%ǂ3 1ŷWGE|ۣ)C]4'tǑL@ GYƈ=زbwO>C`Wg}gʮZk5懟gZ=+nmy&ٯo:]r\,gL4ĈYnYE).͟|vt9&wZR?ZL-jR1S <T˕X5/Wo :T m?M lULg㣳yAm<]H>&ҧOEGJyfRP&0K%b]f&HD|!V*:/H&ܧHy"dΩ޻2h0m74]FB}tS昊/n QCNlcHG3Rf4((N(FF2 2ȕcp1{ziV3jVJ$NqACrߏrUz;ro~:Y9zĘ[|G'ӥ{'{'h|=])6V=@ }ϕ-Z2~s#ݱZRB>@{H9yrίBl5d.=4ۿhl[嶠U,З_uQj|jUiD0%C8B4 yyןwJ)$]r iVoD0~5F/BcHݞ9 '5v333ظ,I{D̠ ')gEnϖ.qtUm3,*g?ؿt!'vwYժ 6tC?O>^?Yg *XGKXm;h+'}pnz͝ӔR:Do£U5-8co.j :}4[Д(dY0'>)Ė@ԡ\s>[뒑f@J"?Z ۣL,*$ɒoE|PLq99ג1pSg}@BqI\=/?bُhu~jί90l)-:ܳn7"s9w&&8Gi>9n1Zq*TRD$O,s= ߰u(,KlGOi`Ø\1!sÅ` Z1B|LJ9vRck xSaLB$CFpΜ1VӲ4Q h;&b٪"cNv3~ JPYԘI^ Px|xyVs6yqp4!֎'S*: dN4(2޶6UJIz dt 4g#"  !޸]E , ٌ1'9c8:YU3Xb)0:E`d,e)[i^vXԊԓDYBRHAJȗ7c]PM8ڼ7cI **э(I"^v4KϭiRzʅűP‚ 6e;xƙaLAp\Dy8ٺ7cQ1*$zt)yCL0aS|FLdZf`$!()땠 0~;Nao'1DI$A] ˗7n_~-Sf4_9(^T4F'!t/7Gʉ.ԩ}f|w ofrξ/U!9ݠq1&DJx^jBƇ#&})lZ8nc'-tՌ?:+ՂG1|Q3~0se^np_|}$KBoHr>մq|x64|Vz^=J;V@1 ;\&A*ʕIU"r[/9i?bMSB<} w6).k"6mqQa0P9ۏWۡoo$?lUsecq_v4>~]s}4ߜզL~,P4|w9%7J~0` HݚA w_~wrϹ0&Ƙ1pywBJIkSthnɣNgP4cКc4j^c('赦BH`S?TYPʀa)٢X=<(׿۝maz?e9~s9ti6R+Dș2ٌ%!ӮLTT)SddC4YdۂA!y&Tan{ETf׵Y*B>vQ  nvüT~لHk1fF7hHY]=8bB^6ɩu0RWrrI&\ YI,އe%gZv]b8 Hs4JйݭA B@iB)B Ա*d \@JR1f"4x9l:ĢHK#0DDU`3z0 1[#$hBpÂ)-;RP!Gi}NR؍Z ԓr`k}ceDp> q(jF(L0wipΈq8+$Y+BGFYNAs*8cs`hqb%, h7m2"d1cx |0L 1HC.hf캖朳>Z:E BpȈ<d(aQsom0D' JV%`8yNzoP1MQ^+*wGs@.Q[ĿOas86Ѫi۳wyFeU))907|NhfBf%aզ(~7^EpP>a|_ӽ\*~7P?ۚ_\ZrZ ,rĐCNTYBV5 m*ic,%oom/Fy=cSnZrsc;EUsl)ӜGݝexdͬT☬ޱC((t 3FڍK o/2=Y!1__5{_d=m]ԗ;fK:˷{pѿ6*OVlLe)_ܴd絪$G=Y<}(,͡$;ؔX~]?cD~n [ų6Ÿ\gZS)l}]Z X3 I[DiLNJ0JĔ)11v9" 1(coV`J1!|}N}Yu|v6-*. 3U5X߽-4DdhAO?  O oR3x u$Fd1NqM@۽MӸG+!f1yT291edњQR1J&6̤@8l ~̌gĦ[U)D iqb7-?uhNv1.9;oJHPp< ^0B.sLvm2!3kpiԊQİ !#8LA8@|p1g;"%x8B`#P;bddߣ}=BָL舏)F Jnm2@"wظl vq Wv>*= .ҤaE9vSCdqN0֎ Ȥ)BPbJ#ɥkM!+3E(z8b()@ʠ"r3Z=bƔND2d1e@ISB!u.L1vFPc4e(ı0]8Y؋$#:$, aӗ8Č>e_VC( M/7:)^>uǼSY{gYzvU&&407)~Xkצ~7'3(ov]:Hw\Շ\5ߋN$?o5Ǜql,Esŷ ELWfcVK-$ }$8vEWp=)vD~i+o ĉZq6+3|Ww\E=/gN"7]|Vddhʘjwf o]{lӳ0xfEOWn5M'ˋ=>+ų폞??i8ϟTRq9}Wy^ 6eR '32תwϺ k%R˻|rBe 4.X 〆$7W1NZ`DsfsqwJYٻе`x͝%YG`UxΡВp %f$@"oo{r. 8Q"3a ~LS֔5ׄ)so" = A *~!Bb O HF"tq(2TPqb(U\OocM%Da&$}7s /7BGO&)ƒ3մBB8 82|w. 
?qZљd)C+bFv@V3:{DRQ5TOiIǮ*eLacɲ`jpB úgK8Ox^+?R h>g/@Iք´lJ[\$'ӬPɱahBbhL7I׆gUQ`bHJvɺ찏ԝN9EG>٬RBhY"Ӝl9(b[H9gb@hd!%vɸR6- ޏcl\<ÃӇ6/7 , P?Z =8M]o?{|\ zvq`95.R2"ڌ' .b^/朕Cqp0~#e]x~s 󃚨:9c0Y;xuiA}32> &hN8)sB=BM/b5|Nm6780zIPӿ9xrϽc&ȈBr(Bl%؇4m<^Ea05ȩLdQ.tHFHI"Ā= Ta6fbάJH'׻a]2[X-ɫ\pqD0#_ '& <<棳ffƐ01[kI->z~ %!c X]c%`-)xAY@rd@9q#vh.l.fLQ 4W8sk83%=]/tH%%C'o@\E1:9u9)J0EBaqHaYr|aQ)gTbVm)f$uε6UB"΂1xvsqΈdkwQJ|ߙbJP*~苪, Qw GJu9 &G66cb1tc”`{H,i}”'QG'&Υu- QQX4TӠ =T!Q)oN9R% GO2B-)5go`/eE(%k|0wGt2}놲Pxشftc=]FU)XWm1pxs|7VEC| 5TJe B4_5?]E!y- 0T ry=^_g mO:B-'ϕ&mc`(#`<5a|u?^\ٔpPhV4Rn G#%ť5|YVE DfO ,nFpLFrY,֘z+=v]fb]sO׿`nz<5vL/6ͫ]g7 ܍j3)q^V,ánnY?x񓇳FP`_`;Ww㮱_1/嗗cɒPF;GWDS*mՋ睳#yޙxClZskl>nܮWE;$w;eQ~`jajc"6XH  4EDgFQ#OW;㽏Z F"Ӣ )iP2(&VjF=L4 L)xf SG!!ckV4F YT$\1㈝s8#ͨ5dXwD&LjUAB 1B)NO3ܱJ0$zXSQ YjAbc#^k%9iL/gYBt]:JB2k@ lJA2⑟ 38A5ɔTiଌ)f4dV_7 0r2c %B̋*}OTt_a\!zjR?ގSJę*(0$')3泒e)Y:H)[VCI>aPR1"g,E8Cb9 KI4GW{=cY(LrkLú,&5n(bQl"2ęp޹FeA.'sƗZJ]1DLKx0bIUPn)) FLXs Q]Ȉ3,{T#Vs03^vp)&Ej I0匾*"Zd(Gz| 6)_ C>''ۀQ~_ڮ #P}3wF=n}lbgrB^<=SJ|[^iX/T*tMT2G{h\/u.w} ?H9*ޓJ)QN%/=rgM.4$}P:/$ETI.<\V Em*nvŋvi]ӌb8fxs:ÅrZEM1>9>Z* % [SJ0bMXʹ{N{Þ0 H$6̋k )=[=v_YQ]Zr׍ao26Y>;G(O1Ojח˝'szb)C)i+ ì_|ֿ;|weoCK1|4ZzEYbf\kd-pqZ(i^u[$Ie])R@GUh45l]]*U(WydUVVCUZFDG=\q[B8T!qUP*(^\mEBSXTgd]TEEWEUiӫ77`iY)ּ>[|6mb<[55xZuMOX/B LFvzwy֍"w=>l\(chh\@G͘sL~|;(ɇ!| A~nҀ1 &R)"nQ1H)0Liʾ,$ΐFg9rC9 +1+ 띔Q0Cʷoj'F,fIv ihJ!#A fЏqai bAa2OC1E[G nvYC>Np)9>iiIQ:}Z,~tEUS ;FIif]F(JLe%cʀad#Ҍ&hC c l|{Kp]1LXd_5:zTHn(n:ɪѻ,Fia$PUGlcZib%m!.H%i%P"&(b >ȧC6U!(i)o˶2,nI2@7RӔ%IRԄ˅9/71Hl&k )Ic|P"-ކ41G1)#}?}IR e3h Rtv@17,眽Rco:e,2fPU&!O:T0G;1rQB(!&uP"E[On;R3n|'{ I# %vo^ IqO:?O9#ǃH?tf"#}P}{4i q2߿0O`~ҿN˲Pl⬺?^-?Vd͵1q\ʜh{:ͭzxR7<ۘEQNǟma+{jI۝_nJTKE߽ Emj#T/ %p$elrY %0t6@Bɹ^ ANd`#1ljIW˳'տВ.7m>_To˳ppK!kn;NcY#ni.67ϗ%cRw'竢To/ʫ B 1npO/_ra˅Oh$]$^.Ěa2FX,V23HLA)J!2aJ!1q& *&/BPĐ+Xbh޿;Li6ObfĄi$0&Hq>X-dS&V]9ۘ)K6rr!f#g ,Rc8&i"23}2BIQ1KNL>b8vB*]0ZpꂤnSEE<ʓ+>䫣r:Ҩ FlJ3dJfGk#RJ4felɁ*GgH3`F#M%)eqm%병T)}"Lb,0Fۭy0A(,depӲ֥'烍5%<& #[bRsbNi滂A |po]Ɖ2GaZOٴ;ec<IJ"yTtau\hlue!!4hɾLCE!&V(4Խs :Z7^na EKS#9,uu{խq7e3TҴBXq6ADRkNei[CUmn~#O B@|h~Ӥa P&>rP?9A_219{ܺ0}rs>HXZGOrv߿x{ܿI_|xRSxss֛=PTٻًP50Xu3?yRlEmDu|׵>oojq/_oÔEN00ﶇ]?4EH*S[7Sq0\N)JydMs:N> /_ғυuh}%rƷqp \a4\e$ վ?բp}__i4T "/՗$^sH׻}\uSBY?Ӣ7bG!H l'6>E@)k]S€1ƲRdkL~o>i6`@΃lv62Bb-$\K-8;w sNQMQ1'!s?%ȨFۧ@+l+I ѱFqԥ8Q&Vrp)H`B,3uE]7>9pdX_nʘW鮏YdR(J\R@pX73hL>-[%sc4'om]f}e!H߯x =)p y MiczV&+4ْ 93B[+I}!"qSvƢG A?k-a,k*+MjSt;;vnU~,A+m(cP"af;Z4XVqJOh =m^I!'qnڗ?Sh1F.U[yD|w8Dε)@(!lsrSc@'fnXUYgpJHټE}1# bDjZg QXǨȮ>37}bLy|jgQ(dz~tu;Dɘ(~}@$>"# )?ay{}(8Zb͒;{yժ1$rdzMZJήׅԼl\B1=&†(ؿ\'ƿ|{Fh_ͳzIw׋&Q<`!]W!#>'DZPa~S!9FJHovAj#RF@۟͛7~{9[e an$7wF>aF&I\|A+#XWW[+P-\(/)4'j웛ׇ2z\fc2Dѽxo?u.B۪OⲌG) enM x}L6щ]ݹ$'=)gsL RQ,f*R>fIτǣO9̄ `{BA䓐)9HP$PJΘD֘$|a1 S 6gy 58&wS3H\1 ItUߥu|lNi%R7D" %$a7L~ěbfBvDFh^WtW{(TLYC].fb}Monk*屋ʰ#i+Q:!BJ3g<BʤH&ͪq{ FQUJekdʹ=M} >( 09*d|`R ujTFD$wy=Y3)X.4hcRPiO7U)(˂E,M= .XP,J `L(|~7_5BiME$vfJ4wnL71>O)qK$4Y 쳌E1FO|9 fR8>JP=v$7 Bc u aaLcʌCaJ.'?Q&AfoJTJ1(~SVƜR) >\zU?0>=0 _OJw'gelwR,>ep([vW?]]֫}{sdLʼy4?653<!}>|8+'h).M_nRuͦ0|bcg8eA/_wGƞջh4J 8[û7wŦ~.] R~5g˦. 
o4M^/Z:='mS9Q0gO/I?ՓjuђH,$`$2Șz1oFwoC8Nu|;gfg/b7t!M/lrQ]uPjij-k$*Mz;UJvs܍s4?|CW~d*o_o$)Nv 6~ҬƐo_gKyU)e"^ln9=T \׸\UWoVU7띓RfJh4P<YrI2ąE)BBpx^ 4qmэaƛ9v>!3&HsrN̜}(D>Rs8H9Ƕ2#)ZK P!EQ\(2 7 Z2*^1 %"̀sXk%eƶB)E<2, d #8q83~YTS39e*CwGgH"ΆŮ>KS5zJHItzSTj l GJ'g2٨{{k9V7E&>1K): G7K#S@MUr=CJ $gH3{NN/J23 E* Kl)YE>kKu)dﻱ0z ٲrnFATj":!t-q,YYX2vZ "(gMAz*CD=ez{32Fgc]0T˶ .Ysi̮8^M0+e'ۭJ*HɧB^uQq % C*Z7mY0a< P'NvKX+b'6s]~j J٠FatO|˥P8) TIFyyG~#|> h~i뢐);--<~!y{u쫧ׇ'|4S e8=>zH{[\=]Z,M7 eZ@]i6O?w/5|rqEv|/V_<]g !P$-v}ӖR fۍHyZHEUM[!un댚o_ȹgOU%bzn者FZ={<4IMRn7c'UBLeDL&a2m۽qrv ѷhB 8;+~^˥ZL`,.L˲\g-"l{b>]}qhdz_TR=_TG!f-4n;aUKVܽXUSsg]BnHǴZ/tCy~f t0E>ſ4zs܍rfSlNU<=đC9N%ewNt$|fz)iM .&9˾01䍖N.UoV s 1QK`?^JNa"JɅ1bZa7 8MqvDD:"Ar&s,kF?!,T%D0g1%7L(T2Wd[%BC(Zl6P2|w-u;NBbKI4B27?8SIDղ,SL˲뼍޳@aPJ6/iU16EI\/ՈuOGT>"#ԏa>?R({/*PA}d}х$ q|3M m'{<<>>!mY?=`C0c`/mt]ۛaw77붬+M"|Ro6VMO4p)ꅾڏLOqDź2LE9m^|y>'Nx5> d7+bNR8fø d'+c>?3⫻Q ڬ̾w~ J__4$>_.(O=_ 5ZjUж]J EIGϚ:(Y|$ .L VXb?9c`!ZRv7[wK.q7Wi]ȪR C6wo7[mHnnv%|֮|w~|nUٙ~wV\Omh :ۈE#aD,1)-*\ Ct!|(>5P?dپ s @RJ< 'HCƨQB HfqEI.g)t< ™184B[=XUyyYIh=.L|pZۣNCI1I*IB!:O!*Fh,.%bmhK(JbrH│. V N$} MuH #1Q)nnjeqwJaĸnOR0 s4@0HR_ڦ:4PaU̹T!ŠL#׎sBQ5^ ?*UJ*<#eЊA%c \jqNwOp VPibں ^*bcZa((=z= \a8"1$9xdRQ)TSŀ)"\]s"}>.q F2<\ Mgèoe~T/?4>L<<UqziǛ_ncU |pxr|?ݳR}~gz)4LCv#P]ۭVŋunJ`ڦ릎$5(e}{5^,__z/SIv7c[wG?s)dQݔr٪e0 HT&0 [uUljq ⋦SZDyHX؜VF ʰVYAG`Sz;ro^bJ'JyB&EU,+=Wwo>Mȹ.Urs ' A j|R|pWO_?۬$('wm7WW !œrwm)0؛;ݲRAE$d7M׻㗛EA) &;dX F⓼E>wA8I$Y-@st#,fץaŤO!.*)H4#owaQ0+BzEwa^9*Y47.-(ЩSOH],EU$D~x}xzv2;yFJ0dMQGwsBs^5~,KuWh]pөrHi y3C=Z.KCbMf7J{B5hGkc[h{x גi |D&b_RKDH)'? 礝I;8|БI·Ïs0prxLcVL/E\={q=òIxd#I_݋m>+-C}x[-zOO=Ow!uZբJFqaZ |yM/Rɿ~{MRC(t}.]/{wn}olonF"__x)[^]MuS*'?=l&QFs)Ix f/e&{9dh!:KFd1$X3nec0)hfD"UKj ;f->Ic(~!B* qYُa?uU]leuٷyOk$駦)iYޥ( KSWZm!5"7YNp|jHiӄ樭*6E!$o^n\kp؏C󍑜o1~ 9: 0 6I%S!2K 1Ӑx4H'.6YDb ¶M]y~2Z37&0`piѧ9'G3ۛn4UL) TTdњ`sX 2ʂ%U)b]I}.럶q腔>zȔ-%#\d98Y 5 pCҽŢ)3F] :,(í!23).Om8RhJ' (1tiLtq. ԥQJĄ#FYWbߧ RKb 8>hE9B&JmJSШ q}4FSvZԄWJQ:vȏIYX ;7ORj]tDaBa;"`!몹9 T!5-녎)oZ3m5DX0JurU7Swi(u)լon+#9ZfJT~ٖL&tu&Ob2=ffR.8V Eeb y28-٪Y+V 1}GE ih.U٘{:'P|3 hqD|4>?bQ ~/lk͘>}N~4<9~]RYzQK!q=GUQd<ot6?5?BhMUٔx+ KPҰRR7n~ݖZU+]ʫݍU ׿Xp0+!_ߌe*SpIJv9vtuW/:?t)ęotۣ\YrtIhc] 'Sʔ\2}x.a #)8ևfyQ >R`Jְ( QEB6iV XKnuYUwFᮯ nB)տzm.0U%84QlCxw[e`(_׫nU#V76 & й8s#$EU_btN۝[TJ$+M) MEVS)M#V54 +)^r-rB[ l C*zpc_ei~ ) QBf2%ZZ.HH8yV H!W'-Led8 Ncrn1+)0"2Hɧ5\<Ӳ1c Y,O)a䋶ꂙAIV i3$%43=ڬ 睤9PF=VT^N"Kw}? 0R,at,0Bju.RHH%x֕:f n,d'Ȧ*DcAj.%XizJ=8t^Wn0R&r)0OQ}dm]@L;D0EeYO4_Q;%>|#>{om.ǫQ~c7M.|3_O #$gd ~$=|?hu8Q{ngϛզ{[j.;ߔ7WǛRťwIy^ jź>zp|}/ϟ7@ưV]欧EQxB0¢QRUqyRTC܏,ZW ˏ{F>X$fj\kSy O) c75)3pv!VT$%*!mհbL9J|uLE`B!sDJ¢(S 9m6WFV̬r؍a46{ܜl)j/K}֨^Me)꼯lj}wSۣ__攞L=~|@}4~ B6<1OSPsE B!f:m̵ԥ 33c!` Q !g4Y+*h("!UJa݈WU)ONFi hHn)EA!JE%!d ,UvG[OvӳEcH.fySn+1QQ7sXs:՜#g-Zảp2kHj@¾KZ)MRvRQ1gT4dg'.bTXW!5%( TF0&/JK<^(dQEƦ*4)UJ^m'a@DZid2U~&Jҥxb*5EB^Au2("Ωa ˅.MRW庎3Y@r'Ipw)yIL#ʲԑ̶i4s>jb)O1˒Rڲ!Zv{[ռ=v3i!)gIġo_wZ4B2T RG7 V*NI(*}s356^|N"OTB; 6%yJЧ{9>x߳'_4b?_o#ٕʼn82f7FeK0,Eـaaːlz̛GrHV[nw;k7A+3{G §bhR^ЫOX _H?sv//9q_Usc_=}w09zÿ]~sijk:;7V|ӽ}tO懏KwMm򇏻:}pWgww;TMVx{?Mtv+w}3XjMB(AmJ(ÐrשUcɅ9oYcxui3-  SP2&i(+nJ31}@ )%rmH(%-'o;v ;ZZ-,ΩdZ;#KI'V⺭SJsJ,)|>oeg޷(@uvy\#+ t+6py=9E>9)#C9Յ R k?~OctB(rcU%9PKPraVu9/nPp?f!Oͽ2I2VGS Dc qNYF#^;S)EU]I:5w8Ep PX$x(%4+8,@W0̐/9[粋sE*cJBa}@da,׵xBysӨ!Z/@,zAK ĺ211w&',@6vOn GzӔ9ñ/R$ &:SR(e""}2 1HM`aXdUzj-sLp*%atVujRXKmї8,6 1 +) , $.;J*XQP{CLEś0zejcQORsfd-hyX,rbܙ:l0l[!!F{fu6/88"@|rJJQrh|I!FBlKeoWuӠIisiHúƹ7X-k7OASQx8i`]eq|< ' hҧ>]o-?~|M)f㴛z;qFFzJo,q? 
a3WtssҔx96BُȢF;YJ{VSgM,n(4wPhA9L h#RMbć}X4(P qPE\XчRx]rDSJ7n:sFʔgk32e\CAdZqќ3h~7FecPL gsDJ%盩Zܵkm <@_شJ{_F32 NN3NUJdfRS\)Fgx4ّXXgR,'ƔZmk*qdD8LOɈc,YKvc(yY,KCkzNS1{&CfyLj+J8%#8+RqJi=I* }V̷Ӵy?9˂a`[k[Gw4lv3煕Fwȩn3j7 b8~hSB0`uY;>s%ibIllcUӎ~gǘU* F6'SEwW:9@NOt[N/x2}9$񉷝Tr_R)kwn<~n?m6UX7c?,p?:]2f2n>/HKH}, _!q?꟬_i?OO4߿=XTWWen)|U;m>nB<6g_n9ZzXk2V%Qͦϛz/i򜹿Jze>0qJQf0>sBDLKn\5-0J0sI)S|jE.G6$XiMY ݦ֪>J0vj1<>@AOwfΝSR~/vJc RD5o67꛳牍.[cK.宏ZRsf 폺qs}m8X4!m"~ 1APc{ed]y6Ho6f;JժG:{ggӺ@L\*k1V/^U%J0ͱVnp?/~_KMBO(ys<`jϓ*crP0hSGqH~Qym ] PVg׻ >%o?lMSY$ _nӵ:DbsH/O}<i/0X/uWt~ٞ__ݵ-fOo7E=nƋzl? .ˍJ(I`e,ts05_<;-\e-Rl¬#a.RR CȕA.\@jك)gNEXiyI 1)FČ'1'0jVr)REI5jw?MSZ4UZR%JXHUS cS^nmJ2g8A䋍3Jg'T!F}>jQ JC+U %̐s%TY'~?K?.2y(,W[@!"ɐ })xј~9qa'@8L}ݘ4`:͡-Dp$-6->,p#T.5Y)CӘy,0"Fbjź >kWH ^x]<9 QTdc)w]e~mmH.w5Cg}]UJZO G$!^|igvxAxlϳ)~;UU 0K:v7?_OSo~|j}X>( J\}_7]5OR/?Yrؿy7 a,|WO۷o7no6Z}>Ew]c &>߿o? o*oq ...BYSg 'C_ j'Y5-aJ;MWV" ?tc5ò݅ i~kך-29ZƉJQaywT9N1BIr'ܩNK j\\6 rYC|-P'>f#I}*()}jj H%y,zMr1q#هT iYĔ8 s! EwRnLO,4aeW&mUj9R]mwc-,%6*"r;zP1RUQFs,|*ɐiSEłRYA\Pg":rՠ6RI2f#xS0\Bʕ%Ć@Jj;cuԚ늜ֻOChc[\1M7G0R|+KQj-hLly1ƺ^88fiHjlƑQ $"Wjrx? K4&ft0u"bVsLA{w?]$0C)RY;tA QW sL)5 oGDj|W #/j.|M%= }{KǽL >(S|QURRT﯇]Oՙ$ko:Hp]u}?nΩX`ι>rS g-BN4F_&ۿWC//ugWvh/xMc*klc!{_ꊚa.z{}s|Z =0Db%muղ0)1or9ˢPZJ # x#twbsNk#*)pLQb-󰼨ǀBC2҂I0 >}򗥨-<v^~t8q (sM1aN$\ 1AHsEJ $P"d m<͜Ɠ^ԇnU$d^0QcimZN@ J)1H f{ĤTӜY`\I5?[̌S~/*-tB I@. A[*9[{jib3g))23s6Jlj V/6AC Ʃbݍ1Z#Xk[bL( n<ʌcj>N#CTzupۏ)F[R T48ƩT\)'D]cEƺv9KQHD\TIQXuh.*#"P`ƗhFDһ/׍uFδbcqk)=8:V@@g5eM g)P[q]zC<~ > q=(@DMW.R^NC %>>zYl Yzsn<1ڟA|F/gޜVAѧ?Zg`OIK!;rs~\P E$^\E~&&|}f7~C8ӧ)t3 i*HnJ?˦_"r.B/6T ((\u5)&+xۍQWV[ S-rwYTQ7_c*lWu]ymrrJV/ w>ژb: 5 qg?dR=3) S+U Yc9 ^[<1jCa.2RSp/ ĸ(n Lo׫ >fqRt#z𪳅cG)[)/WZYNN*b ! C!8.hW]!R r*2RPƓsJ|KR]#LQ@&0odžoo:Po7X H0.d+MrӸ($!j?׭(X0x,g>EY[rDTҔf A A.BvL1R$R$ҋ%cAT0b)j¢fiBR{2J WR7&YY<5@?<%'qOPŔc_*p6#qVtj)Ԉ8t<˝j !bvb)0k.NhM)E?P_D xysa gk%+Bum2\ kD4>y9QTYjtre员9/6sIx8NQ IFDȇ#3=a5z|$%hojCЖPeԒOr GdxPjʅ=㦪v8DZz嶾/wҮ s茕TP'[Uc*U FidE L⩘!~ U%s1 ҖZ㠜MO{YgOC^'%Nd.<^NI 8*?QC~~"Eũ  4܍?_(!)9R^<=t"NzjY< bY,ꟿN%haowWzU3bqۛ0kr >w ~۵Lmmq Ĵ8fJ+'p1M78us;GxFqN1'g8{(D14"fPn|ͽ7M OJT4MŀM-RVطs1ddEb@BqΨU%,2% *+P1f樍PJΙ:ɣF<B mA,*3Ǩ$z@Tɕ.G% &'9YNV(uwYFZw0ժxw"tϕ"sJw2J)Rb c༸;eqޮ)պn@P!OpRJnJ0̫&#}l|׏S]#}iJ *R;E'fJ0%/=M(BO'`B>Zcvrcv:~iKLL$9ԜSȌVTQH"'Y~nW98yVYJą9t<5"JQSe |HHg<~ uXV~b#gfl,xCJ "e[\]X'*$O C>TʜZ Czx܎0$Kx Cer5g9*}?~Ĭ 4n\s2G}JXUZ6JPa,7Ѧx*N_tUQTRpgP)8mܲXJO$`B$_v=Cpx3Gc\*Xz1~Qk/g|Yb* %#3}&OöBP~wo~}YmTs"u{Ηgle!曳篿Zyݵyw?~x;_7=k4NXmWʉ)CBFӶ0˕]^t*:{rsmEJ4,BD(`H)E]_땫[ۇp 4Me3#0iPN>xq{Wq  A# A4*eB&I z[K8L^k ÜG_iB)@P(R/j9sJL$J6՟}g'Uu[-8"/_uxq^ %-HV$0Kc!TKV%E,yxǒ>O{hHD~JS9<Çcl\!̨8EӌF)4rVbU};OScq ,z>,%: SȤ4*Ś-@!U2Fِ3R)%3;`4 @ `+C 9Q^ʒ {9J䒉 QsE'/D[92Ir.0Vi%'9PR$X(8tK"!klrGRJDYJddd !LBVDcڬa, 9E}U< /%k%sc?ԕF>m/:si*o̹q b.~*R˶J.>"pøZG?eYX.~YY03Ɋ#ʐvJ(3nڊ@[YØ [k%1Άѐ(^n-SvPN)tVtIVyUi8~UWa̿yzsU%󧛸2GG% n-DRY|GTAs8`|7=\n4C y;DܠS*BRRmgX@ TdQ;cmcHV8EJTUs q7Jcj{ Z p`0TNq\I5~rJ)3yluEC\w*' Hdc#9 )i|伟}UnDAX +IX7Y"$< gH*]o"*IRʇ\E'e([Ļlp!c 鬩QVj42y)%O-Ng_Kqش)峦K=o6JK"\Z#aUNUxeJ3_m%8 Qx;׫*ftu4!K<TʐDR JᬩK86Y:_jJdALœ{͇)Eźu)=xw;c`# :;Nպ9t` "OwΑst<2GcAЎ`b޳E@䓰iU YdU7ʜWhBύw/:[*saoDd.}=к[~WF˛Oߜ9#}d z]\ӵ۫vբѨOʽL7gfmgoQb-q:,bnLE/STnV¢KjbۏV6fِqkY )\|OcbÜ$A$caBQfY<2LRj 蕐TL 0dsIn!x %D(iSA(PSH:[L"EߥN"\<Ă*VdaZ ?;WUպ@mq7+3%?߼T]dFYAӼ F!4 QĹL Y4BqY2mRP1_B\|{]#N-OPUҏqZ!Y*F-ݔcRsA(,h;΍чHūV $@2ZFAX1\(ehde PXӽM9"+ZvbiPȕ)Zu-ɺQv?xULY@sF憢TJ؊䋯lթ L%13`2aRfWWs,R0HcvF #|RY\zc}glJ1JGIژ!'LcU~O*(a*1T%*9^+Y$IewxfsDPY5א! 
Ă V\K.B )$H 욺**|A7GQuL sS5S{j9nWzź)ez?ޠ1 k#{C[ 3!)iNŪ~F"xOL_ys%Wo>{ZWܮNVZ'ôێGg+k6z?8"n__N^6G ~{uzqC';;/>~u2+@dGWwC_OώR||vhKgZVMmof?k]]r JWnP+a:JA.\n & ~ЮIiԐy 34"r7RԻ;?꜂>10G ci za _\4_*ƲU|}7 S>zC,8f2{9>Tǂ/6%ɦV}v}2׫əߗoF;rd(l 3qFm>T%f3մxjgB>ʢ(*iǕ^(*c2c/QY$!UޏaSYk)NYT`Xc1:CH^6SK%)1&D5i,p|?# )p6K̲i0SnX)7?褭`b( ˘<Dbd X}sIiTG2mlYh+mUU3)hl-]p)P-X*\ һ~$.CBV~|]kclh}'E ה>hm|tX]U| X0JpR,Ia7,J7|_-WbFgϙ`ų'O^~CWRrhZtƬ3A B.~"e7w wg0F)ɃbmݴLqneVVT໫nY5*9j2(MiFsaCiqKR u,RժzG'Zk̏>̩&E,jrue^ MI}~@amhh6o~!O^hWQ?T16oajO+cQ3)"m&aC/NmAc\TqxY?ɫ\2]h9R|cFqk&*0s8!hħ!\t,r8=Vb愎⅒>&+\܈0 fэcI S ZkZËVTVFeCt-V+SQcnMpRW/鿽>=p 38njb}\w;h]ĨQ)™ar8 %vds?O?j0շh֧w>e9 Ug?}L*af(d3Vu4ù, 5'pGRYrS+ذQ#,"V!99>r*$V TKU% 3E8,BRSjq\f^rǕӡa23Tw~(GlIa`ri1yJ(գ)ͺ u\ f렱!/^rʹ ʸ1ĩQ\O)N%V*q:2ENIgo|J9I]Af跛zl|cfȌ~9E1̹4C6ayXteח'G UR,'Rp@JJq;jzRxAqp>/Dd~?~w8}y޴OޜuS} =Crt}w=1]ݼn?!?|gPkyJ\"zr+j43W |~l٪^l-Sgr=-\@}oZ._jE)>:ۜmΛ]چ\b\v_.^ξ8~u5zACvN޿ h]>)DbO~zvB]]Ǘa?FSrNa6@IJc\׋MmڬWUvH1zV)揮UqֲZu"P(* B]VTHU/^.̸ bqQ"Ɇ̲~̕% b*̑n׫v25Gv;ABUÐ^_6m*G>vJ᜞Qykkg:[*?5xGt? q:&^ﯛwWRO6LGS*+7׷] Cxq\V*#Z>w>|RfϚڥ5FC)TR>F A0S))RX&؏1,FÈ{I%l _m.V\ƓE)1BQFdM%ex9r!B>Ҫ(`mmX/;o L>UVwSbmHP#Zj>Y|k![)@HH$tFsn/**t3*J>Z)"2a6PʛFUN7fTHgg-k a?Ikj>]#b(W4 Ϛj~g@C΄ Kܼˋ?jww,iTujS׷y=~=ͫZS˯ŪJҟت W67Qm׵9 >z2H04m{ߏhf&q1I֑IjbAh  c JBGVj<]Ws"RrrR͘/6Jnl*mw׾̡ }VG6;l5Ē{l ƦՕ2si(u}'uc/r@eTEͻi )3`/oú"2TNDbinwh U5%Pa ԐRr# EoSEx{@=㥶Z0mS*Es^ HZHOc̹֧4s8ey0~ÞqSa9bOʥ #T)"rSuKu|f&\o rdR'dmȪӁ}5H8A AAeu~kg2@fZ+Y|iTPr7̢Cbֻ~W7ڇ l-0RJn7f:XWT +v?{h?(RE>) gM=CH>,OQ=&OTx>09? )ֆ>'W7]^<'LFħl2?&xٺPbR @f@L6#Ae k.!U5TUCu}tvѺz!DXw~Js727aT7=Cݰc3N1(UR>9-syn$V̿,(r|N*Ċ*g!vU$TY4.wW_nIω(Q|JjX wN@ \i\hNR A˵Hy~̩ +FSaR᚜Z,]9U(),ą>UF}rshb1Fq1PB"\k~w;nIk!oAFU9Jq7櫛of;O+aw_?\mmeξ̮VDX` |BO-))ڙ]L$Sh]1owym__vg]5vȃS1σy]ZVr˘SU)4 %[èS&BAAȔ%9zf`UCR̚т^`Ux^iަsim @T3ϫᰩKhW2K,7S }@wRbNza~s=Վٚ>w].(0k4Y@mU iUeV@)&D t9ܝM8i7NLN7~RTVLQaLqRY쯾qo/1yeMǜ")Z:k  ØBCzm5(i~`. M$8mSOΪq\,Md~fGU>4h#xc>ji3ḏϛ(;_z+<:X<(?e/&A_bir2"n' -lZhr߽ PՊ o~}P)Zv] ]5[2.*(3Vaj WkmQ8Xm߾.l+~=Z)W}^Q" yi)fF<[iXuބqֆ` q7 WfX jC9=N[}y4>3ݝ*ʰۅ" >K][9@1)˗Gc_8*`Km, V2 5X[% f`64ss_ɪ(}H۝?2Nry/wɑ.ɋ, S8N o~O%qOPbS/S5^Gn{Pd{ ?7t_O%N>o7YND)f]R*c3dnز\mjMiú+zQa"5n|1<*f6VgbA8y*?@TDm?T3,2P[m2cBʟ eę,[PS.'1 8wGU}3G+WH,cb[U7N|2TzMk׭ۊ5i٠b?HI"kB,Q?9kM^+;zU)BCn9 ٍqr*ιX!OX`ZkeNXN ʱ٪eTeS)D*)C1VJMnʀm4.9sʪvJku-[irHL@GEEOf*7vÙhRQY>͜n(̒ώTd,uV)\76Hr")}JɜC1!kK~yQ~BZvMw0gt8J9'C4~W3EX\KG*hU@>W{;I^vcOQk{EDnvAsJǺGt{]9g/9p?=Z>ido7GY8qŋ4)jy,u5ҋ/o^㶻9(F@uqR-LϛYp^7D $65W0Vdc^ S! KQqntݬ^k(T'Bk4j)#,T^36E9 wcGgjF,:ޘzJڍ/;FƄ oaLGk3-*!yrQ"> i䴟k\̙ JKE1K}^U*YXœG(g!bMQB? 
y,*ԕ;zШ|u \\ OQ+qkM!\.Z/qZ}lc45NL=7fR&R\l{tB;h_m/Z46vXgvUʶVҺ!$S9g!x2gX_8G~>¢ <󑉕 z(O *Ϡ|7^n/'+r~1GY*B߼c&WC) >htãh\y^<|3 GMBP&kk]HHRTkbTL1#MBt[ﯠi+T(vu]BDeJC[iɻ޲7גtjAy˻82Dl7ZV8GfmxYirR~۟m w!;UYRjZH J@zSk|c`ݻ7ndN T9uק9{q`WqE$[J9&XbbR XS %aVaZ>=p(aeu#&HP̂ byS(˒~ddtљH!]7ŧ8qw׿oO_ͺK?77~ۯ_w7CNOlj]|m(qHTEzIQn4DDZvb rr̴M?9F/[dP" 9ss*ÅL/(j% $hJn&X}wdȸOq7 *fB2V^)b\be:iFqӼ+Ûctq?ͤ 5ڕXšO!0)hߧ)Tx/nq,8gGM((v?LG3mC][";92B!9Bg:j7Zd!äL RF >1ѪY}f͕t!ԕfD4R#S(0zhkS)bBZ)k0uV( *(YU4x+61 R@cɺEwL*DZQ)ZRȧ!{:^)QpNkA2jX*q/7wv{C 1 aƙեY蛝gT4%Y|nr?W'㕲ًGкhstj=l'@Wke׮֙y@ylj5&"b~QiCR)UPǐb}NT*ιDilZnk{OPr8EȘ#3^Z2k6Yòo9Q `Ƣ +B00mNfXDŚ A(`)ĢKJ$K ,d@ÔF`|jÈűoy[["nw1 {Bo_o%X'_^7Ƿrs{ 3D_M;nZSFcCJBij~s7f*v*XUfse, #KR,+œ/VV{3R9@C 9d">u}R1zFtK jׇʲs8.UشԄ7AVC[1Vk)^XThMC_T%Ȕ~J!o W6TN*7IιPZPq˵m+OO⨢Z~H@jN &arMUs)p,0SƒnmnꕲA.^v@Ld5CcCZn8ww~Ur(~Xf,SCiK2nl cjk)j}]nf2-I;u785Jx=V G Ϩ^8-S{C)~dˆ<Ǚ@E.Tu]זuNbngt'8&IoڜnSjv㳣RZLV"n$2xK)&6* rYt~Uo/w mE'ڨﭙe{{!imRXuG_9窪LI)UWs0& C GBœjg EUWrikM=Df_ޟ넉\-Oh]BZ߿e^u.e?tͯ_|xuy}꧟]mh/~׿˺f5\1fW~:z&j[kgVV$ Le4]1a,JҪ mHJ ] ӪQ>!TZ% "oM %/١3mogƪ\d׉anOYҪ4f]icp!)P qJ3e؍@iߥOO7dIbxWowSKѸ,ӣuCr˄p.Tr"A\01 9Dˁ\ƺ4?33 ohʑ$}j+l'+g4O!S,GRTq/B$!!gz=( T?xBhknUN5֙h$VVmDZf*}7_:­7Ns>jZ>t̆a&YO9EAFDV+H8H?BEY^4<~Xߣ?X|x8OS" aQt?qS4~S,">Wᣑ;ņ_P;=|D5)6meck aO_%ߎTeW[d T'M>T[`hPy4raP߽ݥBbmIkW͐J7iۛ3hR,CJV+mt%Ԇ|Jvc(m;I]uMj#e쳄dH) @#zИ J y/ 1YV )PQ0}A8L~UӦ1G&]8͒Df8Kj;EQf%ƈtgSx9nnϏ\mi-1DSi[(#k:q%FmP[./"6VV*& F#0&RJJeHMckr4C9nL)T>;;u{$h|^;* +R*7hk!/ (ȯ+Ԏiq~?äV);ʼnK:5M19H*`,ULx6qXڦֆ2o4iDijE`ͺSZ^[okKEXc{8ÝVWwuMhRM~76`h6Le[Dd7;i=02|Cվ{ȝ{H3?OxӜ&<*{sya6=CG]IQ<mow|g7kþo?p<"?NjZ))v{i 0~S *X:7~8zM$!=\*ňtVqق\mU''ő&&!" v`qztu//6POġ\>w痟:F VE5q!Z;Z%PE$JHpz8=+ڰ12EH1Ð*+s⋽?Zlňn Q=9E1μx8r,l,T+!otav4CH aE*v+!3krgr$T]4=i@ِ~ ]ЗypYi3B;+$_W>}nw'?_ >!ǿoRP75X|UmhmϿll~{|o͛m(+/Ec]Ke%q 1VBMj8D^]F.ce(/YXHxlp_*sBJ3UˢPY7*Sb>"FH+AYS1u}R)EP۩r蜔h N4ŖD1,&ƑŜKBy8:nwH"bnz{v .d*DΚDe\*iIu#D^ C EJZTHu}L^TCU.tGU}v>=gWڪQxgL}ݫ(RTTBr0OcͮC Ø`XǔDcԺg*Ӎ^Vxl=Ɣr69aS|k1Bv_l8MV<-O!݊8:߉r{S.w~ozMI (biR ԍ!gkBe-F__zⳚ8w|@O!Q M\/+D8yy䬁]R(+R)A@gr!h .ZŘviUc"VI'3EsD|??H.6tzl /_-5Rwx>o>NV8qD#nH7Rr_W j#`B\Qn=!%$0!䂵Q>8Rl Ɖb gvLQ?iAeg$k UZn._?_?/^zt^hpmmOXO^/;=yO]E! yş~_g'_/^_q<ċ462F1r|Ty'ҋ0KNXd Ou`n;~l ɗݘN_tq]IDxo*ZV|j? 
r1H d+1 sO"g򘲱haӁec܉a scb6r`M2D#veQ/m}&B%ntb(h cD!ĉ-Վ[nIP}YLSX–DۗU#YS̻ rJ6v YʙiFcQ0UE}] JXzL33u2VI92LRhҨa 2AL9> HB4@eWd7[N=GO3ZI !OCf,kHSb%dZGN\*kQF1%ޓUJIHSX2DS R0!Sz, B >4St+x ש%)ΝmC<|BܭH-~'ƻmO6z%yWj}soݿGYŇ9(yOzo_ԡ7e|}eդF^o~[|y_Y sήisK1V dչ]|եQJ˺Vs)g&~޴fv1_iߕ~h]U kZ1mli!:}1_v##]-Z4f#">K-K3g(8[7EZN3LK0 B4 `Ia&E|}}4ӌV\W}w^ 7_t/Ͼ|GB !/n(Cvaqv}]}v$0?|sLc]YlN:"(VOlHʉd!}QrYlxҺ~Z|Ȩ2j1zi:.㾫{ CAF_r}~1ESQm$'F8$4Vi TZC;5 j՚pw(ȅS㤘 E~ODArZM,j׻cXV1Z.솈z69%> )RHղ ^ڐAVSyjmCE)j%¯^X3[rh,j)gmRݘ3ֹngh] @]$FA2t+l'(LIqJKȩ(#}SUP8b7ʺ- C\.G[ KE;?"*ٶJiN;ͪֈe D0GuzMz%D)A*, 2T0\(y03+s XA891QkmY,Sc EsdQ}ç;z v47A 'y\-T킏=@NwԧYnAn|:;t}wU7vOzwE'{O+PO48a'6ЖveNg3 4kCsbcnH0NXX.OWaFqiF}Dc vɒAV 3jZ6aje9DlDqQ+i`iPB)jK9ʦ~{~T9) WBMt4PѲVm|{zTJG>jMyH̪T8D 8r5nIk#b^|s2C.R6w VAg ?l]R&y&VW]]\ i"x,_F&.`ˍ)O'WK^QqX>}믾c&w7_1{؄4UYdRlE !%ɪ@̔]5zc؇qYK1Ч˭Xw_WS04VZՍ0Da @F֨؊`!a٘mQ@uBL,QOrX1YIx',_wV`ap7<ϥ\KI+UI-k@2s^Y)\IT(VVR.+BynZ~CqE@y7#+D`D6A!=p˳P@F1R/Zϥrj^&; w]Z?я2O@)|wPputc^1 >ܧ&q~T]'_{66(#?‡)m½ Qb?aڕw͢SlϿǶÐt_7ŇYIw!܍ە%7 S4&p7oA޳6ctmCLOYLsY",L2 !)2KD8Y&,ꆺJ]ꇫNmɹ;ώM/%ow81i7Fa~0Beu%$Mg]O1Cbnlri]s;%c~/N' PJjjR*WXsQKg  qn#Vfn,X>PO 7/zy_Z^???ōR9(ZͪkT!>/ nss'ry'G?kQ|]m.uո;Rm?]<-  a$E11AeIZHօ2Bfw_2ݘcn|{(>& iXH[tcnZb N*!yafkths?16hX99X­nbrVh-yDbn_*։ʪ({ J灯CH-s} P/Ԫ!]mz/|q*c*i,$ZJ0fd\X9ZI)nY2*tI2Ԩ)ְk{SbpJ@(uM%ot\;l֒H(L1XcajRLn}w5pw}(NY"Qu A'al#wuDOGRxYUJc§(42n!sZIL1ӽA 7$JrcRq@ZJd(eV\;D(DKQ+}Z&[9*/\.P ~>Ň7YŃ_|?2/{aOnUӽ?/ſd7$Q?WG-͚R?[s^L4/^׵Ҋ/|hharFBըf&J7g//OWFzrX ap}QGںтDz~BhMDЮT>0 )4:}vwO>kmxs>|RGTeT^>_7ϪCJ/V@,[Y9gzl軬Hu\r8}c(*\r Kn@Y+X7{-fN)$~(sl]K`WrFV@N]zil,Sn?eeǿv;!%G%`CZ>Ey{_g?KMsvmc/ۣjanƥ7QV}O'۷ﯾ6ѹ5ο;l].?o_mldeSJP2$XEւ102*[p4ꆘR]me TC|pZ>棅S4NL`,%W!&JZReeH!$R@h&$T%P2s* /RS]269Ksi7ӥ;d5??2 9bjBZ&Evh3&=(ChGǚ%uc Js[piH2 LZV2>R8 D<"_oСr R0ħ'2&*ʱp EUFQ 9J] 9X &ũ "alEn߿:n!)= gʂMxqUv:gm6܂@K:JIԵ"!G8tߧ>&E3[ l،ӄy*+x]C[Yk^SFn(kSJ&U n\~)5=G G# 7YǶ~8[ZG;sx{;]??ں;xUT1|ȟ ?R}Hѽ/n{q{|xCY E@EKU#׶u=w-a 1G]\Jru]c:^XtLJ!ޮOrѱ{g"0zB?3ou]҇wo~|nV @gMuykgK;_X65hiTMC/< Lye媉FEȂ8~p J8Z.|v(@pFG.BOX1oMOn3pl팤Z&i[?o=+ _>l2'VhJb)v_~_v_뷯_??UM?ټ?=QT,)BS, ƱB0~LQ%B5}B*+ R , %SH5n X:K„HPRGmrF@|(j+P<|\k=M84`/!JfJhI]tR) GYd(ݘFA82%$amMs.)h+D}YԢVg(T 4֒Ra^&sA9}LJ R!i)x@ǔu+IxЂ.Ws9#Rʚ"[)1R`ы?Bb\ZgoQ!tR )j #T5Dfge w RZ^B*a+/9AHZ+/YA" Vvn@:jpΪ.xied<&~ymcN3aϽe4){L%W{VwC]SL Xx%ĝz6svݴ+m}<:nOΠQsg'Ik;6_wz ۟Htim>p]9,bI_VZ!j(48-!g#^_\=鋹5"!E߽?>Z>MF}8wb9uyziJ݇o]yBYu'+:^o?^'~#& ~|1tNqbՍPx}؇ZhIbZ%ծWߜY RNR%emљ)0y~Xl%/NWn'kُ1G(~vV0JhAJH E*eXzb7VT0|.O[8B. Mc@h oO<\*goX_,cG?Y/d*_\ * 2LDA".%C}N(j I$ZT8G QfVnm )&URSi!rOn !cb ΂tJEI"(bm.Q'~"H1rezn*\18Rdi{)DNDvdraǓee T+5z&/P̂ڑvc *9cUeN7廳ݳ,1p`U+jHqVcIFA&Ԕ3|%.a=*Y!D!*)XMا:s9I7G#-y #*cj 9gN<5XV0Èʢ:c( ЄlS gQ %znEVR7brt 7C,Jd߱2 %OeG!IMmVwC?90@"uňc@NBD,bﰭbMj6َ\nؒ솔M#*C}n߽/x ╣g|⣲C1t? $a~Ix#J'O#5J)f{Z}wǢLQCm]pӌԇC8y~ķ;!#{zOrn @IucDWD\5 9,9#(0^a?yUW '֮vM-6fQAtcBJ #DQ5gܤo!c490Z-nW}0(֭LA` IA"'.RC\:7x0%T2`FvV/OÉ)Z7aa|֥Wj˶׳pCDbEvrN%1*WPjCX.pp}*VZOtݶQZV&fUÎC>ߜgZGrBgG'/׿r>D b(VbJWr0KEEjȰ$b$iNiUҺl=ćCd6]mG(K:#[Yb4JJ+Gԯ!$H0k]RjJf 0̭]\4 /ۊSw\$X( ,M[Kɐ _Jswr(|ȣ>Z{%“ݴOqѲDz?tGϟ}6dd`w cq˗ƚYTcǼQQycy {Pcvfk#$S|],TE*Q*vī?'fo-vِȱ 1x}|bZdu*niku.>|eu]Ckzӈ\…4Qks'ϚE ³e3>nS$+3wZa @-^jt[++1|h%!d-F2'Y)MJ9|P"RcI* ZYK#<,ŐseNq_>?iWb- ~{?_wCY c$|v_ ^H!UKRЗZi4Zi9U]rr윂 {NXi)x$gO62h-^/>RbwOܳJ7=3 2_iҍRhYC Fḇ@N@\o#Nt!+R!Z DGycITq7qdQlGg%H`QGY 턱d9NcB9!$GHJy7ub`Q(qZ 5|w}b >Y2Nb(vsR 3=! I7}D Z0iZRj9u8ZTF A8v!-mDVJlȠ_uck"XKnrXigsPf\osAWm5P40U?^8fr7t\Ѻpqi 9$⨩v}Ѽu1ZHॲiy' ?t|\Y1!n9`׈..jPTbX8i$R>$h]Sl! 
s܎òv攈3S9EJۺ;HP?\p,/1!Bl9w `n+_'IV:s5eTw[]}n>J>Jճϟ9G >@ OWig_l.E2MxpuB@)hЭO܇u]i8,68ġl7ne7 cbM4!9XPB%/ZsoIDAT؜1M:ln?TZ˜SڊJa@zaE3c,7ߟ4Pطp"cVWgFTi xOXXr'GskCxѝ>RK>֚>de璃 Ge7^j)oj|($]E"lbH% 1ng|(>7|xxNȣ|{O3?%?dy(r|`xyc,W,g}[e9&Ҧ ڨw\puqUybҊ#@.ZAwgݺP$C"r "r, VP@u@?lC 6c,ɗozbeYZ7~u>Ѫ6' .EFBwmfR!CvP%<a3c6F9ʪ P*WJiU0Dܧw$ .k _HB|bώ̮DxS[R!}Nmn|엍 f̝Vk}(#x-D=D\16Ȼom8U#ߜɒs;$ݻê1BD(j3f!1M%`m0pZXTznY՟/J1몉ů&ʾ ]C4Z9+zGmcp},JP-).e !*'Z#&T˥㜳-A|fN1΁ƺu+㉙FM"Vy#-$Z$P8hݻm鑝gn)u[񸅅xȤv?0Rη/\\bbw)xp˜o?*vǕn-G6 ĩ|[1#AšXƴ˳JK$!gwq̒&n$\s'jٔ!6NfWuUcĨaRMB$aD I1$#{ F,1q^}TERg榒t/ezfq><iB8ľrDs0m8CuޜRVn{RS^8='V[kfr"墪sҲr(">IT13M9Y<]!2Iʁ n}=r6覓 8i,Yr#"תKO7f`Kd_!Fm}R[eemʺoUfƖw} yӐDle 9 ʐY] /:q,Q~aebohۇ>f7Aih1З`.5&/0u,%Wv( zS2zÐs^: Os=zJ 0wX(HgS\ۍ/h$KN1`/|'taWiz)uH:ik{(,pfX^F⬆a¯/ iCpE. )=E]"vGǵ!:>?#t}X6wwht}|'| 7۾v 8a#JIW 'DDV9ǕkRt5n\V39E[Y!{w 9 %c  CH4 [#-Ǧ}6ZF\ڒ=k a?nJ+)=B.<4(@<BIZ -}ѐxzBG9xThjb*s݆Mޙq)x'8U,! xIxU{Hz<—'p}߸?;c %.h*d>ѿ aaBN0'I l)z=tfY}pZ<~#+gԧ͎ A%j% $Ut=CVCm}RP<춼p IdyB ,9sL|*IM9D,Vuz@ʙpDas!iS*A"0 h!B}I?mqYE}d`rVw"I0ǩdSxk`.C8؎'f!'#t~9W2'fҼRSu-6K!f IUJYS9`J˺[j هݮmJœqfUfB`I}_+̱~l2W)%Ҡ4oMHڀh핐MޣeUr^Ϫ@]8oc.v(Y`VzmC'P8CEa-z"?e0(qb U /+ :-W<>y>=-/I?0_սdGtGKeLo??|K1( bHwmetQ㤵ƧSq'7OHeNK4`YEv}պ[:riC׆ cCۮf# yw޼2U^]\*Hv}vN9Dp>7k $f ЇC[X/";M?5)>3XRS1gZJL|׳ $:2sxHnQY.1$(`ȭV oZ"I’)%~՛Uc˙րu z/.4J\J`S{uMwb|8 7z!a*g m:&qvTBIZ~j=WAmhP@1hB! g0\U-PG,`v.\TuH5XrI: _4)GSJ T6Ǡ1o`*S}muS! 7( 7B"W ْTT.j%9KNݱHMXMcʉ&0Fx$CF1h492k@sc,y1BjnVӂQRBe^k2b3li.jR}]8t=7ь ]HSvzVYJL %m߻FXʬTYv!EJ#ʉa(RռA}܅KHYTs80MDꇸX\rɠfYl) ٧$HF]*Gh(J \HaT)"jk(me-G3'Q 4VmT qC-0%ifA fܶ-SiPR9f;0 r,1tp~|[ Idl=[ӤcU"ye~5k_M?Ȫrz>~e?\nw]ʩU/j×?ެdx skvv'`h|Q~mݞOW_G_>.Ύ؆(\,sH!t[-"%yhklw?(V޿)Z(KJmZFh7z$ 2ᬮXm;&F BssCL1qUxҏwï当.qS|ۏˇd_n͝1pV)×nT պc(PM˛b! rt,VHL\rɍ+Si{g/HXnw]Ef +A;Z^d2\fpقworQ5[Q=VA3mےP9Ӆ!I(>P|}z] |ht>E4Z*,%D;{g{JqB)M}qQ\̞9OZ4nX$PuS! gJ9\hOyZ*7 SbQx`Tͦ+%OĿNe'jIsx:e?aLu} {gbY[5w_/߯/[3R &=Z3k+ m;_N-(O:n; I/atE { '/a,fCh#!gf>jR+"CʳNɧ՛Ϸ}zmq,/BY:q1ssF B.(<{eLd%Z|P(M7mioV1%paEΗ߼mP\m.BRRfJ1w1r9^U!3}$a*aQ-ok9.$]WjR"Q^SB9?Վ @cD!qn)D 3OnBˋK|/vV|׶jh ]'WpE'3ݧq|q=\/j.+3 P=v@PB 'TE O0F]Z!KNJP&!0B BȜ%BH9rᬒbV flah[yL%PiS'Nʨ1$L=z}SIĪRNb$|jgBBafӮ ѵDM }t XIBdωv%ƸF}I!޴XeZ/\ q*T88 8S,c.z qWJ#+3 Y9:LA)I΁HH%p$KIpq K<1rwHn;>7%>ͫ0bDzhћX1eF8wTӦ e1vY\T G[4:4} F[)}<[~߭Wc;c Uc@a?et1ebY%B?l}S׬uRYTy}p/u{2w~oQ) ƄBS.^!&m"i˺RZ5.gVO $ l&Q].^8KzsQK˕U63d* uBpΙЭ 9XOmŧMwú$;]ԻcT֔إX%4r] (, uj8%$̲lRT !졮Sڇ(7 ?!Ng}HMoӧ;პ I['RV!h瘹;ֈ%j?X߽WS }2!biy*q`iMu & T|M˞18^7UZ懻}?_ԓd%H:y^_ꢚ;C*Z>=W}?ԆQc۬BmJ1@FiE`bʯ/Pp|fn>Vo/՗< /Wڥ` ! ?_^͝9X̝ӡ 3S jد?Kth|<>}]ENK%SIhE17Wr^BF.h(-֯cNdFέeS M (1X>~m4~ׇlxJ#y߰D=+~N3=h'c OOڦFsi/I,RLH|ζr0UVCF 'd%"4.dƣ%j &ضDt0K6D$ ԋ9ݷQk"V&dHEN ?X)?􍓕C qJ̝ϗ+MPr)Z$f̡Vm&.+2FJa(,XC\ct&A6 T^)4jVY"ZsU~㭫Qq9a{(_4S9ƻݐNFCX čSk !JQۻng)C͍{;D1b7rHެʻSj=Y݅pެ|~S8!ĉ|8OfټG: ʙ=@e/:/{Uۙ~IUޓ hwoco߿#A%hw(%M9v{_]/'Kٯi7OuCWG_W_%O\\.WW+m4Redx󕲇ms !ɷ0 -N.%B"?ݢ!Z '>y K~m_*l4{emja6}0&3w?kkKH)pgCB%?p3h$V*-ka> V1ekDVKP) 228!] Z `Q8<Ok8b( 4A.r Đ2*}+'cmbbQ S}SR5RQfCb; ELa=(rJ3KmO RCRR?ȹ+ݾԾup_(ONOmN]&%T mL2d. 
pQI3:pRݑkE$"'W!/2AZ!vGش~f00B2Jj!7.HRvo|>JE֌띳҇h H*D6Iut1L6K7^ C߭͐CI<[DCd2ԒjT|U]C*D ҧvvq Nfyhj8vGA۪0_ΔBBl㾀byt8=v DVRGG%MPo/~"\ aF>sy~$ȧ>V WW ,2g~-riCWoޯ‡RHլg5 qrǹODw]-',/(+|azx>:S|fަ /}aW"wn2&U%G2| Pf*wwQ_~je3cx\0ۂeCn+fF~C,3g#ϷU]̫KʪRr% āK+8ZWIH6pc5ӅǗeU8k rA$)PBL]ճZ $qo6q貔gQ7z67jH!ǛJ|e/@'v_$C|?,I[ۅvOo Tgr_*MNkis/-=r/Ex,yX1F|1΂h|^ntyR9aD1IlQ$b bԃXn%Py9"cb$q98n *O:,h*EF;8 R!jAz*w?nnRf2B+1O}1lQU+c_}u&dR1eD)G'TضIJfZ*mYX gBcݞXmit(ljϫbh8 aAȍET?anҲ+^ 'ӮR u1BuU ^01*95Ns.`D45^9=}露!`U#Ycl܍B D`oƘfV]o|ӫ)󙎡Ї4zV ۮC3)qPJTLm9,ra1f EgiwhsvZݴۦ.3='HЮMsySYKч0I1J؛n ֿz۽X:NɓV}UN _~py$3tE>mƏ⋱8(S7O&Bg#͉x΍< < 'SWI}u_(?_|ӗ]x8H?}t;tjo:w;?~9FYc3@vzp}A}Q[0%EuWu"ФS{D?,ԺC[j]UVuF>3 TB]^[Ή2Zr=GItl8+7.]OJ(( c!n߾"G3e K"JFK`,I,Le +UR> 3]* ^^Y@uE8@v.OJ U3vЗw!xR5̬7Kur|icȎ2Z-0ȌW$$,ds*2<:+Sڗm kuӬyզmKY~vhd; eȞP22~PʺQI R |1JeCЕb HX[}<^|P9v>6PD%K7Ow \:>#ROEiœjwī~@pZQG(YĘJ0VM(h$b.*T h T]}f&Bv@Fj]̈onKѰ^[ SbWc30\JK R&$ `fÁ7]Gg$ٗ"JC\̬X U8],tnh Rox V$[BGmy3R bJܷ*9.ڑCL0TR؇!sLzy8zuclhR9%ڄoΩ'>L++ ,8{O|nb?Yr"bCIES=v.VZ'SL?}mwE#L !~"\3td]vs}-pk ^g<_~;X<&?};=TXos龿b%ʑ˰pFPTA/nvۜCH.tSwCK~8 ͪ6Jt~F9$$œn?Tvl,-F5͵BYJQ }\֪ٵF,Ѕ!"@YPK J LC>6qȕŪC늖7nPRs- IC$$,BJp{Hi4޽oŇo~?_PϨۧh KI GEǗNdf቟O܉<2?:89W`>t ZݾHFb7F ,Hk֨ Fݮ,AR2firf!ymژGW)̸;EE!U5c bh b8}IՆPǐۃR> ׺uR EÐ|+3ӱdusY"tUVC3u-6VaB/rtAP8%Uc>h-L7 M+Cc;kq߯39HYUT H5:Iav!yj#gK nu%./]ׁC*m H bdR.JgdcTwV@|\¶/'lCZlۭ QH&YGN /=ʈЪx3_۔z$}D"zobbsG8P5D!X%sehNNUjT'C_̓Vӳz;=7/q r #KEl~ΪO#0^@(bIjքP6wӒH?p{ش_~n<'Q<@/Ua{W|^<_f鼼~BN,'67w8eۻu~zV-UͿNaI_4kۨ(W#X,/#ڎYVZOq.7^>* ~_/&*UMqL$V(D}A] %- .Ա4s]w IlqTT Mȕu% cyf\V`BJ0t<Β"bR 1EI՜G=ZS!n@) :@JHNa$QS`ĒK1 v[>IA+)Q.@8mS>x胳)Mo)VR )H(rR[ m²aVB]5U5KaJ8VF]I%KWqbɥRZJJ̡ }J|̃j"z7iл1{h5Ce:.2:bYu=JI9 SmjXK,$`IbȬ6VbiUlj) dAfVK&S\J6y?t pxd !5UĆL9+| J1sLBFs ̊Ӕ)x'fo*trԸU?'0S )N6H)5rz$\tQ^\.\e>^[?O~y1JJr8mE=HOWo/b+>-Dp_uT>qzT37_pI+p q}gCp9i#d-}ֳwokRƲ}ownc^6n@1w ,dŔ8>Cyv55]ژY5Y(UE8_\6r""B0dЧXR,omYύR̄$Z4;c$=nC NZ*5z9T$$ M˅"!6|nMηVjySS!"(FK9UgMX9ff5#zj r S!ѩ(xp}J3t23qlM YI_R~KICgѪ 1'q()a-BDMKL2X~EK`DS=Y)Smq6_G,⢶Jѽ2Ij**-+mwPIccHN!}!!>Z\(͜ ˌk4kvii@CW4eQ%f~ Qc!zi$Fh)iX7 E%&ZX|{L$X 1I2VrAF;c8)4JlaZU4Yxևv0R@?SPcʅ}SAp Xl7W$v86蜐hͥCrDgќUA*X[}jUhgph)Ux㻲 uFO;KL}^^vы<=p8U=!& ga)g^h㪩'@.]<(tHȅcnu>_ou֪wճ|1sb^@Y dcJHbjPwf7|h|I`AJ@j#@ P ~E6 iƇ.Nx ۭAHeBF9×/wH+yZUC]d_iLg/%%Qr6)DlQEA\ZHO|&ϔ ;)c|H Tqp#m'1قs~YZTjsp(LR̳y*G-Qԯ~9a1ЬcR/Smp]. Ƀs^E9{@CJ낖%!HE!]ANBy]@\5)y* .G(J"䪢xw\-+ȺTBw嫅ޘqH;#c͊{+mY=a箲8c͓9KpQ@%CHM޿U Cfqh(|z|ΙMSVz_k4l|:0;_|qgiGOEY?+:5"|N /"J>|VvY*!\vw5 ]fd AUV_]ڴ)͛ϯ~?A6f@%PA!]B HĘHJՒU*9['u  #guY)~vfz6ȴi=dRH IRF0=8Λ6lrI采*NɡmVBai ?D}6 m uh^ȗVe̩xaʴ?){rF)IPvܸFy Щwe? a?Nfuss4F-=>pT"'e.)MMR$ p*1蔔N# @p_)1e$&14Y]AA+_TV9@a`,LE]1n ~TR.Oȣ f-`^4N6hv-)0E9#2ê.IAdޥ^םO w 1~ƖR z6} )RȫeY(?7+kwiؚ4Z@Ώsb2OϮc*YkBaeBB.(Ezhs6V,o T(,aWKYH걤 :zQ81O\#ձ1ir4n8gÓ&zw  vU"J6`i2D=Jxhc2B ځ #3H2m#q 7ƣ Ƹ,)0D,Jp,(% )i5pّ|9-2y&7UiSJc!BN;01vf_rl]{nوy;U[ 1' ?RNSZw4rceHHSNg$IG>*M빹 ԹdPFrT0Bh,rLZ.xc$42 ]"Dzv. 
) }썰R>rU`6dHE9҇']*E\8mܨ+τz9s"8?m[upl$S=3ݺ?ܻy5K*7J<f[Jmlf1F+Ƕi[AąwG }6B:%$JF:V|wIZ:G!`/܈* ])Y<Ż;?M 'J*_yJȻ >?{&K>f_\.o__}2lyf4r«ۥ1Z_.,&>g#YLNJe:))3K3(>+P Ppz$:Mp4znU ,P\̿׿g_n~|+~ώmwa1MȌn3d҈97_(Ҩ)7 v B-]]a*h (ArۇҪRR5} ZZMRz0߯ WTD2Arݦ++I*?F.DS*YJ})7ũYtʙsϳKyY߹ﱐ]S:9yt~1k?_LxQ@74EϮ|{e(Hyme@1L9:lը$*6}HH `' g$ɼݦѻf3'FFчlt2Ũxi##$w4vX#Rqg+sseG 7R(u0%r#cn1&fЋJx"-rq1x_FոnR 1][>[-p 9p3+9囹 RFYh\; p4z3|h`A"8*cQX\z^J-9x*$ 4FuVF l~_$RFe 0 Pxu[] H2#rG!PH#3K;bCrTQQ =EytAdGN uc4<=#'sRypD+ w_ͧjML2k}DAF&1l+|UȢCYJd@VO2@mԻH!G($iT*@涍SqVE>47J}MJ؋b0jkK".1dA2tW\S E !.+r)m0TVj(RQ,  ?6҈a]6=Dc`4R6Č<|q]}BH]J)@ ((z/sJTR]j]bȋ&!p4^4tyQ^J`mm=S(Ab6W3?lcuOn[Pv3 PՂ=V>׋S#sP 1Juo`dd !D%J&j $3<.nVK)4ܴ_̶[75RL9[#7(_e0Qye>?6UGeqڜg^cG@is A>kS6O?<㩧K|*?'ϾbLy|iSO" >BLZsJMsy韟&JNoUm>,h)Ȍ ,SSIc kSFw{XL ػ7J ]"Bi\ת."d~xHX # %E0Daˆ]̙O${TZ*3f-*63%Hm}_ݚrUdtpN Oބ*̠n٩_~giў?9yXtY q9Ťju#7Dv],*ΒХd{iaTC2V@BjȢTa g).q=3knfUD^, OrU UjI `עgtew dFI HYPA"UZճ2D!͐)9(DbP:!d@IXϑ%JaJ mv\];I]`hpbH!܆m% =Bռ$ʘF 6uBm/__ǘ.rV"'`J(R;]BdOða;]GT;7ha6`kh/hT e)!̻^b*MD*㬴*߭KkU=w.q%0KkS]3BSaBs1z4#!b)KCЇafށFbpnȪU~n݇|Xw}8TЯAv_,4ߊcO:@VnqGaܡxb`3'٦˝yʑxf֝Nb]tܒbL~l7)?^I?¥W ^"5!`Ǐ@srJ9gن9#a~ӏUޠT,e_,p9i?*W%>n+ 4g2u}XP]sh(}ʀEVDY !eȜE#DYIr"r`ڕL!γFPBn{}UШUHF %jRH+*̀Zׯ y0f,mC- *FM11I"lT(Q2ښ!!'A{Oyl; cFW%(N2f 18b\VGTQL1,8ƐWtԕ%DitC~~rn?:ktL >lZku{7̌$iHG!!cl96]W"#(jYEҷylFZ2:E@ HPir1 )eQZ|gk)J(RH" _73k 0˶)IZSФLCLziEUĘtk_H_̪7^BU3FjQٺ?kjc(5+isNYWXn|M9k\^,$6ִX̔'?"~zYo{3_^ݮżDPXfAV|YkHv]\ͧOQn(>rׂu< oRފσU{No&O3'GL/01g<+p9Ex("~cQEv`nwr]2m_\moӟ_zQO߾y zC{mj|O ^+bu귋?ehbǟ(R ޑt3P)I \(E4I2#m&bIi3 SҶCg h9\!04.)`QʢMKS9;}lMźde ;u2(ykS=r?')}0:g kfډy ?g SoqDۃ3݋Oi's>E/3,X穆:҆f>Ds׿^[+=z] RZf B*K.mJ-JKBCT35-.pT۶clbZBdz+~Y-KVo~zBյz|4jXbH&}neu,Վ=9xU蛘JmzѱP(U[dSʻpZ'DTDm 譑w~0)T[ 9zD([`L R7_wymgV"~y3kWVR K2ۡ}qmwe$~.+jK8:Ґ9:4>y9W#;fDT6}G /nnݠoo*0<[[W&昲/F4Za{U/9B4 LI*Z1 +΁̅B\9$Rݐ A*̘VPX=ʓsWe@> T䨉Njv3C/qHN:9[ҕ')SgFFm]-vͱElTx*-1eޭ?-oZq򉆦]\O<x)ғz䘟z<>YG8!sS/238lmMJmYNkېPj34(\Nf*^we-vٴ\|0?YVVx%\(#:,n%_SZ67l^0 Ybb<9ca@Ȝ`>QYD8șuA$?=v[Zxo~ERXU2 2E\#S|o7mSww_UTfR_VG8%ֹ0;ق-Et({4ic̉yj{J1cW Sie@:2LQjN{Ҏ6Y':ڸjkwDyGCb\w]Ld4ZLTO"#<ZȑffDIAHF`pCo:" 1.ֻaY%t> &jfzmARB+.hż *QÙY8&)Ji+W;U,8bH͔"s; ΍KR%W)I)9Ǯ VռsT1LI8A+keVmHUENIy 0Zfr{T rU)dA |J [WZJ"9F Rb zcCh$\i=x 4&TW2|xVE]R"p] aU(8!j1Ozq13w%[ ]7 D!GRfN2Lw 7׻)wVC[_a=/N8Q=&e)};f>f?jU!:dJyjWuY']BPf^+/gQ)d"p.׳~ @?qsu5?M/|9ݿq-!_ճ@zo>[?_OR|J>lֿ#k"ij5$PR(}irBG -wD9/_-+=P7Z$!<@;Yx  QУ䐼 a&'?IquU `4ږI*n7K5tݤ1ns#k.w_7~xUW:L%UڪzHܩ5ĉ2z"׺sneEzڨVԯCG; n`BOO\C*x>Zث,o 4pVI$B$l\P$2>ddJ-G[1Ś*^h}BW+[TιcJbf8ԥy\Q(*40,k%"&GJ 0*c5$-Exe 09\2G;)kU=m<*I3օR|l;mY)A#T)`PZ:(@CEgV=xYHm8 }JKahQc) C_>צ 9 }%6*p`e9z%i}i4OD[T?t|&7D]; 89_.Z ˪nYdF|tIqMMdaU7C^߮vn"sxh֘G׍1?H,RzVjBZ+XȪaYB w)hi4Hx0~ w6͛滿|l˙2R1OQZbȄ1vx/R{B0̏JlLN=7nKK/o|߃V|DH8'?πs#i rMQN$=7M!T)cNt ],+HW_X(Dw_&3c1}QZUh,?cRJ8a):4tޥ7O|w!Ez,0pp~Ȧ޸.bc uHq. Υ}pJx63Z`3) 9h 6nAqUȟ-+ak33 teRҶ VV2a4| Q#5ɔ8Q&I.DEFFBebea!!|biͶy>ۺ.y߾kVwz&o]1wYvof_"yj;7_^ߴCo ]{O]XTfna |^oߦP_ݾ77R^u׿X5n^c7! 
~v6}uvj8Ew)^g07fT: Q0"jS)' z.N{t,A>7y0~xMyCu%Q""z$E?LSf|9P<Ϟnzu|rz}b|W4 0mڪ} RԋܹEQ_ίbi-@wv~(V˃ {ŁW QS*ðe$9 * 5LQmMrՙ53=R E.Wҧ])pmopO˵}8[Ow: <'j*DApyb_/.y!mtV^~ŗ5$?t~q}::=A>{G_><=]l/"o7;w9;<:;񗗻ϥރӃO?_q}7g_={飐M";/N/.ONb<}o^>?:i2ztiTФ i}?8OƂ?DX.Yo Kr͗ҵ#7EQթ`:YfUh7KmNw#&A\/Τ#t cS PB_@k[܌uLRQ +P;9ݲZ&c͒#w1C̾jӄU&a3;fFa9ݗW O"F0 23lJSd+wx@4IZr.,?JU~Ϫ9,~,BrОlo%lu%$%Ś~a[l1.a,4I.K> )?hWy /q"RKB*LF XIOVz0)iDC&9_v g/xJ/RGjFdzi\UǓG8‚*Am #]Re>Bg &Ta'TF-%(<N&x&c3!0T;>::p,NJ$4ӎNaQeWD%+Θ:C˦xY$^0n:e\Hcv2cchBNf<^e^5Z0 bed0PumLUlnSVv س1U!s c(Ub[H@`H̪08@9eZ`Ԅ}몇Ӝ7"5y`]үNZ0Abk4I4L`J 5rmڞw|?ݳV3tt7+d4/j ElǀSD 'B+΢7N4ළRQ-flna0PgIJxxdDS5wxZ&a`tƲr%ĢIh"SX݄Qtp՘Uq e[̱FG1X׸,ĤcPҠ[B5\خE9킂,%ZNWbItED9QK)n>Y*yŽWQ(H@Vg%Jl,8;aU ŒCۄO|GGxqtR&X:dF+#HC;E8[߉ pSZBr޺cJ"0"u7 \32|J]!R$9utӶ6=Lois,MOoif|1:2MLTB<EТoωCu[dݡVsw ( %\#nK3mF>؈k 1gj~ʒ^a hc@ Ň)Md >>)[3?H1KXg?uG8 @rT2N5Z '$^ RLe{:Jr^տL&ܼ~gW!RȐ%}Bk,IqP3&o'd~DM:_76 W u{Q͖]nhr},GfMaxsY*>yv<0N}DRX !`: fbEa(<[;2UFE+EB5bTg< ~2x9x6kJ1hC?'h L]d4)Z*lbjx +sa^*8UqA wʞhU>Uo !yo%L run]t# c*b9  N7:^ rw{+ zNZWTUmyHvu,OOT’ t(qy!v 4,@{Jz OLX@_G-oؓp9":20-yg! POHJkcAD QF>xZ&wBͣ "e!j-jtuGjܸN5iVV1^F53?D7^8P3Ioif'+n"+'6fZNLϒ#Ӕٯ.%moV()?`3?BL\~]LKKBsKKE'ձ?H1:ԽzvYLᬆ  w JHSt6ϩCMw3.]Uߔ29&ګ̤4΅+Ex\g(Xl6Ϛ  |ڝWƞT.υ D;FԿi&KM 6iuASlV_G;Ү 0R8G!$ 'icfQE9mP ,&.5!LEP?A>Ylo7we%Ԛ,2ӝ_g<%ڬ8rv)W~5..)3Pì`b,4d>U@%`C F| RzdPΑ17Vg̽ͪ32b~M/J\e.d8h̳3 AHY\wHgTA0yNq"˩{T\JAyg@T**Ngq&)vYEj(o9o0&xgZteéϚ h@ 6"T1v,:"JXBǓ$y̫zwZ#,fFFY#W l4K#SEUي,Jf@`ݚoB&mpĚ~1C}SM"oXS5&IkH5v5XDR?iiܝIGD"FpΥ}ԭa`Lqm>7w~pbj$Bg$'DLҒхyV |8FS zo!oHL6G>;*P_bZΊԮ-z% M_855Qe*06IENDB`Seurat/tests/testdata/visium/spatial/tissue_positions_list.csv0000644000176200001440000055173213712563445024663 0ustar liggesusersACGCCTGACACGCGCT-1,0,0,0,1487,1480 TACCGATCCAACACTT-1,0,1,1,1607,1549 ATTAAAGCGGACGAGC-1,0,0,2,1487,1617 GATAAGGGACGATTAG-1,0,1,3,1607,1686 GTGCAAATCACCAATA-1,0,0,4,1487,1755 TGTTGGCTGGCGGAAG-1,0,1,5,1607,1824 GCATCCTCTCCTATTA-1,0,0,6,1487,1893 GCGAGGGACTGCTAGA-1,0,1,7,1607,1962 TGGTACCGGCACAGCC-1,0,0,8,1487,2030 GCGCGTTTAAATCGTA-1,0,1,9,1607,2099 TGCCTTGCCCTTACGG-1,0,0,10,1487,2168 GACGACTTTCCAAGAA-1,0,1,11,1607,2237 CCAGTGAGCTCCTTGT-1,0,0,12,1487,2306 ATACCCTGGCTCAAAT-1,0,1,13,1607,2375 GGGTTTCCGGCTTCCA-1,0,0,14,1487,2443 TAACCGTCCAGTTCAT-1,0,1,15,1607,2512 AAACAACGAATAGTTC-1,0,0,16,1487,2581 CAAGGGAGTGTATTTG-1,0,1,17,1607,2650 CCAAGCTTGATCTCCT-1,0,0,18,1487,2719 TTATTTCATCCCAAAC-1,0,1,19,1607,2788 GAGCGCTATGTCAGGC-1,0,0,20,1487,2856 TATGGCAGACTTTCGA-1,0,1,21,1607,2925 CTTCGTGCCCGCATCG-1,0,0,22,1487,2994 AAACGGGTTGGTATCC-1,0,1,23,1607,3063 TGCAAACCCACATCAA-1,0,0,24,1487,3132 GACGGGATGTCTTATG-1,0,1,25,1607,3200 GGCGAGCATCGAGGAC-1,0,0,26,1487,3269 CGCGTGCTATCAACGA-1,0,1,27,1607,3338 TGAAACCTCAACTCAC-1,0,0,28,1487,3407 CACATAAGGCGACCGT-1,0,1,29,1607,3476 TGACCCAACTCACATT-1,0,0,30,1487,3545 ATACGCCGATCTACCG-1,0,1,31,1607,3613 ACTTATCTGATCTATA-1,0,0,32,1487,3682 GTGTGAGCCGAGGTGC-1,0,1,33,1607,3751 GATGATTTGAAACTGG-1,0,0,34,1487,3820 GGGAACCACCTGTTTC-1,0,1,35,1607,3889 GTTCGTTGCGGACCAG-1,0,0,36,1487,3958 TGAGGTTGATCCCAAG-1,0,1,37,1607,4026 GATGCCACACTACAGC-1,0,0,38,1487,4095 AGGCAAAGAGGAATCA-1,0,1,39,1607,4164 AAGTAAGCTTCCAAAC-1,0,0,40,1487,4233 AACGTAGTCTACCCAT-1,0,1,41,1607,4302 GTTTGAGCGGTTATGT-1,0,0,42,1487,4371 GAAGCAAGGCAATGTT-1,0,1,43,1607,4439 TCACTCAGCGCATTAG-1,0,0,44,1487,4508 TACAATGAAACCAGCA-1,0,1,45,1607,4577 GTGCGCTTACAAATGA-1,0,0,46,1487,4646 GCACTCCCACAGTCCC-1,0,1,47,1607,4715 CGAAGACTGCCCGGGA-1,0,0,48,1487,4784 CAGGATCCGCCCGACC-1,0,1,49,1607,4852 CACGATTGGTCGTTAA-1,0,0,50,1487,4921 GGTTGTATCGTGAAAT-1,0,1,51,1607,4990 
TCTTATGGGTAGTACC-1,0,0,52,1487,5059 TACAAGCTGTTCACTG-1,0,1,53,1607,5128 GTATCTTGTTGCTCAC-1,0,0,54,1487,5197 ATACCAGGTGAGCGAT-1,0,1,55,1607,5265 CCTAAACAGGGTCCGT-1,0,0,56,1487,5334 ATGGTGCTCAAAGCCA-1,0,1,57,1607,5403 CAAATGCGGAGTGTTC-1,0,0,58,1487,5472 CGTGCCCGACATTTGT-1,0,1,59,1607,5541 GTATCTCCCTAACTGT-1,0,0,60,1487,5610 ATTTGCCTAGTTACGA-1,0,1,61,1607,5678 ACGTCCTAAACGAGAT-1,0,0,62,1487,5747 CTGGGATCGCCCAGAT-1,0,1,63,1607,5816 CTGCAAATGGGCTCCA-1,0,0,64,1487,5885 CATTATAACAGGGTCC-1,0,1,65,1607,5954 ACCTTTCCTTTAGAAG-1,0,0,66,1487,6022 ATAGATTTGCAGTCGG-1,0,1,67,1607,6091 CTCGGGCATCGTCGGG-1,0,0,68,1487,6160 GTGGCGGGCCGTAGCT-1,0,1,69,1607,6229 CAACAGTGCCAAACGG-1,0,0,70,1487,6298 TGCGGGTATTGGGATC-1,0,1,71,1607,6367 GTCTCGCCAACACGCC-1,0,0,72,1487,6435 CTGGGCGGCCAAATGT-1,0,1,73,1607,6504 TAAAGGAGAAACTAGT-1,0,0,74,1487,6573 TCCCACGGAGGGAGCT-1,0,1,75,1607,6642 AGCTTCAATACTTTGA-1,0,0,76,1487,6711 TTCCACATTTCTCGTC-1,0,1,77,1607,6780 ACAAACCGACAAGGCG-1,0,0,78,1487,6848 AGACGGGATTGGTATA-1,0,1,79,1607,6917 AACCTAAAGCCGTCCG-1,0,0,80,1487,6986 TACAAATTGCGGAGGT-1,0,1,81,1607,7055 CCCGCTAGAGGGTTAA-1,0,0,82,1487,7124 CATTGCAAAGCATAAT-1,0,1,83,1607,7193 TGTACGCTATCAGCTT-1,0,0,84,1487,7261 TTCTTCGCAATAGAGC-1,0,1,85,1607,7330 TGTGATTCCAGCGCTT-1,0,0,86,1487,7399 ATTCAGGATCGCCTCT-1,0,1,87,1607,7468 GCCCATGGGTGCAATG-1,0,0,88,1487,7537 TTCCCGACGCTTCACT-1,0,1,89,1607,7606 AGCGGTTGAGATGTAC-1,0,0,90,1487,7674 GCTGTCTGTGATCGAC-1,0,1,91,1607,7743 AAAGACATGAAGTTTA-1,0,0,92,1487,7812 CAACAGAATAACGCTA-1,0,1,93,1607,7881 TGCGGTCTACGAGTAA-1,0,0,94,1487,7950 AAGACTCACGCCCACT-1,0,1,95,1607,8019 CTTTGAAACATATTCC-1,0,0,96,1487,8087 CTGGGCACTAGTCGGA-1,0,1,97,1607,8156 CGCCCTTACATCCACC-1,0,0,98,1487,8225 CACGACCACAGACTTT-1,0,1,99,1607,8294 CAATCCATTATCCGTT-1,0,0,100,1487,8363 GTGGCGTGCACCAGAG-1,0,1,101,1607,8432 CGGAGTCCTAACCTGG-1,0,0,102,1487,8500 GGTCCCATAACATAGA-1,0,1,103,1607,8569 ATCTCATAAACCTACC-1,0,0,104,1487,8638 TGCATGGCAGTCTTGC-1,0,1,105,1607,8707 TTGCAGGTCATGAAGT-1,0,0,106,1487,8776 AGCTGCATTTGAGGTG-1,0,1,107,1607,8844 TAATCAGGAATGCTGC-1,0,0,108,1487,8913 CCATCATAAGAACAGG-1,0,1,109,1607,8982 TCGTATCACCAAGCTA-1,0,0,110,1487,9051 ATTCAGATGAATCCCT-1,0,1,111,1607,9120 AAAGGTCAACGACATG-1,0,0,112,1487,9189 AGCTGCTGTGCCGAAT-1,0,1,113,1607,9257 CTAGCGCCAATCCTAC-1,0,0,114,1487,9326 GCTCGACCGAACTGAA-1,0,1,115,1607,9395 ACAGTGCAGCGCATTT-1,0,0,116,1487,9464 CGGCTGAAGGTTACGC-1,0,1,117,1607,9533 CACCTCTACGAGTGTG-1,0,0,118,1487,9602 ATACGACAGATGGGTA-1,0,1,119,1607,9670 ACTTCCTGTCGTGCGA-1,0,0,120,1487,9739 CGTAACGGAACGATCA-1,0,1,121,1607,9808 AAATCACTCCTAAACG-1,0,0,122,1487,9877 CTCCGAGTAAATCCGC-1,0,1,123,1607,9946 ACGCTAGTATCAGTGC-1,0,0,124,1487,10015 AGAGTGAACAGACACC-1,0,1,125,1607,10083 ACACCCGTAAATCTGT-1,0,0,126,1487,10152 GCTTTGCTGCCGGGTA-1,0,1,127,1607,10221 ACAGGAGGCGCAGCCG-1,0,2,0,1727,1480 AGGCAATACGGAGGAC-1,0,3,1,1847,1549 TGGTGTGACAGACGAT-1,0,2,2,1727,1617 ATCTATCGATGATCAA-1,0,3,3,1847,1686 CGGTAACAAGATACAT-1,0,2,4,1727,1755 TCGCCGGAGAGTCTTA-1,0,3,5,1847,1824 GGAGGAGTGTGTTTAT-1,0,2,6,1727,1893 TTAGGTGTGACTGGTC-1,0,3,7,1847,1962 CAGGGCTAACGAAACC-1,0,2,8,1727,2030 CCCGTGGGTTAATTGA-1,0,3,9,1847,2099 GACCGACCGCTAATAT-1,0,2,10,1727,2168 GGTATCAAGCATAGAA-1,0,3,11,1847,2237 TGCATGAGTAGATTCG-1,0,2,12,1727,2306 AATTCCAACTTGGTGA-1,0,3,13,1847,2375 TGCCGATGTCATCAAT-1,0,2,14,1727,2443 GCTGGGTCCGCTGTTA-1,0,3,15,1847,2512 TGAACACCCGAAGCAG-1,0,2,16,1727,2581 AACATTGGTCAGCCGT-1,0,3,17,1847,2650 GTGGGTCTTCTTTGCG-1,0,2,18,1727,2719 CATCGAATGGATCTCT-1,0,3,19,1847,2788 GCTACACTGTCCGAAC-1,0,2,20,1727,2856 CGGGTTGTAGCTTTGG-1,0,3,21,1847,2925 
CCTAAGTGTCTAACCG-1,0,2,22,1727,2994 TCTGTGACTGACCGTT-1,0,3,23,1847,3063 TTATCATACTCGCAAA-1,0,2,24,1727,3132 AGCGTAGCGCTAGACC-1,0,3,25,1847,3201 TCCCTCCGAAATCGTT-1,0,2,26,1727,3269 AGGTCGCCACTTCGGT-1,0,3,27,1847,3338 CTAGCAACTAATTTAC-1,0,2,28,1727,3407 TTGCTAGCTACCAATC-1,0,3,29,1847,3476 GCCGGTTTGGGCGGAT-1,0,2,30,1727,3545 TGTAACTTGTCAACCT-1,0,3,31,1847,3613 CGAGATGTTGCCTATA-1,0,2,32,1727,3682 GTTACGAAATCCACGC-1,0,3,33,1847,3751 CTTGTCGTACGTGTCA-1,0,2,34,1727,3820 GCGTCCAGCTCGTGGC-1,0,3,35,1847,3889 CCCTTCTCGTACGCGA-1,0,2,36,1727,3958 CCAAAGTCCCGCTAAC-1,0,3,37,1847,4026 CCGCTTCGCGGTTAAC-1,0,2,38,1727,4095 GTTACGGCCCGACTGC-1,0,3,39,1847,4164 CCCGCTTGCCCTCGTC-1,0,2,40,1727,4233 TAGTGAGAAGTGGTTG-1,0,3,41,1847,4302 CGCTACCGCCCTATGA-1,0,2,42,1727,4371 AAACAATCTACTAGCA-1,0,3,43,1847,4439 GCGCGATGGGTCAAGT-1,0,2,44,1727,4508 ATAAACCATTGGACGG-1,0,3,45,1847,4577 TCGGGCACTTCTGGAT-1,0,2,46,1727,4646 TCTGTGGCTACATTTC-1,0,3,47,1847,4715 CTCTGTGCCTGCTATG-1,0,2,48,1727,4784 CACGACTAAAGTTCTG-1,0,3,49,1847,4852 GAGGAGTAATTCCTAC-1,0,2,50,1727,4921 AGAGGTATCTCGGTCC-1,0,3,51,1847,4990 GGCGTACCCTATATAA-1,0,2,52,1727,5059 GCCGGAAACACATCTT-1,0,3,53,1847,5128 AAATGTGGGTGCTCCT-1,0,2,54,1727,5197 ACCAGGAGTGTGATCT-1,0,3,55,1847,5265 TGTGGAGGAAGCTTAA-1,0,2,56,1727,5334 AAGGAGAACTTATAAG-1,0,3,57,1847,5403 CCCTCGGGAGCCTTGT-1,0,2,58,1727,5472 ACTGTTTAGTGTAGGC-1,0,3,59,1847,5541 CGTCAGTTTATCGTCT-1,0,2,60,1727,5610 GCGTGTATGTCGTATT-1,0,3,61,1847,5678 ACAATCGATCTTTATA-1,0,2,62,1727,5747 CAGCCCTCACAGGCAG-1,0,3,63,1847,5816 CGCGTCATATTAAACC-1,0,2,64,1727,5885 GAAGACTTCAATGCCG-1,0,3,65,1847,5954 TTGCGGCGACTCATGC-1,0,2,66,1727,6022 ACCAAACTAGAAATCC-1,0,3,67,1847,6091 TTACTGTTTCTCTACG-1,0,2,68,1727,6160 GACCAGGTCATTCATA-1,0,3,69,1847,6229 TTCTTCCCTTTGATAT-1,0,2,70,1727,6298 ACGCCCAGCTGTCGAT-1,0,3,71,1847,6367 AGTAGCGTGAACGAAC-1,0,2,72,1727,6435 CCTCGACCCACTGCCT-1,0,3,73,1847,6504 AGTTATTGAAAGGTAA-1,0,2,74,1727,6573 TCAGTTACGGAATGAT-1,0,3,75,1847,6642 GAATCTATACTCGGAC-1,0,2,76,1727,6711 TCGGCTAACTTCCCTT-1,0,3,77,1847,6780 ACGTGGTCGAATGTGC-1,0,2,78,1727,6848 ATATCGTGCCAGACCC-1,0,3,79,1847,6917 GTAGCTAGTAAGCGCG-1,0,2,80,1727,6986 ACGCTTAGTGTCTCTC-1,0,3,81,1847,7055 TCCGGCCTGCATCGAT-1,0,2,82,1727,7124 TAGTGGAACTCATACA-1,0,3,83,1847,7193 ATCATCTGCCCAGTGT-1,0,2,84,1727,7261 GTTATTAAATACGACC-1,0,3,85,1847,7330 GCGCTAAGTATGCATG-1,0,2,86,1727,7399 CCTGACGCAACCTTTA-1,0,3,87,1846,7468 CCCAAGAATGCACGGT-1,0,2,88,1727,7537 AACTGGGTTCGAGCCG-1,0,3,89,1846,7606 GGTTCCACCCGCTTCT-1,0,2,90,1727,7674 CATGCACGTGTTACTG-1,0,3,91,1846,7743 AGCGTTCCGATTTAAA-1,0,2,92,1727,7812 CCTACGCGACCTTACA-1,0,3,93,1846,7881 CGAATTACATGGTGTT-1,0,2,94,1727,7950 GAGGTCTTAGTGGGTC-1,0,3,95,1846,8019 GCCGCTAGATACGCAG-1,0,2,96,1727,8087 GTCACCTGTCTATGTC-1,0,3,97,1846,8156 CCGATTGGTCAATGAA-1,0,2,98,1727,8225 CCTGTGCGGATTGTAA-1,0,3,99,1846,8294 TTACGTAGCGCGTGCT-1,0,2,100,1727,8363 GGAGGCGAAGAACCGC-1,0,3,101,1846,8432 GGGTCACGTGCTTATG-1,0,2,102,1727,8500 GCTCCGGACGTTGATA-1,0,3,103,1846,8569 ATGTTTGTAAGATCAT-1,0,2,104,1727,8638 TGACCCAGCATTCCCG-1,0,3,105,1846,8707 TGGTCGTTTGATAGAT-1,0,2,106,1727,8776 TGTAATGCCTTCGGAC-1,0,3,107,1846,8844 TGCTCACACAACAACC-1,0,2,108,1727,8913 TACGATCCAAGCCACT-1,0,3,109,1846,8982 TTGTAACTTCATAGCG-1,0,2,110,1727,9051 AGATTCAAGCGGGTCG-1,0,3,111,1846,9120 CTCAGCAGACTGCCGA-1,0,2,112,1727,9189 GTAACATCAGCTCATC-1,0,3,113,1846,9257 ATGGAACAGAATAAAC-1,0,2,114,1727,9326 GGGCCTATACAACCGG-1,0,3,115,1846,9395 TCAAACAATTAGGACA-1,0,2,116,1727,9464 AAACCACTACACAGAT-1,0,3,117,1846,9533 AAACGACAGTCTTGCC-1,0,2,118,1727,9602 TTGAGGGTCGAACGCG-1,0,3,119,1846,9670 
TGTTGATCACTGTTTA-1,0,2,120,1727,9739 AGGGTGTGCTACACGC-1,0,3,121,1846,9808 GTAGTTAGACAATATA-1,0,2,122,1727,9877 AATGGCCGCCAATGCG-1,0,3,123,1846,9946 TCGGCGGTATTAGATT-1,0,2,124,1727,10015 GGGTCACTGAGTAGTG-1,0,3,125,1846,10083 GAATTATGCAACCTAC-1,0,2,126,1727,10152 GATCTTAGTGAACGTG-1,0,3,127,1846,10221 CTAATGCGCCCAACAA-1,0,4,0,1966,1480 GCCACCCATTCCACTT-1,0,5,1,2086,1549 TACTCACAACGTAGTA-1,0,4,2,1966,1617 GTTCGGTGTGGATTTA-1,0,5,3,2086,1686 TCTTTCGGCGGGACAC-1,0,4,4,1966,1755 GGAGACATTCACGGGC-1,0,5,5,2086,1824 GGGATTATCTCACAAC-1,0,4,6,1966,1893 TAGAACGCCAGTAACG-1,0,5,7,2086,1962 ACGAGTCGCCGGCGTT-1,0,4,8,1966,2030 TGATGGGACTAAGTCA-1,0,5,9,2086,2099 TGCGAGAAACGTTACG-1,0,4,10,1966,2168 TCGCCTCGACCTGTTG-1,0,5,11,2086,2237 AACTCGATAAACACGT-1,0,4,12,1966,2306 AGGAAAGCCTCTGATG-1,0,5,13,2086,2375 GAAGGACTAAATTGAA-1,0,4,14,1966,2443 GTATCGGGACGAGCTG-1,0,5,15,2086,2512 CCTGTGCATAGGAGAC-1,0,4,16,1966,2581 CATACGGGTGCATGAT-1,0,5,17,2086,2650 CCACTAAACTGAATCG-1,0,4,18,1966,2719 AAATTGCGGCGGTTCT-1,0,5,19,2086,2788 AGTCCAGCGGGTACGT-1,0,4,20,1966,2856 CATTCAGGTCAGTGCG-1,0,5,21,2086,2925 CTAAAGTCCGAAGCTA-1,0,4,22,1966,2994 AATCAGACTGCAGGAC-1,0,5,23,2086,3063 AGTATCCATAATAACG-1,0,4,24,1966,3132 CTGGCTGCTAACGTAA-1,0,5,25,2086,3201 GTTCCAAGACAGCGAC-1,0,4,26,1966,3269 AAGACTAACCCGTTGT-1,0,5,27,2086,3338 GATTAATCCTGGCTCA-1,0,4,28,1966,3407 CGCGCAAGGAACTACA-1,0,5,29,2086,3476 CAGTAGCGAGGTAGTA-1,0,4,30,1966,3545 ACGGCGGGTTGCCCTG-1,0,5,31,2086,3613 CTAGGCGGCAGAGAAT-1,0,4,32,1966,3682 GTGCGCAGCTTGCTCC-1,0,5,33,2086,3751 TCACTATCGTGCAATC-1,0,4,34,1966,3820 TATGATTCTGCTTGGT-1,0,5,35,2086,3889 TAAGATTTAGCGGGAG-1,0,4,36,1966,3958 TTACGGTGTCACCGAG-1,0,5,37,2086,4026 CTACACTAGCTTGTTC-1,0,4,38,1966,4095 TGAGCAGTCGTGAAGT-1,0,5,39,2086,4164 CGCTGAGGACGTCCAA-1,0,4,40,1966,4233 GTGTATGACTTTAAAG-1,0,5,41,2086,4302 CTAAACGGGTGTAATC-1,0,4,42,1966,4371 TGTACTGTGCCAAAGT-1,0,5,43,2086,4439 GGCCACAAGCGATGGC-1,0,4,44,1966,4508 GTCAATTGTACTGAAG-1,0,5,45,2086,4577 AGGGACAGCACGGCGG-1,0,4,46,1966,4646 AGCTTATAGAGACCTG-1,0,5,47,2086,4715 AACTAGCGTATCGCAC-1,0,4,48,1966,4784 AACTTTAGCTGCTGAG-1,0,5,49,2086,4852 CCCAAGACAGAGTATG-1,0,4,50,1966,4921 GGCATCAACGAGCACG-1,0,5,51,2086,4990 ATGCATTCCGTGATGG-1,0,4,52,1966,5059 TTATAGATGCACATTA-1,0,5,53,2086,5128 GAACCATCTGGGAGAC-1,0,4,54,1966,5197 TGCTATACAAACGGAC-1,0,5,55,2086,5265 ACTTGCCATATTGTAC-1,0,4,56,1966,5334 TATTCCGGCAGTCCTA-1,0,5,57,2086,5403 GACGGACCGCGTTCCT-1,0,4,58,1966,5472 ATGTGTAGTTTAGTCA-1,0,5,59,2086,5541 ATACCAGCAAATTGCT-1,0,4,60,1966,5610 AAGTTTACTAATGGCA-1,0,5,61,2086,5678 CTCTCGATGTGCGCCT-1,0,4,62,1966,5747 GATTGACACTCTGCTC-1,0,5,63,2086,5816 TATCACAGCACGGGCA-1,0,4,64,1966,5885 ACCGTTCCCGCTCTGA-1,0,5,65,2086,5954 CCGCCACCACAATCCA-1,0,4,66,1966,6023 CATTCACTGACAGCTA-1,0,5,67,2086,6091 CGGCTGCAAGATTAAG-1,0,4,68,1966,6160 CATGAACCTCTTATCA-1,0,5,69,2086,6229 TTAATGCGAGGTAACT-1,0,4,70,1966,6298 AATAAGTCCTCGAGAC-1,0,5,71,2086,6367 ACCAGCCCGGTCTTTG-1,0,4,72,1966,6435 CTACGAACTAGGTCGA-1,0,5,73,2086,6504 ACATCTCAACGCGTAA-1,0,4,74,1966,6573 CACTACTCAGTTCTGT-1,0,5,75,2086,6642 CCGACTCGCATAGTCT-1,0,4,76,1966,6711 CATTTATCGTTCAAGA-1,0,5,77,2086,6780 CAAACGTGGTCTTGCG-1,0,4,78,1966,6848 TAGAAACCACTAAGTA-1,0,5,79,2086,6917 ACTGATTTAGTGATTC-1,0,4,80,1966,6986 TCGTATTTCGTCCGGA-1,0,5,81,2086,7055 CGGAAATTTCACATCC-1,0,4,82,1966,7124 ATCCACGCTAAATGTT-1,0,5,83,2086,7193 GTTCAATCTATGTCAA-1,0,4,84,1966,7261 ATAAAGGTCAAGTACG-1,0,5,85,2086,7330 CAACTCCAACGTTTAG-1,0,4,86,1966,7399 TAGGAACAGCCTCCAG-1,0,5,87,2086,7468 ATGGGAACGGAAGCGG-1,0,4,88,1966,7537 CACACGTTTCAATGGG-1,0,5,89,2086,7606 
GGTGTTCTGTTTCTAC-1,0,4,90,1966,7674 AGTAACGTTCATCCTG-1,0,5,91,2086,7743 GTATAGTGGCCCATGT-1,0,4,92,1966,7812 TCTACACGTTCATGCA-1,0,5,93,2086,7881 AATCTGGGTAGACCCT-1,0,4,94,1966,7950 TCGGTTAGCCATGTAG-1,0,5,95,2086,8019 TGCCATGGCTTATAAG-1,0,4,96,1966,8087 TAAGTAAATGTGCCGC-1,0,5,97,2086,8156 GTGTCCGATAAGGCAT-1,0,4,98,1966,8225 TGGCACGAGCTCGAGT-1,0,5,99,2086,8294 ACCGGTCTGAGTACGG-1,0,4,100,1966,8363 GAACTTAGCGCCCGGT-1,0,5,101,2086,8432 AGTAGCTAGACGCCGA-1,0,4,102,1966,8500 ATAGGAATCTAAGCTT-1,0,5,103,2086,8569 CTTCCTGCATATTTAC-1,0,4,104,1966,8638 CAATATGTAGATTTAC-1,0,5,105,2086,8707 ACAAGGCCTACCAGCC-1,0,4,106,1966,8776 TTATAGTCCAAGGTGC-1,0,5,107,2086,8845 AAACGCCCGAGATCGG-1,0,4,108,1966,8913 CCTCGTTACGCCTGTT-1,0,5,109,2086,8982 GAACGGTGTAAAGCAG-1,0,4,110,1966,9051 ACGCATAAATGACATG-1,0,5,111,2086,9120 GGTTCGATGCTGAGTT-1,0,4,112,1966,9189 CTTTGGCAGACAGAGT-1,0,5,113,2086,9257 TTCGTGGGCTGGAAGC-1,0,4,114,1966,9326 CAAAGGTTAAATTCAG-1,0,5,115,2086,9395 GTTTGGCGTCAGGCAC-1,0,4,116,1966,9464 GCTTTCTATCTCAACT-1,0,5,117,2086,9533 TGCATCTCCGGATCTT-1,0,4,118,1966,9602 CTGAAACGGCCCTCAG-1,0,5,119,2086,9670 TAGCAGTAAATACGCG-1,0,4,120,1966,9739 CGGGCTACTTAAATTG-1,0,5,121,2086,9808 ATTATGCTCAGTATTG-1,0,4,122,1966,9877 TGATGCTCACGTAGTC-1,0,5,123,2086,9946 GTCTAAGATGCCCAGC-1,0,4,124,1966,10015 AACCCGATAGGGCTTC-1,0,5,125,2086,10083 CGCTATCGTGGCTTTA-1,0,4,126,1966,10152 CGTCTCTCGCCGAGGC-1,0,5,127,2086,10221 AGTGGGAGTATACACG-1,0,6,0,2206,1480 GGTCTTGGTGTTAACT-1,0,7,1,2326,1549 GGCTGGCAGCTTTATG-1,0,6,2,2206,1617 CGCCAATTATTGCGTT-1,0,7,3,2326,1686 GGTAACCGGCAAAGGT-1,0,6,4,2206,1755 TGGGACCATTGGGAGT-1,0,7,5,2326,1824 CTGCAGGTGCTCGGCC-1,0,6,6,2206,1893 CCGGTGCGAGTGATAG-1,0,7,7,2326,1962 GGGTACACTCTGGAGG-1,0,6,8,2206,2030 TAGCCAGAGGGTCCGG-1,0,7,9,2326,2099 CTTGTGAGGACAGCGG-1,0,6,10,2206,2168 GAAGGGCATAACCATG-1,0,7,11,2326,2237 CAACATGGCCTGATAA-1,0,6,12,2206,2306 CAATTTGACCGGGAAG-1,0,7,13,2326,2375 TCTGACTGTAATGGTT-1,0,6,14,2206,2443 TTCATAGCCTTGTAAC-1,0,7,15,2326,2512 TGGAAACGGAGTGAAC-1,0,6,16,2206,2581 ATCGCACGATTGTTCA-1,0,7,17,2326,2650 CGCCACCCGCATTAAC-1,0,6,18,2206,2719 TGGACCACGGCGTTGA-1,0,7,19,2326,2788 GTATATGTTACGGCGG-1,0,6,20,2206,2856 GTATTCTTACCGTGCT-1,0,7,21,2326,2925 TTCAGAGTAACCTGAC-1,0,6,22,2206,2994 GCGGTAACCCAAATGA-1,0,7,23,2326,3063 CTACGTGTTGCCACCA-1,0,6,24,2206,3132 CTAGATAAACTCCTCG-1,0,7,25,2326,3201 TCCATTAGTTGGATAG-1,0,6,26,2206,3269 CTGGCTCCTGCGGGAT-1,0,7,27,2326,3338 CAGTCTCTCGGCTAAT-1,0,6,28,2206,3407 GTATGACGTGGGAAAC-1,0,7,29,2326,3476 AGTCACTCCGCCTCAT-1,0,6,30,2206,3545 GCAGCGGTGGGCATTA-1,0,7,31,2326,3614 TATGGAGTTTCTCGTT-1,0,6,32,2206,3682 ACTCAACGAATGTATT-1,0,7,33,2326,3751 AACACGCGGCCGCGAA-1,0,6,34,2206,3820 CGATATTAGCCGCAGG-1,0,7,35,2326,3889 AGCGTCTGAACCCGCA-1,0,6,36,2206,3958 GATGTCCGGATCACAT-1,0,7,37,2326,4026 GGTCACGTTAGATTCA-1,0,6,38,2206,4095 TTAAGGATACGGAGGT-1,0,7,39,2326,4164 GTGCGGGACCATCGGC-1,0,6,40,2206,4233 CCATCTTGTTCACAAT-1,0,7,41,2326,4302 TCCGAGAAGGCTAAGC-1,0,6,42,2206,4371 TGGCGGTGTGCGATTG-1,0,7,43,2326,4439 ATCCTGCTGCAGATAG-1,0,6,44,2206,4508 TTATGCGTCCCGGTCC-1,0,7,45,2326,4577 CATAATGAGCGGGCGA-1,0,6,46,2206,4646 AGACATAGATCCTTCC-1,0,7,47,2326,4715 GGTGAAACCGGGAATG-1,0,6,48,2206,4784 AACTGGTGTGGGCCTT-1,0,7,49,2326,4852 GTAGCGCTGTTGTAGT-1,0,6,50,2206,4921 TTGTTTGTGTAAATTC-1,0,7,51,2326,4990 GGATCAAAGGACGAGG-1,0,6,52,2206,5059 CGTAGCGCCGACGTTG-1,0,7,53,2326,5128 CAAGTGAACTTTGGTT-1,0,6,54,2206,5197 GTAGACAACCGATGAA-1,0,7,55,2326,5265 CAATGGTCGGCCTGGG-1,0,6,56,2206,5334 ACAGATTAGGTTAGTG-1,0,7,57,2326,5403 GTTATCACCTTCTGAA-1,0,6,58,2206,5472 TGGTATCGGTCTGTAT-1,0,7,59,2326,5541 
GGAATAACCTCAAGAA-1,0,6,60,2206,5610 ATTATCTCGACAGATC-1,0,7,61,2326,5678 CCGAGGGATGTTAGGC-1,0,6,62,2206,5747 TGAGATCAAATACTCA-1,0,7,63,2326,5816 AAACGAAGAACATACC-1,0,6,64,2206,5885 CTGGTCCTAACTTGGC-1,0,7,65,2326,5954 TGCACGAGTCGGCAGC-1,0,6,66,2206,6023 ATAGTCTTTGACGTGC-1,0,7,67,2326,6091 TGGAGCTAAAGTTCCC-1,0,6,68,2206,6160 GGGTGGTCCAGCCTGT-1,0,7,69,2326,6229 CATGCATGGAGACCCT-1,0,6,70,2206,6298 ACACGGCACTATGCAT-1,0,7,71,2326,6367 CCCTGGTATGGGCGGC-1,0,6,72,2206,6435 GGAGGATTGAAAGGAG-1,0,7,73,2326,6504 CCGCTGGTGCCATTCA-1,0,6,74,2206,6573 GTTAGAGTGTGCCGCT-1,0,7,75,2326,6642 TCGGAATGACCATCAA-1,0,6,76,2206,6711 TTCAATTAGCCATAAT-1,0,7,77,2326,6780 GATGTGTTGTCACAAG-1,0,6,78,2206,6848 TCTTTCTCTTAAGGAG-1,0,7,79,2326,6917 ACCCTTTAGTTCTCCA-1,0,6,80,2206,6986 ACCACAACTCAGAACA-1,0,7,81,2326,7055 TATGATAAATCTAACG-1,0,6,82,2206,7124 GATCCTCTTGCGCTTA-1,0,7,83,2326,7193 TTCTACCTTTATGTTG-1,0,6,84,2206,7261 GAAATACCTGCTGGCT-1,0,7,85,2326,7330 ATTCTGAGTATGAACT-1,0,6,86,2206,7399 GGATTAAGCTAAGGTC-1,0,7,87,2326,7468 AGTACGTGGCCTGTCT-1,0,6,88,2206,7537 TCAGGGTGCACGAAAC-1,0,7,89,2326,7606 AAATTTACCGAAATCC-1,0,6,90,2206,7674 TTGAGGCATTTAACTC-1,0,7,91,2326,7743 AACCAGTATCACTCTT-1,0,6,92,2206,7812 CACCGGAGATATCTCC-1,0,7,93,2326,7881 GACTGGGCGCCGCAAC-1,0,6,94,2206,7950 CACGTCTATGATGTGG-1,0,7,95,2326,8019 TTAAGACGAACGAACC-1,0,6,96,2206,8087 TGACCAGCTTCAAAGT-1,0,7,97,2326,8156 AGAGTTAGAGACCGAT-1,0,6,98,2206,8225 TTCGGACTGATGCCTT-1,0,7,99,2326,8294 CTCGAATGGAACGTAT-1,0,6,100,2206,8363 GGACGGCTTGCGCAAC-1,0,7,101,2326,8432 CTAAGTACAGGGCTAC-1,0,6,102,2206,8500 ACAAATTCAGATCTGA-1,0,7,103,2326,8569 CATGGAAATGGGACCA-1,0,6,104,2206,8638 GGTGGACCACGTGTTA-1,0,7,105,2326,8707 CACGACGTAATAGTAA-1,0,6,106,2206,8776 CGGGTTCGGCACGTAT-1,0,7,107,2325,8845 CTGGGCTATCCTTTGG-1,0,6,108,2206,8913 GTATTAGGGTTCGCGT-1,0,7,109,2325,8982 TCATTCGTATAATTTG-1,0,6,110,2206,9051 AATAGCAAGCCTCCTG-1,0,7,111,2325,9120 CATCTACCCGAGAACG-1,0,6,112,2206,9189 GCTTCAGTGGGATTAC-1,0,7,113,2325,9257 TCTGTGATGGAGGTTG-1,0,6,114,2206,9326 ATCCACTTTCAGACTA-1,0,7,115,2325,9395 ATGGTTACGAAACATG-1,0,6,116,2206,9464 GGCCCAATCTAGAGGG-1,0,7,117,2325,9533 GATGGTGAAATAACCC-1,0,6,118,2206,9602 AGAGGGACAATTGTCC-1,0,7,119,2325,9670 CGCGTACATTCTGGAA-1,0,6,120,2206,9739 CAAGAAACCCTAAACT-1,0,7,121,2325,9808 TTGGTGCGGTGTTGAA-1,0,6,122,2206,9877 GGTTCCCTAGTGTCTC-1,0,7,123,2325,9946 CGATAACCAATTTGAG-1,0,6,124,2206,10015 GCCCACTGGTCCACAA-1,0,7,125,2325,10083 GAGGGCCGGCAGAGTC-1,0,6,126,2206,10152 CGACACGGATGCCCAC-1,0,7,127,2325,10221 CTGTCTGTGGCTGGCT-1,0,8,0,2446,1480 ATATTATCCCGTATTT-1,0,9,1,2565,1549 GCGCTGGCGGAAAGTC-1,0,8,2,2446,1617 ATCTAACGTCCCTATG-1,0,9,3,2565,1686 GTCAGACAGCGTTGGA-1,0,8,4,2446,1755 GCCAGGCTTAGTGGTA-1,0,9,5,2565,1824 ATTCAAAGTACCTGTT-1,0,8,6,2446,1893 TGGACGTAGGCGAATC-1,0,9,7,2565,1962 ACACATTGACGCAACA-1,0,8,8,2446,2030 GATATCAGTATGTATC-1,0,9,9,2565,2099 TGGGCCTTGCCTGCAT-1,0,8,10,2446,2168 CAAAGTCAGGTTAGCT-1,0,9,11,2565,2237 GGATCCCTACCAGCTA-1,0,8,12,2446,2306 ATCGTCCAATCGAGTC-1,0,9,13,2565,2375 ACATGGCTCAATTTAG-1,0,8,14,2445,2443 AGGCCCAGTGACTGGT-1,0,9,15,2565,2512 GCTTCCAGCTTAGATT-1,0,8,16,2445,2581 TGCTTGAAACCATGCA-1,0,9,17,2565,2650 CAATATTGGACTAGTG-1,0,8,18,2445,2719 CGTGCTGGCCTAGTCG-1,0,9,19,2565,2788 CCTGCGATAGAACTGT-1,0,8,20,2445,2856 GGGTAATGCTGTGTTT-1,0,9,21,2565,2925 AACGCGAACGGCAACA-1,0,8,22,2445,2994 TGTCGGCATGGTGGAA-1,0,9,23,2565,3063 AGCGTACGAGAGCTAG-1,0,8,24,2445,3132 ATACTCTCGCCACTCT-1,0,9,25,2565,3201 AATCCATGCAAGGGTG-1,0,8,26,2445,3269 TTAAACAGAGTCCCGC-1,0,9,27,2565,3338 CCACAGCTGAAATCAT-1,0,8,28,2445,3407 CGGTTCCGGCTTCTTG-1,0,9,29,2565,3476 
GACGTGAGACTCCATG-1,0,8,30,2445,3545 TCGTTGGCTCGTCAAT-1,0,9,31,2565,3614 GGTGAACGGGCTAGCC-1,0,8,32,2445,3682 GCACTGTGCAAATGTA-1,0,9,33,2565,3751 ACGAGAACCCATCACG-1,0,8,34,2445,3820 CCAGCTACGCCTCATA-1,0,9,35,2565,3889 TCCCGGTCAGGAATTT-1,0,8,36,2445,3958 TCGCATTCAATGACTT-1,0,9,37,2565,4026 CTGGTTCAACGCATCA-1,0,8,38,2445,4095 GGTGATTTCATCTTGT-1,0,9,39,2565,4164 CACCCTTTCCTCGCTC-1,0,8,40,2445,4233 CAACTTGTAGTGGGCA-1,0,9,41,2565,4302 AATATCAAGGTCGGAT-1,0,8,42,2445,4371 ACTCAGACCTGCTTCT-1,0,9,43,2565,4439 TTGGAGTCTCCCTTCT-1,0,8,44,2445,4508 GGATACTCATGAATTG-1,0,9,45,2565,4577 TGGGCACAAACAGAAC-1,0,8,46,2445,4646 GAGCCACGGTAGTAGG-1,0,9,47,2565,4715 TCGATAGGCTAGTCGC-1,0,8,48,2445,4784 TAACCGCCCGCAGTGC-1,0,9,49,2565,4852 GCCTATTTGCTACACA-1,0,8,50,2445,4921 TTGACGATTCAGCACG-1,0,9,51,2565,4990 TTAAACCGGTAGCGAC-1,0,8,52,2445,5059 ACCGAAAGGGCCCTGC-1,0,9,53,2565,5128 ACGTTCCGCGCTCCGT-1,0,8,54,2445,5197 ATACCAGGCTAATAGA-1,0,9,55,2565,5265 CGGCTTTGTATGATAA-1,0,8,56,2445,5334 CTTGACCCGAAAGATA-1,0,9,57,2565,5403 CGCAGAAACATTTGCG-1,0,8,58,2445,5472 GACCCGTCGCCGGCTA-1,0,9,59,2565,5541 AATCGGGACACTACGA-1,0,8,60,2445,5610 GTCACAAAGTTTCCAA-1,0,9,61,2565,5678 TATATTCGCGTCGATA-1,0,8,62,2445,5747 CCTCCCGACAATCCCT-1,0,9,63,2565,5816 CGACATGCGATCTTCT-1,0,8,64,2445,5885 AACACGACTGTACTGA-1,0,9,65,2565,5954 CCCAACCACACTAACA-1,0,8,66,2445,6023 CACCGCCGACCAGCGA-1,0,9,67,2565,6091 TGGTATCGCATCCCAA-1,0,8,68,2445,6160 CAGAGTGATTTAACGT-1,0,9,69,2565,6229 AACCCTGGTGGAACCA-1,0,8,70,2445,6298 GTCAGTTGTGCTCGTT-1,0,9,71,2565,6367 ATTGACGTAACTCGGT-1,0,8,72,2445,6435 GATGTCGGTCAACTGC-1,0,9,73,2565,6504 AGGGCAGCGGCGTGGT-1,0,8,74,2445,6573 ACATCGTTAACCTAGT-1,0,9,75,2565,6642 TCCATTGTGACCTCGT-1,0,8,76,2445,6711 TGTTTAATACTTCATC-1,0,9,77,2565,6780 TTGCTGGCCGGGCTTC-1,0,8,78,2445,6848 CATATTATTTGCCCTA-1,0,9,79,2565,6917 CTGCCTAGCCACCAAG-1,0,8,80,2445,6986 ACGAGATATTTGCTTA-1,0,9,81,2565,7055 GACTACAATTGCTCGT-1,0,8,82,2445,7124 AACGTGATGAAGGACA-1,0,9,83,2565,7193 ACTCTCTTATACACGA-1,0,8,84,2445,7261 CGCATCATGGCTTCAG-1,0,9,85,2565,7330 CGGCTCTTCGTCGAAC-1,0,8,86,2445,7399 ATTCTTCGTACTTATG-1,0,9,87,2565,7468 AGTGAGGGTTTCTGAC-1,0,8,88,2445,7537 GCCAGGCGTTCGCATG-1,0,9,89,2565,7606 GACTAACACAGCACCT-1,0,8,90,2445,7674 CAATGGAATCTACATA-1,0,9,91,2565,7743 GTGGTCAGCGAAGTAT-1,0,8,92,2445,7812 ATGGCTGGAAATGGCC-1,0,9,93,2565,7881 ATCAGGTCGCCATTGC-1,0,8,94,2445,7950 TATCACCATGTAAAGT-1,0,9,95,2565,8019 AGCGCTTATGGGCAAG-1,0,8,96,2445,8087 AAGCGGCGTCATGGGT-1,0,9,97,2565,8156 ACTAATACGTCAGGCG-1,0,8,98,2445,8225 GGCTGAGCATCGTAAG-1,0,9,99,2565,8294 CGGTTGGGTTCAAGTT-1,0,8,100,2445,8363 GACTGATTGGTCACAA-1,0,9,101,2565,8432 AGACGGGCCGATTTAA-1,0,8,102,2445,8500 ACCAGTGCCCGGTCAA-1,0,9,103,2565,8569 GTCCTTTAATGACTTC-1,0,8,104,2445,8638 CCTACAAGTCCGGAAT-1,0,9,105,2565,8707 GCCTGCTACACTGAGA-1,0,8,106,2445,8776 GACTCGGTCGGCGGAT-1,0,9,107,2565,8845 CTAGACATATATGTAG-1,0,8,108,2445,8913 TCGCCCAACTGACTCC-1,0,9,109,2565,8982 AAACTAACGTGGCGAC-1,0,8,110,2445,9051 AACTGAGGTCAGCGTC-1,0,9,111,2565,9120 ACAATGATTCTTCTAC-1,0,8,112,2445,9189 ATAAGTACCCGATTGT-1,0,9,113,2565,9257 ATTGGGAGTTCTGTAA-1,0,8,114,2445,9326 CGAACATAGTCAGAAA-1,0,9,115,2565,9395 TAGCTCAGATCCTAGT-1,0,8,116,2445,9464 GTGTCGTATTCACCTT-1,0,9,117,2565,9533 CTCACCGATCCAAACT-1,0,8,118,2445,9602 ATATGTGCACAAACCA-1,0,9,119,2565,9670 CAGTCCAACGCCTTCT-1,0,8,120,2445,9739 TCGTCCGGGTACACTC-1,0,9,121,2565,9808 GCAGAAACGTAATCCA-1,0,8,122,2445,9877 TTCGAGCCGGCGCTAC-1,0,9,123,2565,9946 GGAAGATAAGACTGTA-1,0,8,124,2445,10015 ATAAGCAAACACCGAG-1,0,9,125,2565,10083 GCATAAATTGAACGCC-1,0,8,126,2445,10152 
CGCCGGTGTCGCAGTA-1,0,9,127,2565,10221 GACCTGGTCTGGGCGT-1,0,10,0,2685,1480 AGCCGCTTGATTAGCG-1,0,11,1,2805,1549 CCCGGCTAGGTGAGAA-1,0,10,2,2685,1617 CGAGCCGAGCACTCGA-1,0,11,3,2805,1686 TAGTGCTTGAATCCTT-1,0,10,4,2685,1755 CAACCGCACCTAGACA-1,0,11,5,2805,1824 ACCACTGTTCAAGAAG-1,0,10,6,2685,1893 AGATGCTATAACGAGC-1,0,11,7,2805,1962 AATTACTCGTACGCTC-1,0,10,8,2685,2030 CGTCAATCTTTAACAT-1,0,11,9,2805,2099 CCAAAGCAGTTGGTTG-1,0,10,10,2685,2168 CCATATTGGATCATGA-1,0,11,11,2805,2237 CGTACCGAAAGTCTAG-1,0,10,12,2685,2306 CTCGAGATCCAAAGCA-1,0,11,13,2805,2375 TGGATAGAGTAACAGA-1,0,10,14,2685,2443 TCACAGATCCTCAAAC-1,0,11,15,2805,2512 AGAGCTACGAAAGCAT-1,0,10,16,2685,2581 TGCGTGATTGGGTGTC-1,0,11,17,2805,2650 CACATGTTTGGACATG-1,0,10,18,2685,2719 TTCGCATCCGGAAGCA-1,0,11,19,2805,2788 CCCTAGTGTCAGGTGT-1,0,10,20,2685,2856 TTACCGCCTTAGGGAA-1,0,11,21,2805,2925 CCAGTCCATTATTCGA-1,0,10,22,2685,2994 CGTAAACGCTTGAGTG-1,0,11,23,2805,3063 ATTCCTTCCAGGCGGT-1,0,10,24,2685,3132 TTCCTTTCTGTGTTGC-1,0,11,25,2805,3201 AGTTGACATCGGCTGG-1,0,10,26,2685,3269 AACTCGATGGCGCAGT-1,0,11,27,2805,3338 GATAAGGCAGATGCAA-1,0,10,28,2685,3407 GGCTGGCTAGCTTAAA-1,0,11,29,2805,3476 CCTCATGCAGCTACGA-1,0,10,30,2685,3545 GACGCCTGTTGCAGGG-1,0,11,31,2805,3614 TAATTAGATGGATATG-1,0,10,32,2685,3682 GAGGGCATCGCGTATC-1,0,11,33,2805,3751 CTTGTGAGTCTTTGAC-1,0,10,34,2685,3820 TCAACACATTGGGTAA-1,0,11,35,2805,3889 ACTGTATACGCGAGCA-1,0,10,36,2685,3958 GTGAAACGTGCTCCAC-1,0,11,37,2805,4026 CGAGTGCTATAGTTCG-1,0,10,38,2685,4095 GTACTGCATGAAGCGT-1,0,11,39,2805,4164 GTAACTTGCGGCAGTC-1,0,10,40,2685,4233 GAATCGCCGGACACGG-1,0,11,41,2805,4302 GGGAGTAATGGCTGGC-1,0,10,42,2685,4371 CATGAACCGACATTTG-1,0,11,43,2805,4439 TCTGTCATACAAGAGC-1,0,10,44,2685,4508 GTCGTCAATTATAAGG-1,0,11,45,2805,4577 TAAAGAGCCCGAAACC-1,0,10,46,2685,4646 GTACTGAGGTCGTAAC-1,0,11,47,2805,4715 AAAGACCCAAGTCGCG-1,0,10,48,2685,4784 CGTCAGTGCGCACAAG-1,0,11,49,2805,4852 TGTATCCTTATTCCAT-1,0,10,50,2685,4921 ATTCTCGTCTCTTTAG-1,0,11,51,2805,4990 AAAGTCACTGATGTAA-1,0,10,52,2685,5059 TGTCTACAGTTTCTGT-1,0,11,53,2805,5128 TTAACGTCGCAAGACC-1,0,10,54,2685,5197 CTATGTCTATTGAAAC-1,0,11,55,2805,5265 TCGGGTGAAACTGCTA-1,0,10,56,2685,5334 TGTCCCGACATAGCAC-1,0,11,57,2805,5403 ACAGCATAGAGCCAGT-1,0,10,58,2685,5472 ATATTCCCACAGGTCA-1,0,11,59,2805,5541 TTGGATCGACTTCTGG-1,0,10,60,2685,5610 CACCATCGGAGGAGAC-1,0,11,61,2805,5678 TCGTTCGTTATTATGT-1,0,10,62,2685,5747 CTTAACTTCGAAGTAC-1,0,11,63,2805,5816 GCACAAGTGTCGGAAG-1,0,10,64,2685,5885 TACCAGCTAGGTTTAA-1,0,11,65,2805,5954 ACGTACAGATTTCTCT-1,0,10,66,2685,6023 AATTTGGTTCCAAAGA-1,0,11,67,2805,6091 GTAAGGATTTGTCGGA-1,0,10,68,2685,6160 CATCATCTACCCGGAC-1,0,11,69,2805,6229 ACGATGGATCCGATGC-1,0,10,70,2685,6298 CACTCAGCTCTTGAGG-1,0,11,71,2805,6367 TAGATCCGAAGTCGCA-1,0,10,72,2685,6436 TGAAACTTATGCAAGC-1,0,11,73,2805,6504 GCGATTCTGGAAGCAG-1,0,10,74,2685,6573 CAAACTATTGAGCTTC-1,0,11,75,2805,6642 TAGAATTAAGGGCAAC-1,0,10,76,2685,6711 CGAAACATAGATGGCA-1,0,11,77,2805,6780 GATGGTGCCCTAGGCA-1,0,10,78,2685,6848 CCCGCAGGGCCCAAAG-1,0,11,79,2805,6917 ACAGCGCACCCGCAGC-1,0,10,80,2685,6986 GGTAAATGTGCGTTAC-1,0,11,81,2805,7055 GTCCTTCTAGTGGGTT-1,0,10,82,2685,7124 GGAAGCTCGCTTACAG-1,0,11,83,2805,7193 CACCGATACACCGAGC-1,0,10,84,2685,7261 CAGCCGGGCCCTCTAT-1,0,11,85,2805,7330 CGGAGCTTATAACACC-1,0,10,86,2685,7399 ATTACAACTACCGGCC-1,0,11,87,2805,7468 TCCTCTGGCCCATTAG-1,0,10,88,2685,7537 CGGCACCGTTAGCGCC-1,0,11,89,2805,7606 TCGGTCCCTGACTCCA-1,0,10,90,2685,7674 TGGTTGGAGGATCCTG-1,0,11,91,2805,7743 CTGCGGTAGTCACGTG-1,0,10,92,2685,7812 GTGCCTCAGTGTACGG-1,0,11,93,2805,7881 ATCGTTCACTTTCGCC-1,0,10,94,2685,7950 
ACTTACCGGGCGCGCA-1,0,11,95,2805,8019 CTAGACTGCATTTCGT-1,0,10,96,2685,8087 TTGCCGGTGATCCCTC-1,0,11,97,2805,8156 CTGTCACGCCAGGCGC-1,0,10,98,2685,8225 CGGTATAGGTATTAGC-1,1,11,99,2805,8294 CCAACGCTTGCCAGGG-1,0,10,100,2685,8363 CGTTGAGTAATTGCGT-1,1,11,101,2805,8432 TGTATTTACCTAATGC-1,0,10,102,2685,8500 TAAATGCCGTCTCATG-1,1,11,103,2805,8569 CGGTTTATGAAGGAAC-1,0,10,104,2685,8638 GCAAGATGTGTTCGCG-1,1,11,105,2805,8707 AAAGGTAAGCTGTACC-1,0,10,106,2685,8776 GTACGTCACGTATTAA-1,1,11,107,2805,8845 AGTACCTTCGAGTGCT-1,0,10,108,2685,8913 ATTGTGACTTCGCTGC-1,1,11,109,2805,8982 TGTATCAGACTGAAGC-1,1,10,110,2685,9051 GAGACCCTGCAACGCC-1,1,11,111,2805,9120 TGGGTGGGATGTCATT-1,1,10,112,2685,9189 GGCTAATGATTGAAAT-1,1,11,113,2805,9258 ATAACGTTACCTCCAC-1,1,10,114,2685,9326 TGCGAGATGGCGGCCA-1,1,11,115,2805,9395 CACACTTGTATTGCGA-1,0,10,116,2685,9464 GCTGGTGACTCGTAGT-1,1,11,117,2805,9533 CGACACCGCTTAAGGA-1,0,10,118,2685,9602 GTAACAACTGACCTTG-1,0,11,119,2805,9670 CAACTGAGGGTATGAC-1,0,10,120,2685,9739 CTAATTATGAAGCGTA-1,0,11,121,2805,9808 CCGATCTTAAGAGGCT-1,0,10,122,2685,9877 CGACTCGGTACACGGT-1,0,11,123,2805,9946 TGCTGCGTCAGAGTTA-1,0,10,124,2685,10015 AGAGTTGCAGGCCTCC-1,0,11,125,2805,10083 ACTGGCGAACCTGCGT-1,0,10,126,2685,10152 ACTAAGGACGCACACC-1,0,11,127,2804,10221 CGTCCAGATGGCTCCA-1,0,12,0,2925,1480 ACTATCGCCGGCTAAA-1,0,13,1,3044,1549 GATAGCGTACCACGCG-1,0,12,2,2925,1617 AGGACTTATAGGAGAA-1,0,13,3,3044,1686 TAGTCGGGATTCTTCG-1,0,12,4,2925,1755 ACCATTAAGGGTGTCA-1,0,13,5,3044,1824 TTAATGTGTTTGCAGG-1,0,12,6,2925,1893 TCCGCAGCCACCTAGC-1,0,13,7,3044,1962 GAGGAGATCCTCATGC-1,0,12,8,2925,2030 GGTCCTTCATACGACT-1,0,13,9,3044,2099 CCCTGTTGGCAAAGAC-1,0,12,10,2925,2168 GTGCCTAGCTATGCTT-1,0,13,11,3044,2237 GTCATCTCCTACAGCT-1,0,12,12,2925,2306 GCGAGAAACGGGAGTT-1,0,13,13,3044,2375 CGTCGCGGCGGGATTT-1,0,12,14,2925,2443 CATTGTGTGCTAGATC-1,0,13,15,3044,2512 CGCGGCAGTATTACGG-1,0,12,16,2925,2581 GAAATGGGATGTAAAC-1,0,13,17,3044,2650 CATTCCCTAAGTACAA-1,0,12,18,2925,2719 AGTTCTGCGTTGTATC-1,0,13,19,3044,2788 ACCCTATAGGACTGAG-1,0,12,20,2925,2856 TTAGATAGGTCGATAC-1,0,13,21,3044,2925 ATTGATAGCAACGAGA-1,0,12,22,2925,2994 TCTCGGCTCCAGGACT-1,0,13,23,3044,3063 CTTGAGGTTATCCCGA-1,0,12,24,2925,3132 AAGAAGGATCAGTTAG-1,0,13,25,3044,3201 GTACGACGGCGCTGCG-1,0,12,26,2925,3269 CTTATGTTGACTACCA-1,0,13,27,3044,3338 CGGCACTCAAGAAAGT-1,0,12,28,2925,3407 GTCAAAGTTTACATAG-1,0,13,29,3044,3476 CTCCTAAGTTATGTCT-1,0,12,30,2925,3545 ACTGTGCTAGTAGATC-1,0,13,31,3044,3614 GTTTGGCCGCTCAGCG-1,0,12,32,2925,3682 TTATCCAATCGAACTC-1,0,13,33,3044,3751 CCGTACCCAAGCGCCA-1,0,12,34,2924,3820 CATACAAAGCCGAACC-1,0,13,35,3044,3889 GGTCGGTAATTAGACA-1,0,12,36,2924,3958 AGCGGGAAGGGTCCAT-1,0,13,37,3044,4027 GCCCACCAAGGCTGTC-1,0,12,38,2924,4095 GTAGACGTCGTTACAT-1,0,13,39,3044,4164 GAGCATCATCCCTGGG-1,0,12,40,2924,4233 AGGTAACCTCCTATTC-1,0,13,41,3044,4302 GGTTTGAGTGCTGGAA-1,0,12,42,2924,4371 GCACTAGTCGCGCTAT-1,0,13,43,3044,4439 GGTAACTATGTATCTG-1,0,12,44,2924,4508 GCGGTCCCTAGACGCA-1,0,13,45,3044,4577 CGAGCGTTGATCAGCC-1,0,12,46,2924,4646 AATCCAAGGGCCTGAG-1,0,13,47,3044,4715 CCGTGCCCATGACGGC-1,0,12,48,2924,4784 GAAATTCACATCGCTG-1,0,13,49,3044,4852 CTCTGCGAAGCAAGCA-1,0,12,50,2924,4921 AGTAGGTAACATACAT-1,0,13,51,3044,4990 ATTGGGAATATCTTGG-1,0,12,52,2924,5059 TAGAGCTACGAAGAAC-1,0,13,53,3044,5128 TGCGGCATAGTTCAAC-1,0,12,54,2924,5197 CCGCCGGTCAACACAC-1,0,13,55,3044,5265 TCGTATAGTGCAATTA-1,0,12,56,2924,5334 TAGTTTATTCTTGCTT-1,0,13,57,3044,5403 GATATCTCATGCAATA-1,0,12,58,2924,5472 CGTTTAAGCGGAGCAC-1,0,13,59,3044,5541 CATGCTGGCTCCAATT-1,0,12,60,2924,5610 GAAACAGCCATGCAGT-1,0,13,61,3044,5678 
AGTTTCGCAGGTCGGA-1,0,12,62,2924,5747 CTCATGGCTCACAATC-1,0,13,63,3044,5816 AACCGTTGTGTTTGCT-1,0,12,64,2924,5885 ACCCTTCATCTGCGAA-1,0,13,65,3044,5954 TGCGGTGAAATTTCAT-1,0,12,66,2924,6023 CAAATTGTCAGCAAGC-1,0,13,67,3044,6091 GAGGTACATCCATCTT-1,0,12,68,2924,6160 AAATGGCATGTCTTGT-1,0,13,69,3044,6229 TCATCCCAGAGGGTGG-1,0,12,70,2924,6298 CGTAGCGAATTGTCAG-1,0,13,71,3044,6367 CCTAGTTAGTCGCATG-1,0,12,72,2924,6436 GAAACTCTAATGAAGG-1,0,13,73,3044,6504 TTGTATCACACAGAAT-1,0,12,74,2924,6573 TTCAAGCCGAGCTGAG-1,0,13,75,3044,6642 AGGTACGATATTGCCA-1,0,12,76,2924,6711 TTAAGCCGACAACTTC-1,0,13,77,3044,6780 GTCTTAGTACAGCCGG-1,0,12,78,2924,6848 TGGGTAAGGTTCCCGC-1,0,13,79,3044,6917 CTACGCCATTTCCGAT-1,0,12,80,2924,6986 GACCGTCAGGTCGTGA-1,0,13,81,3044,7055 TAGTTAAGATAGGATA-1,0,12,82,2924,7124 GAATATTCGGAGTCCC-1,0,13,83,3044,7193 CAAACTACGATAGAGA-1,0,12,84,2924,7261 CAGGAAGACTTTATAT-1,0,13,85,3044,7330 TTCGTAATCCCAGCGG-1,0,12,86,2924,7399 GTGAGGAGCGGTTGAG-1,1,13,87,3044,7468 CAATCCCTATACCAGC-1,1,12,88,2924,7537 AGGGAAACGAGGTACT-1,1,13,89,3044,7606 TCCTGCCAACTGGAGA-1,1,12,90,2924,7674 AATTTGGGACATAGTA-1,1,13,91,3044,7743 AACTCCTAATCCCATG-1,1,12,92,2924,7812 GCTCATTACTGCATGT-1,1,13,93,3044,7881 TCTCGAACGAGGTCAC-1,1,12,94,2924,7950 TCACTACGACCAATGC-1,1,13,95,3044,8019 GTGATGCACAACATCT-1,1,12,96,2924,8087 CCGACGTAAACACAAC-1,1,13,97,3044,8156 TGGGATGCACTCATTC-1,1,12,98,2924,8225 TTCCATCATGCGGTGA-1,1,13,99,3044,8294 TGCACAGTGAAGTTAT-1,1,12,100,2924,8363 CTATTGTGTTTGGTCA-1,1,13,101,3044,8432 TGCGAGCCCTTCCGCG-1,1,12,102,2924,8500 TTGAAAGGTGTAAAGG-1,1,13,103,3044,8569 GGTGCAGAGCCTATCG-1,1,12,104,2924,8638 ACTATATGCTGTGTTC-1,1,13,105,3044,8707 TCCTGGCGCTGCCTGG-1,1,12,106,2924,8776 GAGCCGAGCGTTTATT-1,1,13,107,3044,8845 AGAATGCGGGTTCGGA-1,1,12,108,2924,8913 ATGCGACAATTGGTCC-1,1,13,109,3044,8982 TTCCGGCTCGACTTCT-1,1,12,110,2924,9051 TGATTATGGCACGCAG-1,1,13,111,3044,9120 GGTTTGACAAGAAGCT-1,1,12,112,2924,9189 GCAGCTATGGACAGGT-1,1,13,113,3044,9258 CACCATGATCGCAAAG-1,1,12,114,2924,9326 GTCGGAAGGATACCAG-1,1,13,115,3044,9395 GGCCCAGTTATCAGCA-1,0,12,116,2924,9464 GGGCCTATTTAAGTAT-1,0,13,117,3044,9533 GTTGTTACATTGCGCT-1,0,12,118,2924,9602 CAACTCCGTAACTTGC-1,0,13,119,3044,9670 GATCTTTGCTCAAAGA-1,0,12,120,2924,9739 TCGCTTAATTACGAAG-1,0,13,121,3044,9808 CGATCATTAGAGGCAC-1,0,12,122,2924,9877 TGTTCTCTACTCCCTA-1,0,13,123,3044,9946 GCTTAGGGAAGCGGTA-1,0,12,124,2924,10015 CAGGTTTAGTACTACA-1,0,13,125,3044,10083 AAGCGGAGTGCGCGCA-1,0,12,126,2924,10152 TCAGATGGAGACGTAG-1,0,13,127,3044,10221 TCGCACTAACGTTTGT-1,0,14,0,3164,1480 CACGTCGGGTTCTAGA-1,0,15,1,3284,1549 GGAGTACACATGAGCT-1,0,14,2,3164,1617 GTGTGTCGACGTCGCT-1,0,15,3,3284,1686 GAAGCTCTTTGCTTAG-1,0,14,4,3164,1755 ACACCGAGCGCTCTTT-1,0,15,5,3284,1824 CGTAATAATTACGAGT-1,0,14,6,3164,1893 CATCAACACCTACTAA-1,0,15,7,3284,1962 CCAAGTTTCTACAGAT-1,0,14,8,3164,2030 ACGGGTCATGTGACTT-1,0,15,9,3284,2099 AGTGTGCTAAGATCGC-1,0,14,10,3164,2168 GGCGGTTTGCCGGTGC-1,0,15,11,3284,2237 GTATAATCTCCCGGAT-1,0,14,12,3164,2306 TAGTCCGTATGCATAA-1,0,15,13,3284,2375 CACTTCGTCTTATCTC-1,0,14,14,3164,2443 CATCCGCAGGCCCGAA-1,0,15,15,3284,2512 CCCTGATGTAACTCGT-1,0,14,16,3164,2581 CCATAGTCAGTAACCC-1,0,15,17,3284,2650 CGGGCCATAGCCGCAC-1,0,14,18,3164,2719 CTCCGGCTTGTAGACA-1,0,15,19,3284,2788 AACTTGCGTTCTCGCG-1,0,14,20,3164,2856 AATGAGTTCGCATATG-1,0,15,21,3284,2925 CGAGGCCAGGCATTGG-1,0,14,22,3164,2994 TCTGCGTCCGGTTTCT-1,0,15,23,3284,3063 CAATCCTGCCGTGGAG-1,0,14,24,3164,3132 CTGAGCAAGTAACAAG-1,0,15,25,3284,3201 GGGTACCCACGGTCCT-1,0,14,26,3164,3269 ACGGAATTTAGCAAAT-1,0,15,27,3284,3338 GGGCGGTCCTATTGTC-1,0,14,28,3164,3407 
ATGTTACGAGCAATAC-1,0,15,29,3284,3476 AACCATGGGATCGCTA-1,0,14,30,3164,3545 TCGCATCCCTAAGTGT-1,0,15,31,3284,3614 ACTTAGTACGACAAGA-1,0,14,32,3164,3682 GAGCTCTCGGACCTAA-1,0,15,33,3284,3751 TCTATTACGCTGGCGA-1,0,14,34,3164,3820 AGATACGACTTCATAT-1,0,15,35,3284,3889 CGCTATACCGCCCACT-1,0,14,36,3164,3958 CAGTGTCCGCAGAATG-1,0,15,37,3284,4027 CCATCCATACCAAGTC-1,0,14,38,3164,4095 AACCCAGAGACGGAGA-1,0,15,39,3284,4164 GAAGAACGGTGCAGGT-1,0,14,40,3164,4233 GATAAATCGGTGGATG-1,0,15,41,3284,4302 CAGCTCGTGCTTGTGT-1,0,14,42,3164,4371 GAGTACGGGTATACAA-1,0,15,43,3284,4439 CATCGCCCGCGGCCAA-1,0,14,44,3164,4508 TCTTACAGAGGTACCG-1,0,15,45,3284,4577 TGGAAGACGAACACCA-1,0,14,46,3164,4646 GTTGTCGTGTTAGTTG-1,0,15,47,3284,4715 CCAAGGAACAGAGAGG-1,0,14,48,3164,4784 CTGCACCTGGAACCGC-1,0,15,49,3284,4852 CGCTTTCATACCGGTG-1,0,14,50,3164,4921 GTTCTTCCCTCGATGT-1,0,15,51,3284,4990 ATTTAACTCGTATTAC-1,0,14,52,3164,5059 AACGATAGAAGGGCCG-1,0,15,53,3284,5128 TATCCTGCATGGGAAT-1,0,14,54,3164,5197 AGGCCCATTGTACAGG-1,0,15,55,3284,5265 CCGGCGCATATTGGAT-1,0,14,56,3164,5334 ATCTGTAATTGTACCC-1,0,15,57,3284,5403 GAGCGAGGGAGTACCG-1,0,14,58,3164,5472 TTATTAGGGAAGCATC-1,0,15,59,3284,5541 CTTCTTACGTCGTATA-1,0,14,60,3164,5610 GAAGTGCTGGATAGCT-1,0,15,61,3284,5678 GTGCAACAAATGTGGC-1,0,14,62,3164,5747 CATGCGTTAGACAGAA-1,0,15,63,3284,5816 ACACACTTTCTACACG-1,0,14,64,3164,5885 AGCCCTAAGCGAAGTT-1,0,15,65,3284,5954 ATTAATTCGGTCACTC-1,0,14,66,3164,6023 AACAGGAAATCGAATA-1,0,15,67,3284,6091 ACGTTTAGTTGTGATC-1,0,14,68,3164,6160 TCCTTCAGTGGTCGAA-1,0,15,69,3284,6229 CGAACGCCCAGTGCCG-1,0,14,70,3164,6298 CCTCGAAGTGGACGGG-1,0,15,71,3284,6367 CTCTGTTTGAGGATTC-1,0,14,72,3164,6436 TGGGCACGTTCTATGG-1,0,15,73,3284,6504 ACTATTCGTCCGTGGT-1,0,14,74,3164,6573 CCTCTGGCCTAGACGG-1,1,15,75,3284,6642 CCATAAACAACCCGAC-1,0,14,76,3164,6711 CATAGTACATTGAGAG-1,1,15,77,3284,6780 ATTTCATTATTTCGCG-1,0,14,78,3164,6848 CAACTATATCGAATGC-1,1,15,79,3284,6917 CTAGTATTCGGAATTA-1,0,14,80,3164,6986 GTGGAACCTACATGCG-1,1,15,81,3284,7055 CCTAAAGGCTGACGCT-1,1,14,82,3164,7124 CGTGACATTGGGTCGT-1,1,15,83,3284,7193 CCAATCGGTAGATCGA-1,1,14,84,3164,7261 ATTGTCGCAATACCTT-1,1,15,85,3284,7330 AAATTACACGACTCTG-1,1,14,86,3164,7399 CACTCCTCTCGGTCGG-1,1,15,87,3284,7468 AAATAACCATACGGGA-1,1,14,88,3164,7537 AGTTACTCTATCGTGG-1,1,15,89,3284,7606 CGTTAGCTCACAACTG-1,1,14,90,3164,7674 GAATGTATGGCAGGTC-1,1,15,91,3284,7743 GCAACCACCAGACCGG-1,1,14,92,3164,7812 TCACTCGTGCAACGGC-1,1,15,93,3284,7881 AAACAGAGCGACTCCT-1,1,14,94,3164,7950 CAGCCTCTCCTCAAGA-1,1,15,95,3284,8019 TTGCGTGAACGCTTAG-1,1,14,96,3164,8087 CCGCCTGCGAATTGGT-1,1,15,97,3284,8156 AGATGAGGGTTGCGAT-1,1,14,98,3164,8225 CGGTGGGCTCCAGCCT-1,1,15,99,3284,8294 GGCAGCGGTAATCCTA-1,1,14,100,3164,8363 GCTAGCAGGGAGTGGG-1,1,15,101,3284,8432 CTCAAGACATTAGCGC-1,1,14,102,3164,8500 CACGGCGCGCCAAAGG-1,1,15,103,3284,8569 TGCAATTTGGGCACGG-1,1,14,104,3164,8638 ATGCCAATCGCTCTGC-1,1,15,105,3284,8707 GCTGGACCCAAAGTGG-1,1,14,106,3164,8776 ATTCCTAAGACGTGGA-1,1,15,107,3284,8845 TCCGGAGGAAGGGCTG-1,1,14,108,3164,8913 TCGGTGACCGCTCCGG-1,1,15,109,3284,8982 TCCGAAGTAGTCACCA-1,1,14,110,3164,9051 CATGTAGGAGCGCCAA-1,1,15,111,3284,9120 CACAAGAAAGATATTA-1,1,14,112,3164,9189 AGGGTCAGTAACCCTA-1,1,15,113,3284,9258 TAAGCCCTTACGACCA-1,1,14,114,3164,9326 ATACCGTCATCCATAA-1,1,15,115,3284,9395 GGACGTCCATAGTTGG-1,1,14,116,3164,9464 CATCAAACTGGCGCCC-1,0,15,117,3284,9533 AAACGTGTTCGCCCTA-1,0,14,118,3164,9602 AAATTGGTGAGAAGCA-1,0,15,119,3284,9670 GGTCATTGTAGTCATA-1,0,14,120,3164,9739 TGCAGTGAGGCTCGGG-1,0,15,121,3284,9808 GAACATTAGTATGTTA-1,0,14,122,3164,9877 GGTTTGCGAACACGTA-1,0,15,123,3284,9946 
ACACAAATATTCCTAG-1,0,14,124,3164,10015 TTGGGTTTATTCAGCG-1,0,15,125,3284,10083 ATTCGCAGAGGACACT-1,0,14,126,3164,10152 GATTTAGTGCGTACTG-1,0,15,127,3284,10221 TAGAAACACAATAGTG-1,0,16,0,3404,1480 CAGTAGATGATGTCCG-1,0,17,1,3523,1549 TCTTAACTCGGATGTA-1,0,16,2,3404,1617 TACATCTTGTTTCTTG-1,0,17,3,3523,1686 TTCATAGGGTGTCCAT-1,0,16,4,3404,1755 TGAAGTAGCTTACGGA-1,0,17,5,3523,1824 GCACAAGTGGATCATA-1,0,16,6,3404,1893 GGGCGAATTTCTCCAC-1,0,17,7,3523,1962 ATGTTCCTGCCCACCT-1,0,16,8,3404,2030 GCTCAACCTCTTAGAG-1,0,17,9,3523,2099 ATAGCTGCTCTTGTTA-1,0,16,10,3404,2168 CGTCAGCTATTTACTC-1,0,17,11,3523,2237 ATCTGATAGTGTCTTA-1,0,16,12,3404,2306 TGCACTATGTGAGTGC-1,0,17,13,3523,2375 CCGACAAACACATGAG-1,0,16,14,3404,2443 GCCTTGTATATGCAGT-1,0,17,15,3523,2512 ATAATACCGTTAGCCG-1,0,16,16,3404,2581 ACACTCCAATGTCACT-1,0,17,17,3523,2650 AGTTGCTGACTGATAT-1,0,16,18,3404,2719 GGCGCTCCTCATCAAT-1,0,17,19,3523,2788 TGCCTGACATCGGTCA-1,0,16,20,3404,2856 TTGGCCATCTTGCGCT-1,0,17,21,3523,2925 CAGTGGTTGCACATGA-1,0,16,22,3404,2994 AGAATTGTTTGACATA-1,0,17,23,3523,3063 AAATGCTCGTTACGTT-1,0,16,24,3404,3132 CACCTAATAGAGTCGT-1,0,17,25,3523,3201 CATTTCTAGCAGACTA-1,0,16,26,3404,3269 CCGAAAGTGGTGAGCA-1,0,17,27,3523,3338 AGTCAGCCACCGCCTG-1,0,16,28,3404,3407 TCATCACTCGAGCTCG-1,0,17,29,3523,3476 CTCAGGACTCACCTGT-1,0,16,30,3404,3545 CGGTGTACTTGATCCC-1,0,17,31,3523,3614 CCTACGGCTCAGTCGA-1,0,16,32,3404,3682 GTACTTGGGCACTTCT-1,0,17,33,3523,3751 TGATTTCCTCCTGACG-1,0,16,34,3404,3820 CCTCACCAATCTTGAC-1,0,17,35,3523,3889 GGTGAGATGCAGATAA-1,0,16,36,3404,3958 GCTAGTTTCATTGAGG-1,0,17,37,3523,4027 AGGACATCGCACGTCG-1,0,16,38,3404,4095 GTGGACGTGCTGAGAC-1,0,17,39,3523,4164 TAAGGAACTTGTGGGA-1,0,16,40,3404,4233 TCGCTGTGCGTAAATC-1,0,17,41,3523,4302 GCATCCCTAACTTTGA-1,0,16,42,3404,4371 CACCCACACGTCACCC-1,0,17,43,3523,4439 CCCTCATTCTGGAATT-1,0,16,44,3404,4508 AGGGCGTGATCGGCTA-1,0,17,45,3523,4577 GGTGCGGATAAGTGGC-1,0,16,46,3404,4646 TAATATTGAAATTCGC-1,0,17,47,3523,4715 CTTACACTGGGAAATA-1,0,16,48,3404,4784 ACCAAGAACGCGTGTC-1,0,17,49,3523,4852 GCCTTCAGCCCTACCG-1,0,16,50,3404,4921 GATGCTACAAGCGCCT-1,0,17,51,3523,4990 CCGGGACCCGCAGAGA-1,0,16,52,3404,5059 GTTCCAGTCTGACCAT-1,0,17,53,3523,5128 ATGATCGGGAATAGAC-1,0,16,54,3403,5197 TTGGATTGGGTACCAC-1,0,17,55,3523,5265 TACCTCACGCTTGTAC-1,0,16,56,3403,5334 CATGGCAGGAAGATCG-1,0,17,57,3523,5403 ATGACGCCGGCTCTAA-1,0,16,58,3403,5472 AGCGACATCCCATTCA-1,0,17,59,3523,5541 AGTAATGTCTTGCCGC-1,0,16,60,3403,5610 TTCTTAGTGGCTCAGA-1,0,17,61,3523,5678 CGTCTGGAAGGGCCCG-1,0,16,62,3403,5747 ACGTGCGCCTCGTGCA-1,1,17,63,3523,5816 AGAGCGGGCTAATCAT-1,0,16,64,3403,5885 GCGTCGAAATGTCGGT-1,1,17,65,3523,5954 AACTGATATTAGGCCT-1,0,16,66,3403,6023 CGAGCTGGGCTTTAGG-1,1,17,67,3523,6091 GGGTGTTTCAGCTATG-1,0,16,68,3403,6160 TTAATTTCAGACGCGG-1,1,17,69,3523,6229 ACTGCCGTCGTAACTC-1,1,16,70,3403,6298 GTGCACGAAAGTGACT-1,1,17,71,3523,6367 ATCTCCCTGCAATCTA-1,1,16,72,3403,6436 ACGCCAGATGATTTCT-1,1,17,73,3523,6504 AGCTATTTAATCCAAC-1,1,16,74,3403,6573 CCACGAGAAGAGAATC-1,1,17,75,3523,6642 GATTCCGCGTTTCCGT-1,1,16,76,3403,6711 GTCGGATGTAGCGCGC-1,1,17,77,3523,6780 TATTTATACCGAGTAG-1,1,16,78,3403,6849 GTAGGTGATCCGTGTA-1,1,17,79,3523,6917 AGTTAAGCGGTCCCGG-1,1,16,80,3403,6986 CTGGCGACATAAGTCC-1,1,17,81,3523,7055 TTGGCCTAGAATTTCG-1,1,16,82,3403,7124 GGCATATCGGTTCTGC-1,1,17,83,3523,7193 GGGCGTCCACTGGCTC-1,1,16,84,3403,7261 TTACCCATTGCCGGGT-1,1,17,85,3523,7330 TTAGACACGATCGTTG-1,1,16,86,3403,7399 GCGCTGATCCAGACTC-1,1,17,87,3523,7468 TTCGGCAACCCGCTGA-1,1,16,88,3403,7537 GATATTTCCTACATGG-1,1,17,89,3523,7606 CTGCGTTACGATATAA-1,1,16,90,3403,7674 TAATAAACAAGGAGAT-1,1,17,91,3523,7743 
AACCTTTACGACGTCT-1,1,16,92,3403,7812 AGTCCCGCCTTTAATT-1,1,17,93,3523,7881 TGAGATTAGGCCCTAA-1,1,16,94,3403,7950 AGTGTATTGCGCATTG-1,1,17,95,3523,8019 GTTGGATTCAGTGGCT-1,1,16,96,3403,8087 TAAAGCTGCAATAGGG-1,1,17,97,3523,8156 AGTAGGAAGGAAGTTG-1,1,16,98,3403,8225 TATCACTTCGAGTAAC-1,1,17,99,3523,8294 TGATCTACGCTGATCT-1,1,16,100,3403,8363 GGATCATCCCGTACGC-1,1,17,101,3523,8432 TGACACTTCTCTTTGC-1,1,16,102,3403,8500 AGCCCTTCTAATCCGA-1,1,17,103,3523,8569 CACCGCGTCCACTCTA-1,1,16,104,3403,8638 TAATTGGAATCGGGAA-1,1,17,105,3523,8707 TCGTAAGCTCCGAGGA-1,1,16,106,3403,8776 TATATTACAAATGTCG-1,1,17,107,3523,8845 CGCGAGAGGGACTTGT-1,1,16,108,3403,8913 GGACCTACGGTAACGT-1,1,17,109,3523,8982 GAAATATGCTTGAATG-1,1,16,110,3403,9051 CCGTATTAGCGCAGTT-1,1,17,111,3523,9120 AGGCGTCTATGGACGG-1,1,16,112,3403,9189 AACATCGATACGTCTA-1,1,17,113,3523,9258 TGAATATGCTATAAAC-1,1,16,114,3403,9326 ACCAAACACCCAGCGA-1,1,17,115,3523,9395 TGGCTTGTACAAGCTT-1,1,16,116,3403,9464 GAATGAAGGTCTTCAG-1,0,17,117,3523,9533 AGATACCAGCACTTCA-1,0,16,118,3403,9602 GCGGTCCCGGTGAAGG-1,0,17,119,3523,9671 GAGGCATTTGCAGCAG-1,0,16,120,3403,9739 GGCAAGCCAGGGATAG-1,0,17,121,3523,9808 TCTACGGGCTCAGTTG-1,0,16,122,3403,9877 TCTGCGAATCGTTCGC-1,0,17,123,3523,9946 AGCTCGTTGATGGAAA-1,0,16,124,3403,10015 TGAATGAGATACAGCA-1,0,17,125,3523,10083 ACCCTTGCCTGGGTCG-1,0,16,126,3403,10152 GGCGAACCGTTCTGAT-1,0,17,127,3523,10221 GCGATGTCTGTGCTTG-1,0,18,0,3643,1480 ATTAACACCTGAGATA-1,0,19,1,3763,1549 GAAATCTGACCAAGTT-1,0,18,2,3643,1618 CCTGACAAACTCGCGC-1,0,19,3,3763,1686 ATGTCATTTCCCATTG-1,0,18,4,3643,1755 GCGGTGCGGAGCATCG-1,0,19,5,3763,1824 CGGAAAGCAAATGTGC-1,0,18,6,3643,1893 GCTGAGCAACGGTTCT-1,0,19,7,3763,1962 TCACTCTTCGTCTGTC-1,0,18,8,3643,2030 GATCTTCATTGTCCTC-1,0,19,9,3763,2099 ACTCGATGTATTTCAT-1,0,18,10,3643,2168 TGAAGAGCGGTCCTAG-1,0,19,11,3763,2237 TAAACGTCGTCAATGA-1,0,18,12,3643,2306 ACGTTATTGGTCACTC-1,0,19,13,3763,2375 ATAGCCTCAGTACCCA-1,0,18,14,3643,2443 CGGGAGTATACCGCCG-1,0,19,15,3763,2512 GTCCTACAGGCGGCTC-1,0,18,16,3643,2581 CGGATAAGCGGACATG-1,0,19,17,3763,2650 AACTTCTGCGTCTATC-1,0,18,18,3643,2719 GGGTTCAGACGAACAA-1,0,19,19,3763,2788 AGTCTGGACATCCTTG-1,0,18,20,3643,2856 TTGAACGAATCCTTTG-1,0,19,21,3763,2925 GAAATACTAAACGTTT-1,0,18,22,3643,2994 CCCGCGCAATGCACCC-1,0,19,23,3763,3063 TTCGGCTAGAGATGGT-1,0,18,24,3643,3132 GACACGAGTTAGAGGA-1,0,19,25,3763,3201 GAGGTCCCAAAGATCT-1,0,18,26,3643,3269 TAACTCCATGGAGGCT-1,0,19,27,3763,3338 CTTGTTTATGTAGCCA-1,0,18,28,3643,3407 GATGGCGCACACATTA-1,0,19,29,3763,3476 ATAATAGTGTAGGGAC-1,0,18,30,3643,3545 CGCTATTCAATGTATG-1,0,19,31,3763,3614 ATATTGCTGTCAAAGT-1,0,18,32,3643,3682 GGATTCAGTACGGTGG-1,0,19,33,3763,3751 TTCTTAGTGAACGGTG-1,0,18,34,3643,3820 AATGGTTCTCACAAGC-1,0,19,35,3763,3889 TATACACGCAAAGTAT-1,0,18,36,3643,3958 CTTCATAGCTCAAGAA-1,0,19,37,3763,4027 CAACGGTTCTTGATAC-1,0,18,38,3643,4095 ACACCCGAGAAATCCG-1,0,19,39,3763,4164 TCTATCATGCAGTTAC-1,0,18,40,3643,4233 CCCGCCATGCTCCCGT-1,0,19,41,3763,4302 CGCTTCCACTGAAATC-1,0,18,42,3643,4371 CACTGTCCAAGTGAGA-1,0,19,43,3763,4440 ATTACTAGCCTCTTGC-1,0,18,44,3643,4508 CATAGTAGCATAGTAG-1,0,19,45,3763,4577 CAACTCCTTGATCCCG-1,0,18,46,3643,4646 AAGTAGAAGACCGGGT-1,0,19,47,3763,4715 GCGGGAACCAGGCCCT-1,0,18,48,3643,4784 ATTAGATTGATAGCGG-1,0,19,49,3763,4852 CTCGGTCCGTAGCCTG-1,0,18,50,3643,4921 TGGCTTTGGGTAGACA-1,0,19,51,3763,4990 TATCCATATCATGCGA-1,0,18,52,3643,5059 GGAGTGCCGCCCTGGA-1,0,19,53,3763,5128 TGAGAATGCTTTACCG-1,0,18,54,3643,5197 TTAACCAACCCTCCCT-1,1,19,55,3763,5265 TGTTTCGGTACTTCTC-1,0,18,56,3643,5334 TTGCTGAAGGAACCAC-1,1,19,57,3763,5403 TATTTAGTCTAGATCG-1,1,18,58,3643,5472 
CTCCGGCCTAATATGC-1,1,19,59,3763,5541 TTGTGGCCCTGACAGT-1,1,18,60,3643,5610 TCGCCGGTCGATCCGT-1,1,19,61,3763,5678 CCATAGGTTGGCGTGG-1,1,18,62,3643,5747 GAACGACCGAATGATA-1,1,19,63,3763,5816 TCCGATAATTGCCATA-1,1,18,64,3643,5885 CATTACGTCGGCCCGT-1,1,19,65,3763,5954 CAAGCACCAAATGCCT-1,1,18,66,3643,6023 TGCATGGATCGGATCT-1,1,19,67,3763,6091 GAAATCGCGCGCAACT-1,1,18,68,3643,6160 CTGAAAGAGATCCGAC-1,1,19,69,3763,6229 CACCTCGATGGTGGAC-1,1,18,70,3643,6298 ATTTGTTCCAGGGCTC-1,1,19,71,3763,6367 TGGGCCACAAGAGCGC-1,1,18,72,3643,6436 CCTTCTTGATCCAGTG-1,1,19,73,3763,6504 CCTCGCCAGCAAATTA-1,1,18,74,3643,6573 TTCATGGCGCAACAGG-1,1,19,75,3763,6642 TTAATCAGTACGTCAG-1,1,18,76,3643,6711 CCTATCTATATCGGAA-1,1,19,77,3763,6780 ATTATACTTTGCTCGT-1,1,18,78,3643,6849 ATGGATCCGGCGTCCG-1,1,19,79,3763,6917 CGCCCGCTTCCGTACA-1,1,18,80,3643,6986 GGATTCCGCTATACCC-1,1,19,81,3763,7055 CGGTCTATCAACCCGT-1,1,18,82,3643,7124 ATGCCGGTTGATGGGA-1,1,19,83,3763,7193 TCATGCAGGTTCTCAT-1,1,18,84,3643,7261 TGAGCTTTAATGACGC-1,1,19,85,3763,7330 TCCCTTAGATTACTCG-1,1,18,86,3643,7399 ATATCTCCCTCGTTAA-1,1,19,87,3763,7468 AGCTCTTCCCAGTGCA-1,1,18,88,3643,7537 TCGCTAAACCGCTATC-1,1,19,89,3763,7606 CACATTCTTTCGATGG-1,1,18,90,3643,7674 GATATGCGGTAGCCAA-1,1,19,91,3763,7743 CGTTTCACTTCGGGCG-1,1,18,92,3643,7812 CCAATTACGGGTCGAG-1,1,19,93,3763,7881 GCAGGTAGAGTATGGT-1,1,18,94,3643,7950 GTCGTATTGGCGTACA-1,1,19,95,3763,8019 GAAATTAGCACGGATA-1,1,18,96,3643,8087 AATGCACCAAGCAATG-1,1,19,97,3763,8156 AGGACGCTCGATGTTG-1,1,18,98,3643,8225 GGCTAAAGGGCGGGTC-1,1,19,99,3763,8294 CATCTATCCCGTGTCT-1,1,18,100,3643,8363 CAGTAACTATTTATTG-1,1,19,101,3763,8432 CATATACTACTGATAA-1,1,18,102,3643,8500 GCGTTCGGAGACCGGG-1,1,19,103,3763,8569 AAGTTCAGTCTGCGTA-1,1,18,104,3643,8638 CGAAGCTATAAATTCA-1,1,19,105,3763,8707 CGCGGTCACAAACCAA-1,1,18,106,3643,8776 GGGAATGAGCCCTCAC-1,1,19,107,3763,8845 ACGGAGCGCAAATTAC-1,1,18,108,3643,8913 CGTTCTTCGCACACCT-1,1,19,109,3763,8982 GAATAGCCCTGCGGTC-1,1,18,110,3643,9051 AATAGCTACCGCGTGC-1,1,19,111,3763,9120 CCGAGCTGTGCTTGTC-1,1,18,112,3643,9189 GATGACGATGATCGCG-1,1,19,113,3763,9258 GCCTATGCTGGGCCTT-1,0,18,114,3643,9326 TTACTGTCTAGAGCTC-1,1,19,115,3763,9395 AGCGGTTGCCGCTCTG-1,0,18,116,3643,9464 GCTTGCAGCACAATTG-1,0,19,117,3763,9533 CCGGAGGTCTTATGGT-1,0,18,118,3643,9602 ACAGTATACCGTGGGA-1,0,19,119,3763,9671 GGGATCCCAATACAAA-1,0,18,120,3643,9739 ATTACGACTCCACAGT-1,0,19,121,3763,9808 CTCACACGCAAGCCTA-1,0,18,122,3643,9877 CCAGATGTAAATGGGT-1,0,19,123,3763,9946 GAACTTGTGCACGGGA-1,0,18,124,3643,10015 AAGCCGCTTTACCTTG-1,0,19,125,3763,10083 TCCATTCCCACTAGAG-1,0,18,126,3643,10152 AGAGCGCTTGTAACGG-1,0,19,127,3763,10221 TGGGTTCCCGGACGGA-1,0,20,0,3883,1480 GCTGCGCCTCCCACGT-1,0,21,1,4002,1549 CTGTTGGCTCTTCTGA-1,0,20,2,3883,1618 TTGTTCTAGATACGCT-1,0,21,3,4002,1686 CCCTCAAGCTCTTAGT-1,0,20,4,3883,1755 TGGTCTAGCTTACATG-1,0,21,5,4002,1824 ATGCACCTTCCTTAAT-1,0,20,6,3883,1893 GGGATACGGTAATAAT-1,0,21,7,4002,1962 AGTTCACCGGTTGGAC-1,0,20,8,3883,2030 GACATACTGTCGCAGA-1,0,21,9,4002,2099 TGGACACCGTTGCTTG-1,0,20,10,3883,2168 TGCGATGCTAATGGCT-1,0,21,11,4002,2237 TTCTGTTTCCTGTCGC-1,0,20,12,3883,2306 CGTTGTAAACGTCAGG-1,0,21,13,4002,2375 GATCGGCGATAAGTCG-1,0,20,14,3883,2443 AGCCTTAAAGCGGAAG-1,0,21,15,4002,2512 TCCGTAACCACAATCC-1,0,20,16,3883,2581 GAATGCCGAAATGACC-1,0,21,17,4002,2650 TATACTCATGCGGCAA-1,0,20,18,3883,2719 TAGTGTCAGAAACGGC-1,0,21,19,4002,2788 CGTCATACCATATCCA-1,0,20,20,3883,2856 TAGTACCTTAGTGGTC-1,0,21,21,4002,2925 CTGGCGGGAATAAGTA-1,0,20,22,3883,2994 AGTGTGGTCTATTGTG-1,0,21,23,4002,3063 GCTATCGCGGCGCAAC-1,0,20,24,3883,3132 CAGTAATCCCTCCCAG-1,0,21,25,4002,3201 
GTATTAAGGCGTCTAA-1,0,20,26,3883,3269 CTAATTTCAACAACAC-1,0,21,27,4002,3338 TTAGCAACATGGATGT-1,0,20,28,3883,3407 ATGCTCAGTGTTGCAT-1,0,21,29,4002,3476 GATGTTTGTGCGAGAT-1,0,20,30,3883,3545 CACTTCGCCACAGGCT-1,0,21,31,4002,3614 CTGTATGGTGTAGAAA-1,0,20,32,3883,3682 TACGTGCACTATGCTG-1,0,21,33,4002,3751 GTATCAGCTTGGGTTC-1,0,20,34,3883,3820 TAACAAAGGGAGAAGC-1,0,21,35,4002,3889 TTACATCGTGGCCTGG-1,0,20,36,3883,3958 TCTGAACCGGTCGGCT-1,0,21,37,4002,4027 GGCTCGTGCCACCAGC-1,0,20,38,3883,4095 TATAAGTGAGGATAGC-1,0,21,39,4002,4164 GCCCGCGCGTTTGACA-1,0,20,40,3883,4233 TGCTGGTTGGACAATT-1,0,21,41,4002,4302 GCCTAGCGATCTGACC-1,0,20,42,3883,4371 CCATGCTCTGCAGGAA-1,0,21,43,4002,4440 TATAAATCCACAAGCT-1,0,20,44,3883,4508 CGCTCGACATAATGAT-1,0,21,45,4002,4577 CCAATTGAATGTTAAT-1,0,20,46,3883,4646 GCTAATACCGAATGCC-1,1,21,47,4002,4715 TTCAACGACCCGACCG-1,0,20,48,3883,4784 TTCCTCGAGGGTGTCT-1,1,21,49,4002,4852 CAACGACCCGTTTACA-1,1,20,50,3883,4921 CTTGTACTTGTTGACT-1,1,21,51,4002,4990 TCTCTAATAGCTGGTA-1,1,20,52,3883,5059 ATTATGCCATAGGGAG-1,1,21,53,4002,5128 GACAACGCAGCTTACG-1,1,20,54,3883,5197 AGATGACTCGCCCACG-1,1,21,55,4002,5265 GTGCGGGTCTCCAAAT-1,1,20,56,3883,5334 GTACGAGATTGCGACA-1,1,21,57,4002,5403 GTATAGGACTCAGTAG-1,1,20,58,3883,5472 TTGCACGGAGCAGCAC-1,1,21,59,4002,5541 CACAGCTAGGGAGTGA-1,1,20,60,3883,5610 ATACTAGCATGACCCT-1,1,21,61,4002,5678 CCAAGACTTCTGCGAA-1,1,20,62,3883,5747 ACATAATAAGGCGGTG-1,1,21,63,4002,5816 TAATACACAGTAGTAT-1,1,20,64,3883,5885 TCTTGGTAACACCAAA-1,1,21,65,4002,5954 AACTGGGTCCCGACGT-1,1,20,66,3883,6023 ATCACTTCATCCTCGC-1,1,21,67,4002,6091 TGGAAGGATAAAGATG-1,1,20,68,3883,6160 CATGATGCACAATTCT-1,1,21,69,4002,6229 TGCCTGATCAAACGAT-1,1,20,70,3883,6298 ATAGGGATATCCTTGA-1,1,21,71,4002,6367 CACCTAATCAGTTTAC-1,1,20,72,3883,6436 TGTGACTACGCCAGTC-1,1,21,73,4002,6504 CCCGACCATAGTCCGC-1,1,20,74,3882,6573 CGCGCCCGACTTAATA-1,1,21,75,4002,6642 TGCCACCTGGCGAAAC-1,1,20,76,3882,6711 CTGCCAAGGTTGGAAG-1,1,21,77,4002,6780 TCTCCAACGTAGGTTA-1,1,20,78,3882,6849 TTCTTGGAGTAATGAG-1,1,21,79,4002,6917 GTCTCGATCTGCTTTC-1,1,20,80,3882,6986 TACTCTCCGAACAAAT-1,1,21,81,4002,7055 ATCACATTAGAATATC-1,1,20,82,3882,7124 TACGGGATGCTAGCAG-1,1,21,83,4002,7193 AGCTGAAGTAAACCAA-1,1,20,84,3882,7261 CATGGGTCGGGTGTGG-1,1,21,85,4002,7330 CACCCACGAGGCAATT-1,1,20,86,3882,7399 TGCCATTACTAAAGAA-1,1,21,87,4002,7468 CCGTTACGTTAGAACA-1,1,20,88,3882,7537 GCCAGGAGTAACCGAT-1,1,21,89,4002,7606 GGAAAGTGCCCATGCC-1,1,20,90,3882,7674 TCTTACCGGAACTCGT-1,1,21,91,4002,7743 TATGTCAAGACCGACT-1,1,20,92,3882,7812 CCTAACTAAGGCTCTA-1,1,21,93,4002,7881 GCGGTGAACTGCGCTC-1,1,20,94,3882,7950 CCTCGCGCGATATAGG-1,1,21,95,4002,8019 ATCGGCAAGCAGTCCA-1,1,20,96,3882,8087 AGATCGTGCATAAGAT-1,1,21,97,4002,8156 ATTCAGGACCTATTTC-1,1,20,98,3882,8225 AGCTAGAAGCAGAAGT-1,1,21,99,4002,8294 TTCGCTATCTGACGTG-1,1,20,100,3882,8363 TTCCGCAGAGAAATAT-1,1,21,101,4002,8432 CAGTACATTCTCTAAA-1,1,20,102,3882,8500 GTGAAGATTTCAAGTG-1,1,21,103,4002,8569 AACGCATGATCTGGGT-1,1,20,104,3882,8638 CCCAGAGGAGGGCGTA-1,1,21,105,4002,8707 GGAAACCTTGTTGAAT-1,1,20,106,3882,8776 GTGAAGCCGTATAGTC-1,1,21,107,4002,8845 GAGCTGTCGTCTCGGA-1,1,20,108,3882,8913 TGTCATTTGTTGGGAA-1,1,21,109,4002,8982 ACCAACACCACACACT-1,1,20,110,3882,9051 AAATGATTCGATCAGC-1,1,21,111,4002,9120 CTGCTTTATGTCCGCG-1,1,20,112,3882,9189 GCGAGAGGCCATGTAA-1,1,21,113,4002,9258 ATTGACCGGCGATGAC-1,1,20,114,3882,9326 ACCCTGGTAACGCCCT-1,1,21,115,4002,9395 GTTGGACCGCATCAGG-1,1,20,116,3882,9464 CGTTTACAAGGCAGCT-1,0,21,117,4002,9533 CGGTCCATGAGACTCC-1,0,20,118,3882,9602 GTTCGTCTGGGTCCCT-1,0,21,119,4002,9671 TGTACTACTCTCACGG-1,0,20,120,3882,9739 
AGTGAAGATGGTGTCC-1,0,21,121,4002,9808 GAGGGCCATAATATTA-1,0,20,122,3882,9877 ACTCCCGTAGACTAGG-1,0,21,123,4002,9946 CGTCAAATGGTCGCAG-1,0,20,124,3882,10015 AAGTCTAGTAGCTGCC-1,0,21,125,4002,10083 AAGAAATCACCAGATT-1,0,20,126,3882,10152 GAGTGCACGGACAACA-1,0,21,127,4002,10221 GTTCAAATCAGATGTC-1,0,22,0,4122,1480 CATGGCTCCCTATGTC-1,0,23,1,4242,1549 GGCCACACGAAAGCCT-1,0,22,2,4122,1618 GAAGCCGGGTAAGCTC-1,0,23,3,4242,1686 TTGCGCTCTCTCGCTT-1,0,22,4,4122,1755 TACATGACCTTATCCG-1,0,23,5,4242,1824 GTCCTTCTACAACCCA-1,0,22,6,4122,1893 TATATGCTGGGTTGCC-1,0,23,7,4242,1962 GTTTATGGGATTTAGA-1,0,22,8,4122,2030 GGAACCCGAACAAGAA-1,0,23,9,4242,2099 AACGTTATCAGCACCT-1,0,22,10,4122,2168 CATCGTCCGGTTACTA-1,0,23,11,4242,2237 AGACAGCTCAGAATCC-1,0,22,12,4122,2306 GCAGATCCTCGCAAAT-1,0,23,13,4242,2375 GGGTCATGCGTACCAT-1,0,22,14,4122,2443 CTGGTCATTCCAATCC-1,0,23,15,4242,2512 TCAGGGCGCAAACTCG-1,0,22,16,4122,2581 GATGCCAGCAGAAGGC-1,0,23,17,4242,2650 GTTATTAACGTGGGAG-1,0,22,18,4122,2719 AATACAATGTTTCAGG-1,0,23,19,4242,2788 TTGCTGCACCTATCCA-1,0,22,20,4122,2856 CCAGAGACAAAGCCGG-1,0,23,21,4242,2925 CCGAAGTATATTGTTC-1,0,22,22,4122,2994 GCTAAACCTGAGGTGA-1,0,23,23,4242,3063 TCATACTTACAGATCC-1,0,22,24,4122,3132 CGAGCACTTCAAGTTT-1,0,23,25,4242,3201 TAGCAACCTGTCACAA-1,0,22,26,4122,3269 TGGGAAATGCCTTTCC-1,0,23,27,4242,3338 AGACCATGGGATACAA-1,0,22,28,4122,3407 TAAATGAATCCGTTTC-1,0,23,29,4242,3476 ACAACGGTCCCTGCGA-1,0,22,30,4122,3545 GTCACTCTCCAAATCT-1,0,23,31,4242,3614 TTCTACTTGCGAGGGC-1,0,22,32,4122,3682 CGCAATTACTTTCGGT-1,0,23,33,4242,3751 CTGTTCATCTCACGGG-1,0,22,34,4122,3820 TTCTTGTAACCTAATG-1,0,23,35,4242,3889 GCTTGATGATAATCAG-1,0,22,36,4122,3958 TTGGCTCGCATGAGAC-1,0,23,37,4242,4027 GCCCAGTTGGTATGCC-1,0,22,38,4122,4095 ATTCCTCCGCCAGTGC-1,0,23,39,4242,4164 TCGTCCGCTGGCGTCT-1,0,22,40,4122,4233 GGAGAAGTCATTGGCA-1,1,23,41,4242,4302 TTGTTAGCAAATTCGA-1,0,22,42,4122,4371 TCTAGCATCTTCGATG-1,1,23,43,4242,4440 TTCTAGGCCAATTGTG-1,1,22,44,4122,4508 TCACGGTCATCGCACA-1,1,23,45,4242,4577 ATGAAGCCAAGGAGCC-1,1,22,46,4122,4646 AATGACTGTCAGCCGG-1,1,23,47,4242,4715 CCAAACAGAACCCTCG-1,1,22,48,4122,4784 TATCGATGATTAAACG-1,1,23,49,4242,4852 GAACACACATCAACCA-1,1,22,50,4122,4921 CCCGTCAGCGTCTGAC-1,1,23,51,4242,4990 AGCATCGTCGATAATT-1,1,22,52,4122,5059 GACTAAGATCATGCAC-1,1,23,53,4242,5128 TAGGGTGTTTCAAGAG-1,1,22,54,4122,5197 TGGTTCGTAGCAAAGG-1,1,23,55,4242,5265 CTGTTCACTGCCTGTG-1,1,22,56,4122,5334 ATGTGCATCCGACGCA-1,1,23,57,4242,5403 TTGTCGTTCAGTTACC-1,1,22,58,4122,5472 CGGGATCAATGTAAGA-1,1,23,59,4242,5541 TTATCTGTATCATAAC-1,1,22,60,4122,5610 ATCGACTCTTTCCGTT-1,1,23,61,4242,5678 CTCATTTGATGGGCGG-1,1,22,62,4122,5747 GTAAGCGGGCAGTCAG-1,1,23,63,4242,5816 TCTATCGGTCGCAACA-1,1,22,64,4122,5885 AACGCGGTCTCCAGCC-1,1,23,65,4242,5954 ATTAATACTACGCGGG-1,1,22,66,4122,6023 CTTTAACTTTCAAAGG-1,1,23,67,4242,6091 CGTACCTGATAGGCCT-1,1,22,68,4122,6160 GAATGTTGGGTAATCT-1,1,23,69,4242,6229 TGCGGAGTAAAGGTGC-1,1,22,70,4122,6298 CCTGAATATTTACATA-1,1,23,71,4242,6367 TTGCTCCCATACCGGA-1,1,22,72,4122,6436 CCTCTAATCTGCCAAG-1,1,23,73,4242,6504 AGGTTGAGGCACGCTT-1,1,22,74,4122,6573 TCCCGTCAGTCCCGCA-1,1,23,75,4242,6642 TCCGATGACTGAGCTC-1,1,22,76,4122,6711 CAGCCTCCTGCAGAGG-1,1,23,77,4242,6780 CTTAGCCTTCCACATG-1,1,22,78,4122,6849 ATTAATGAACCAGTCG-1,1,23,79,4242,6917 ACGATACATAGAACTA-1,1,22,80,4122,6986 AGCCACTCCCGTGCTT-1,1,23,81,4242,7055 ATACGGGTTTCGATTG-1,1,22,82,4122,7124 CTGTCAAATGGCTCGG-1,1,23,83,4242,7193 GCTCGGAATTTAAAGC-1,1,22,84,4122,7262 TAGGCATGTTACGCCA-1,1,23,85,4242,7330 TGGCAACTCGCGCGCC-1,1,22,86,4122,7399 ATCAGTAGGCAGGGAT-1,1,23,87,4242,7468 TATCGATCTATGCATA-1,1,22,88,4122,7537 
CGACTCAGGATGTTAT-1,1,23,89,4242,7606 GCCATATTGCACACAG-1,1,22,90,4122,7674 AATTCATAAGGGATCT-1,1,23,91,4242,7743 CGGTAGAGGTGCAGGT-1,1,22,92,4122,7812 AATGATGATACGCTAT-1,1,23,93,4242,7881 CTTGTGCTCACCGATT-1,1,22,94,4122,7950 TTCCAATCAGAGCTAG-1,1,23,95,4242,8019 CGATGGACCCTACGCC-1,1,22,96,4122,8087 GGTCGGATAAACGGCG-1,1,23,97,4242,8156 TTAGCTAATACGATCT-1,1,22,98,4122,8225 CTCGATATTTGCGAGC-1,1,23,99,4242,8294 ATTACTTACTGGGCAT-1,1,22,100,4122,8363 CTAGCCGATGTTATGA-1,1,23,101,4242,8432 TACTGCAATCAATTAC-1,1,22,102,4122,8500 TAGTCTGTGACGTTGC-1,1,23,103,4242,8569 CTCGTTTCTAATGTTT-1,1,22,104,4122,8638 TTCGTTCAACGAAGTT-1,1,23,105,4242,8707 CTGAATTTATTGCCAG-1,1,22,106,4122,8776 TGGAATATCCTTGACC-1,1,23,107,4242,8845 CAGATCATTTAAAGTC-1,1,22,108,4122,8913 CTCCTTTACGCAAGTC-1,1,23,109,4242,8982 TCCCAAACAGACAACG-1,1,22,110,4122,9051 ATCGCTGCGTGCAGCA-1,1,23,111,4242,9120 TTAGTTCAAGTGTTCG-1,1,22,112,4122,9189 AAACTCGTGATATAAG-1,1,23,113,4242,9258 TTAACGAACAAGCAGT-1,1,22,114,4122,9326 GTTATATCAGGAGCCA-1,1,23,115,4242,9395 CAAATTGGATTATGCC-1,0,22,116,4122,9464 CGAGGAGCTTCCATAT-1,0,23,117,4242,9533 GGAGACCAATGTGCTT-1,0,22,118,4122,9602 CATTGATGAACACGCC-1,0,23,119,4242,9671 GTCAATGCTATAATTT-1,0,22,120,4122,9739 ACCACCCTCTCTTCTA-1,0,23,121,4242,9808 TGGAGGGAAACACCTC-1,0,22,122,4122,9877 CACGGACGTGGATGGC-1,0,23,123,4242,9946 AACTTTCTCGATCATG-1,0,22,124,4122,10015 CGTATTGTTTCCTAAT-1,0,23,125,4242,10084 CCTACTGCGGCGGCCA-1,0,22,126,4122,10152 CTTAGGTCCCAATCGT-1,0,23,127,4242,10221 CGCAATCGATCATTAG-1,0,24,0,4362,1480 TGGTTATGCTTGCGGT-1,0,25,1,4481,1549 GGCTTGGCTCTCACCT-1,0,24,2,4362,1618 ATTGGTAGGATCCGCT-1,0,25,3,4481,1686 TCAGGGCGACTTCCTT-1,0,24,4,4362,1755 TCTGCAGATTCGAGTC-1,0,25,5,4481,1824 CTCTCGCTGTACTATG-1,0,24,6,4362,1893 AATAGTCGCGAGTCGG-1,0,25,7,4481,1962 AGTTACCCTTAAGACT-1,0,24,8,4362,2031 CTTAAATAAGACCCAT-1,0,25,9,4481,2099 GGTTGTGCTCTTGTCC-1,0,24,10,4362,2168 GTGAGTCTAAGACGGA-1,0,25,11,4481,2237 CGCGACACTGCGCAGC-1,0,24,12,4362,2306 GCTCGCGGTTCCGCTC-1,0,25,13,4481,2375 TTAACTCACGCGTGGA-1,0,24,14,4362,2443 GGAACGGCCTGCAGCC-1,0,25,15,4481,2512 GTAGAAACGGGTGGAG-1,0,24,16,4362,2581 TAATGAAAGACCCTTG-1,0,25,17,4481,2650 AGGCTTGCTAGACACC-1,0,24,18,4362,2719 TTGCGTAGTTTGAGGA-1,0,25,19,4481,2788 CGCCCTTGAAGGCTGA-1,0,24,20,4362,2856 CCCGGTGGAAGAACCT-1,0,25,21,4481,2925 TTAACACCTCGAACAT-1,0,24,22,4362,2994 GATTCCTATACGGCGC-1,0,25,23,4481,3063 TTACCCTAACAGTCCT-1,0,24,24,4362,3132 ACCCACCTACATGCTC-1,0,25,25,4481,3201 AAAGGGCAGCTTGAAT-1,0,24,26,4362,3269 CACACAGGGATAGATT-1,0,25,27,4481,3338 AGAGCGTACAAGCTCG-1,0,24,28,4362,3407 TCTTACGGCATCCGAC-1,0,25,29,4481,3476 GCCTATTCCGATATAG-1,0,24,30,4362,3545 GAAAGTGACTAACTGC-1,0,25,31,4481,3614 CCGGAATGGTTTCAGT-1,0,24,32,4362,3682 AGTATAATACTAGGCA-1,0,25,33,4481,3751 TAACTATCGAAGGTCC-1,0,24,34,4362,3820 ATGAGGAGTGTTAATC-1,1,25,35,4481,3889 TGTGTCGCGAGTTGCA-1,0,24,36,4362,3958 ATCCAACGCAGTCATA-1,1,25,37,4481,4027 AAGGCGCGTAAAGCTT-1,1,24,38,4362,4095 AGTCGGCCCAAACGAC-1,1,25,39,4481,4164 AACGTCAGACTAGTGG-1,1,24,40,4362,4233 ACTACCAGCTCTCTGG-1,1,25,41,4481,4302 GCAAGTGCACAGAGAA-1,1,24,42,4362,4371 ACACCTTAAGTAGGGC-1,1,25,43,4481,4440 TTCGACGGGAAGGGCG-1,1,24,44,4362,4508 TTCGCACTCGCGTGCT-1,1,25,45,4481,4577 TATTTGTTACCCTTTA-1,1,24,46,4362,4646 CGCTGTGACGCCGCAC-1,1,25,47,4481,4715 GTTGCACGGAGTTTCG-1,1,24,48,4362,4784 GTTTCCTGGAGGGTGA-1,1,25,49,4481,4853 ACACCCAGCATGCAGC-1,1,24,50,4362,4921 TCAACCATGTTCGGGC-1,1,25,51,4481,4990 TTACAACTACGCATCC-1,1,24,52,4362,5059 TCCGATGGTGCGACAT-1,1,25,53,4481,5128 GGGCGTACATTTATAT-1,1,24,54,4362,5197 AGCGACCAACGATATT-1,1,25,55,4481,5265 
ACACAAAGACGGGTGG-1,1,24,56,4362,5334 ATCGCACGCCGGGAGA-1,1,25,57,4481,5403 GCTCTAAACCCTGACG-1,1,24,58,4362,5472 AATGCAACCGGGTACC-1,1,25,59,4481,5541 TCAAACAACCGCGTCG-1,1,24,60,4362,5610 TATGCTCCCTACTTAC-1,1,25,61,4481,5678 AAAGGGATGTAGCAAG-1,1,24,62,4362,5747 ACGATCATACATAGAG-1,1,25,63,4481,5816 TTGTTCAGTGTGCTAC-1,1,24,64,4362,5885 ATGCATGATCCAGGAT-1,1,25,65,4481,5954 AGTCTTCTCCTCAAAT-1,1,24,66,4362,6023 GATTCCCTTGTCGCAG-1,1,25,67,4481,6091 CTCGCACCTATATAGT-1,1,24,68,4362,6160 ACTCAATAAAGGCACG-1,1,25,69,4481,6229 AACCGAGCTTGGTCAT-1,1,24,70,4362,6298 TAAGGCAACATAAGAT-1,1,25,71,4481,6367 CACGCACAGCGCAGCT-1,1,24,72,4362,6436 GGTTTACAATCTCAAT-1,1,25,73,4481,6504 TGCAGGATCGGCAAAG-1,1,24,74,4362,6573 ATAACGGAGTCCAACG-1,1,25,75,4481,6642 AACGATATGTCAACTG-1,1,24,76,4362,6711 GACAACGACCATTGAA-1,1,25,77,4481,6780 TTGACCATGTTCTCCG-1,1,24,78,4362,6849 AGTACGGGCACCTGGC-1,1,25,79,4481,6917 CGCCATCCGATTATGA-1,1,24,80,4362,6986 AAGGTATCCTAATATA-1,1,25,81,4481,7055 TGTTGTCAAGAAGTCT-1,1,24,82,4362,7124 CAGTGAATAAATGACT-1,1,25,83,4481,7193 CACCTTGCGAAACTCG-1,1,24,84,4362,7262 CATTTAGCGGACCATG-1,1,25,85,4481,7330 CCAGTCTAGACGGCGC-1,1,24,86,4362,7399 TCGCTTTAAACGTTTG-1,1,25,87,4481,7468 GTGAAACGGCGCCACC-1,1,24,88,4362,7537 GGGCTCATCGAACCCA-1,1,25,89,4481,7606 TTGATGTGTAGTCCCG-1,1,24,90,4362,7674 CAGTAGCCCACGCGGT-1,1,25,91,4481,7743 AGCGCGGGTGCCAATG-1,1,24,92,4362,7812 TAATCGATCCGTACGT-1,1,25,93,4481,7881 AGTGGCGGCAATTTGA-1,1,24,94,4361,7950 CCTTTCAATGAAGAAA-1,1,25,95,4481,8019 CTCAGTCACGACAAAT-1,1,24,96,4361,8087 ATAGGCTAGCTTCGCA-1,1,25,97,4481,8156 CGGTTCAAGTAGGTGT-1,1,24,98,4361,8225 CAGTCGAGGATGCAAT-1,1,25,99,4481,8294 TATCACCCAACCGACC-1,1,24,100,4361,8363 AATGATGCGACTCCTG-1,1,25,101,4481,8432 TGGAACCACTGACACA-1,1,24,102,4361,8500 GCCAATAGGGCATCTC-1,1,25,103,4481,8569 TTCTTTGGTCGCGACG-1,1,24,104,4361,8638 ATTAGATTCCTCAGCA-1,1,25,105,4481,8707 CCGTGGAACGATCCAA-1,1,24,106,4361,8776 GGGTCGTGGCAAGTGT-1,1,25,107,4481,8845 TCGCTCGGCACCAGCG-1,1,24,108,4361,8913 ACGCAATCACTACAGC-1,1,25,109,4481,8982 CTCTAATGCATTGATC-1,1,24,110,4361,9051 GTCTCGACTAAGTTTG-1,1,25,111,4481,9120 TGGTTTAAACGTGGGT-1,1,24,112,4361,9189 CGCAGATCTTCACCCG-1,1,25,113,4481,9258 TCCAGATGTACGCCAA-1,1,24,114,4361,9326 CATTGCGGGTCAATTC-1,1,25,115,4481,9395 GACGTTCGTAAATACA-1,0,24,116,4361,9464 TACACCGTCGTTAGTC-1,0,25,117,4481,9533 ACGGGCGTATGCGACA-1,0,24,118,4361,9602 GAAGGCTACCATTGTT-1,0,25,119,4481,9671 TAAATCTTTACACCTC-1,0,24,120,4361,9739 AGTTTATGTAAAGACA-1,0,25,121,4481,9808 AGGAGACATCCACAGT-1,0,24,122,4361,9877 CAACCTGAACCTGCCA-1,0,25,123,4481,9946 AGTCCCTCGCAGAAAG-1,0,24,124,4361,10015 TGTATACGGATGATGA-1,0,25,125,4481,10084 TTGTGGTATAGGTATG-1,0,24,126,4361,10152 TCTGCACCATTAGTAA-1,0,25,127,4481,10221 AAATGTATCTTATCCC-1,0,26,0,4601,1480 ACTCTAAACCTGGGAT-1,0,27,1,4721,1549 GCTGGCAGGTGCCGTG-1,0,26,2,4601,1618 CTCATTCGTGAACATC-1,0,27,3,4721,1686 TCGCCGGATGGGCAAG-1,0,26,4,4601,1755 GGACTAAGTCAGGAGT-1,0,27,5,4721,1824 TATCAAAGGTCTGTAA-1,0,26,6,4601,1893 TTCAGTTTGTGGCAGC-1,0,27,7,4721,1962 TGTTCATAAATGTGCT-1,0,26,8,4601,2031 CTTAGCCCGGATAGTG-1,0,27,9,4721,2099 GATGCGAATGGTATTA-1,0,26,10,4601,2168 TCTAACTGTATGTAAA-1,0,27,11,4721,2237 TTAAACCTGGTTCCTT-1,0,26,12,4601,2306 GCTAAGTAAAGGCGAT-1,0,27,13,4721,2375 AGTTACCGCACATGGT-1,0,26,14,4601,2443 GACTGCGGCACGTGTA-1,0,27,15,4721,2512 TGGTGATCGTATTTGT-1,0,26,16,4601,2581 TTATCGCCTGCGAAGC-1,0,27,17,4721,2650 TGGAATTAGACGCTTT-1,0,26,18,4601,2719 TCGTCACACTGTTAGC-1,0,27,19,4721,2788 TTATGTTTGCGATAGA-1,0,26,20,4601,2856 GGTGCTGATCACAAAG-1,0,27,21,4721,2925 CATAGCCGCCCGGGAT-1,0,26,22,4601,2994 
GGTTAGTTACGGCGCC-1,0,27,23,4721,3063 ATTCCCACATAAACAA-1,0,26,24,4601,3132 ATTCAGTAGCAGGGTC-1,0,27,25,4721,3201 CAGTTCCGCGGGTCGA-1,0,26,26,4601,3269 AAGAGATGAATCGGTA-1,0,27,27,4721,3338 CGCAATTCTACAATAA-1,0,26,28,4601,3407 TAACGCTTTGAGAGCG-1,0,27,29,4721,3476 AGGCTATGGTTAGCTT-1,0,26,30,4601,3545 GAGGAATGGAGAGGTT-1,1,27,31,4721,3614 TCCTCTACGAGATGGC-1,1,26,32,4601,3682 TTGATTATGCAGATGA-1,1,27,33,4721,3751 TCAGTACTGACCCGCG-1,1,26,34,4601,3820 TTATGACAAACTGGAT-1,1,27,35,4721,3889 GTAAGTAGGGTATACC-1,1,26,36,4601,3958 CGCAAACACGAGTTAC-1,1,27,37,4721,4027 TGGCCGTATATTGACC-1,1,26,38,4601,4095 ACTGTAGCACTTTGGA-1,1,27,39,4721,4164 GCTCTATGTTACGTGC-1,1,26,40,4601,4233 TGCGCGATTAACGGAG-1,1,27,41,4721,4302 GAATCGACATGGTCAC-1,1,26,42,4601,4371 GACTAAGTAGGCTCAC-1,1,27,43,4721,4440 ATCTTGACCTGCAACG-1,1,26,44,4601,4508 ATGCACTACCGCATTG-1,1,27,45,4721,4577 CAGATACTAACATAGT-1,1,26,46,4601,4646 GATCGACACTATCTGA-1,1,27,47,4721,4715 ATAGAGTACTGGGACA-1,1,26,48,4601,4784 CCTACTGCTTACACTT-1,1,27,49,4721,4853 CCTGCTATTTGAGAAG-1,1,26,50,4601,4921 CGCGTTCATGAAATAC-1,1,27,51,4721,4990 CATTATGCTTGTTGTG-1,1,26,52,4601,5059 CCAGGGACGTGGCCTC-1,1,27,53,4721,5128 TATGGATGTGCTACGC-1,1,26,54,4601,5197 GTACTAAGATTTGGAG-1,1,27,55,4721,5265 AGACCCGCCCTCCTCG-1,1,26,56,4601,5334 CGCATTAGCTAATAGG-1,1,27,57,4721,5403 GCTCTCGGGTACCGAA-1,1,26,58,4601,5472 CACCGCCAGAAGGTTT-1,1,27,59,4721,5541 TCCCAAAGACGAAGGA-1,1,26,60,4601,5610 ATGGATTGACCAAACG-1,1,27,61,4721,5678 GTCATGGACATGACTA-1,1,26,62,4601,5747 CTACTGCCACCTGACC-1,1,27,63,4721,5816 TTATATTTGGCAATCC-1,1,26,64,4601,5885 AGCACCAGTACTCACG-1,1,27,65,4721,5954 CATGGTCTAGATACCG-1,1,26,66,4601,6023 TCTACCGTCCACAAGC-1,1,27,67,4721,6091 CTAGTTGGGCCCGGTA-1,1,26,68,4601,6160 TCCCGCGTACTCCTGG-1,1,27,69,4721,6229 CAGAGCATGAGCTTGC-1,1,26,70,4601,6298 ACACGGGAACTTAGGG-1,1,27,71,4721,6367 GGCTCTGCTCCAACGC-1,1,26,72,4601,6436 AGAACGTGGTACATTC-1,1,27,73,4721,6504 CAATAAACCTTGGCCC-1,1,26,74,4601,6573 ACTTCGCCATACGCAC-1,1,27,75,4721,6642 ATCTGGTTAAGACTGT-1,1,26,76,4601,6711 TCGTAAGACGACATTG-1,1,27,77,4721,6780 GTGTACCTTGGCTACG-1,1,26,78,4601,6849 GCCCGTAATACCTTCT-1,1,27,79,4721,6917 CGGTCAAGTGGGAACC-1,1,26,80,4601,6986 TTGTAAGGCCAGTTGG-1,1,27,81,4721,7055 GGAGCACCAAGAACTA-1,1,26,82,4601,7124 TAATAGTGACGACCAG-1,1,27,83,4721,7193 CTAAATCCTATTCCGG-1,1,26,84,4601,7262 CGAGTTCTGTCCCACC-1,1,27,85,4721,7330 AGGCAGATGCGTAAAC-1,1,26,86,4601,7399 AAGGATGAGGGACCTC-1,1,27,87,4721,7468 AGAGAACCGTCTAGGA-1,1,26,88,4601,7537 GAGGGCGCAGCTCTGC-1,1,27,89,4721,7606 AAGATTGGCGGAACGT-1,1,26,90,4601,7674 CCAGTAGTCTGATCCA-1,1,27,91,4721,7743 AAGGGACAGATTCTGT-1,1,26,92,4601,7812 ATAGAGTTATCAACTT-1,1,27,93,4721,7881 AAATTACCTATCGATG-1,1,26,94,4601,7950 GATCCTAAATCGGGAC-1,1,27,95,4721,8019 TTACAGACCTAAATGA-1,1,26,96,4601,8087 CCTCACCTTAGCATCG-1,1,27,97,4721,8156 CATGCGACCAGTTTAA-1,1,26,98,4601,8225 AACATATCAACTGGTG-1,1,27,99,4721,8294 CTATAAGAGCCAATCG-1,1,26,100,4601,8363 AATATCGAGGGTTCTC-1,1,27,101,4721,8432 GTACTCCTGGGTATGC-1,1,26,102,4601,8500 ATAAGTAGGATTCAGA-1,1,27,103,4721,8569 AGGTCGCGGAGTTACT-1,1,26,104,4601,8638 CTAATTCTCAGATATT-1,1,27,105,4721,8707 GCCAACCATTTCCGGA-1,1,26,106,4601,8776 TGATCCCAGCATTAGT-1,1,27,107,4721,8845 CGTTGTAAGATTGATT-1,1,26,108,4601,8913 GAAACCATGGTGCGCT-1,1,27,109,4721,8982 AATCTATGCCGGAGCC-1,1,26,110,4601,9051 GACTCCCAGAATAAGG-1,1,27,111,4721,9120 TATGATCCGGCACGCC-1,1,26,112,4601,9189 CCGCTTGCTGACATGG-1,1,27,113,4721,9258 TGGTTAAGGGCGCTGG-1,1,26,114,4601,9326 TTGATAGTCAATACAT-1,1,27,115,4721,9395 GGTTTAATTGAGCAGG-1,1,26,116,4601,9464 CATTACATAGATTGTG-1,0,27,117,4721,9533 
GGTACACCAGATTTAT-1,0,26,118,4601,9602 GGCCCGTATACCATGC-1,0,27,119,4721,9671 ATCTTTCGTATAACCA-1,0,26,120,4601,9739 GAGATGACAATCCTTA-1,0,27,121,4721,9808 AAAGCTTGCCTACATA-1,0,26,122,4601,9877 GAACGATAAGTTAAAG-1,0,27,123,4721,9946 TAATAGCTAAATGATG-1,0,26,124,4601,10015 TATGGCTAGGCTAATT-1,0,27,125,4721,10084 AGGAGAGTCTGGCTAC-1,0,26,126,4601,10152 TGCTCTGCCGGTTCAC-1,0,27,127,4721,10221 CCAATAGATTTCATCT-1,0,28,0,4841,1480 GGGCACGAATTGGCCG-1,0,29,1,4961,1549 TCGTTGACAGGGTCCC-1,0,28,2,4841,1618 ATCGTATTCCGAGAAC-1,0,29,3,4961,1686 GGGAATTCTGTCCAGT-1,0,28,4,4841,1755 ACGCGTTTCTTAAGAG-1,0,29,5,4961,1824 GAGAGCGCAGTCCCTG-1,0,28,6,4841,1893 GTCCTATTGTTGTGGT-1,0,29,7,4961,1962 CATCTGCAGGATCATT-1,0,28,8,4841,2031 GAGTCGACAGACCCTC-1,0,29,9,4961,2099 AAGTGCAAAGGTAGAC-1,0,28,10,4841,2168 AGGGTGGATAGTGCAT-1,0,29,11,4961,2237 TGATAGCGGGATTCTA-1,0,28,12,4841,2306 GTCAGTTTGGTAGTCG-1,0,29,13,4961,2375 GCATTCGAAATGAACA-1,0,28,14,4841,2443 AAAGACTGGGCGCTTT-1,0,29,15,4961,2512 TAACAATATTTGTTGC-1,0,28,16,4841,2581 CCAGCTTCCGCCCGCA-1,0,29,17,4961,2650 GATATGGATTACGCGG-1,0,28,18,4841,2719 AGAGCAGTTATGAGAC-1,0,29,19,4960,2788 TCACATCTTATCTGAT-1,0,28,20,4841,2856 TATGAAGACAGGTGCG-1,0,29,21,4960,2925 TACCTGCTGCACTGTG-1,0,28,22,4841,2994 TAGGTCCAAGTAAGGA-1,0,29,23,4960,3063 GAAACTCGTGCGATGC-1,0,28,24,4841,3132 AACAATTACTCTACGC-1,0,29,25,4960,3201 CCGCACGTGACCTCGG-1,0,28,26,4841,3269 AACTTGCCCGTATGCA-1,1,29,27,4960,3338 GGGTATGTATGCACTT-1,1,28,28,4841,3407 TTCGTACTCCAGAACG-1,1,29,29,4960,3476 GAATTTCTCGCTGCAG-1,1,28,30,4841,3545 AACAGGATGGGCCGCG-1,1,29,31,4960,3614 GACGTGTAGGGATTAT-1,1,28,32,4841,3682 TAGGTGAGCCCTACTC-1,1,29,33,4960,3751 CTAATTCGCACGCGCT-1,1,28,34,4841,3820 GAAGCTTGCTGACCGC-1,1,29,35,4960,3889 GGTTAGGCTTGGAGAA-1,1,28,36,4841,3958 ACAAGGACAAGAGGTT-1,1,29,37,4960,4027 AGGCCACCCGTTATGA-1,1,28,38,4841,4095 GTGGGCTTAGACACAC-1,1,29,39,4960,4164 CGTGTCCCATTCGCGA-1,1,28,40,4841,4233 TGGAGTGATGCGATGA-1,1,29,41,4960,4302 AACAACTGGTAGTTGC-1,1,28,42,4841,4371 CCTGGCTAGACCCGCC-1,1,29,43,4960,4440 CGCAATTAGGGTAATA-1,1,28,44,4841,4508 TCGAAATTTAGGACCA-1,1,29,45,4960,4577 AGACTAGCCTTCCAGA-1,1,28,46,4841,4646 TTGATCTAACTTTGTC-1,1,29,47,4960,4715 AAGGAGCGGTTGGTGC-1,1,28,48,4841,4784 ACTTGGGACCCGGTGG-1,1,29,49,4960,4853 TGATCTCCGGCGCCAG-1,1,28,50,4841,4921 CAGTTCAAATTGACAC-1,1,29,51,4960,4990 GTCCGGCTGAATTGCG-1,1,28,52,4841,5059 CTGGAAATGGATGCTT-1,1,29,53,4960,5128 TGATCGGTTTGACCCT-1,1,28,54,4841,5197 TAGAGTCTAAGCGAAC-1,1,29,55,4960,5265 GAGACTGATGGGTAGA-1,1,28,56,4841,5334 TAGCTAAGTCCGGGAG-1,1,29,57,4960,5403 GGGCGATATGTGTGAA-1,1,28,58,4841,5472 CTCGAGGTCGAACAGT-1,1,29,59,4960,5541 GATCCCTTTATACTGC-1,1,28,60,4841,5610 GTCATGCACCTCCGTT-1,1,29,61,4960,5678 ACTTTCCTATAGCTTC-1,1,28,62,4841,5747 TCGCTCGATATATTCC-1,1,29,63,4960,5816 ATAGGTTGGGCAGATG-1,1,28,64,4841,5885 CAATTAAGGGTGATGA-1,1,29,65,4960,5954 ACCGACTGAGTCCCAC-1,1,28,66,4841,6023 CCTGTCACCCGGGCTC-1,1,29,67,4960,6091 GATCGGTGGCCATAAC-1,1,28,68,4841,6160 CCTATGGGTTACCGTC-1,1,29,69,4960,6229 TTGGGACACTGCCCGC-1,1,28,70,4841,6298 CGAGGCTAAATATGGC-1,1,29,71,4960,6367 TCAGGGTGTAACGTAA-1,1,28,72,4841,6436 CGAGAGATGTGAACCT-1,1,29,73,4960,6504 TCGCTGGGCGGATTGT-1,1,28,74,4841,6573 AGATCTCAGGTGTGAT-1,1,29,75,4960,6642 TGGCCAAACTGAAGTA-1,1,28,76,4841,6711 GCTTCCGTCCCTAGAC-1,1,29,77,4960,6780 CAGCAGCCCGTTCCTT-1,1,28,78,4841,6849 TGTATAACAGATCCTG-1,1,29,79,4960,6917 CGCGGGAATTAGGCAG-1,1,28,80,4841,6986 TGCATGTGGTAATCTA-1,1,29,81,4960,7055 ACAATTTGAGCAGTGG-1,1,28,82,4841,7124 GAGCTAAGGGCATATC-1,1,29,83,4960,7193 CCAGATAGTTGAGTGA-1,1,28,84,4841,7262 
CCACAATGTACGTCTT-1,1,29,85,4960,7330 CAATGGATCTCTACCA-1,1,28,86,4841,7399 TGTGGCAAAGCGTATG-1,1,29,87,4960,7468 TAAAGCGTTAGGAGAA-1,1,28,88,4841,7537 TCCGTTTAGCCTTGAA-1,1,29,89,4960,7606 CAGCTCGACAAGTTAA-1,1,28,90,4841,7675 GCCTATAGTGTCAGGG-1,1,29,91,4960,7743 ATAGACAACGGGACCT-1,1,28,92,4841,7812 CTACTATCTTTCAGAG-1,1,29,93,4960,7881 GCGCTGCTTTGCATTT-1,1,28,94,4841,7950 GCGCATCCAGTCAGCA-1,1,29,95,4960,8019 GACTCGCGGGAATGAC-1,1,28,96,4841,8087 CTGGTAACGAGCTCTT-1,1,29,97,4960,8156 TCCGGCCTAGCGTACA-1,1,28,98,4841,8225 TCTAGGTGGCGACGCT-1,1,29,99,4960,8294 ACGCTAGTGATACACT-1,1,28,100,4841,8363 ATCTGCACCTCTGCGA-1,1,29,101,4960,8432 CCTCACCTGAGGGAGC-1,1,28,102,4841,8500 AGTGAGCCTCGCCGCC-1,1,29,103,4960,8569 ACGAGTACGGATGCCC-1,1,28,104,4841,8638 GGTACCATTAAGACGG-1,1,29,105,4960,8707 TTCTGCTAGACTCCAA-1,1,28,106,4841,8776 TAACTATTACGCCAAA-1,1,29,107,4960,8845 GCATTCAAGGCAACGC-1,1,28,108,4841,8913 AGTACATCATTTATCA-1,1,29,109,4960,8982 GTCGTGTCTGGTCATC-1,1,28,110,4841,9051 AGTCTAAAGTATACTC-1,1,29,111,4960,9120 CGGCCCAACCTGTAGT-1,1,28,112,4841,9189 AGGGAGACATACTTCG-1,1,29,113,4960,9258 TCCCTAGATCAATAGG-1,1,28,114,4840,9326 TCCCGTCGCGTCATAG-1,1,29,115,4960,9395 CGCATCCATCAGCCAG-1,0,28,116,4840,9464 CTGCACCTAGTCCACA-1,0,29,117,4960,9533 CGAGGATCGGGAACGA-1,0,28,118,4840,9602 CAATGAGGTTCGACTA-1,0,29,119,4960,9671 TCTGACGGGCTAACCC-1,0,28,120,4840,9739 TTCTATGCCTTTCGCA-1,0,29,121,4960,9808 AGAGTATAGTGTTACG-1,0,28,122,4840,9877 CCATTGTTTCCTCCAT-1,0,29,123,4960,9946 CTCATCACTTAGTGAT-1,0,28,124,4840,10015 CCGAAGGGCGTACCGC-1,0,29,125,4960,10084 TCAAGCTGCCTTGAAA-1,0,28,126,4840,10152 CTCATTAACGTTGCCC-1,0,29,127,4960,10221 GTCTTCCTCACCTAAG-1,0,30,0,5080,1480 GGTGATGAAGGAAGTG-1,0,31,1,5200,1549 TCAATACAATTGCTGC-1,0,30,2,5080,1618 GCAACCCAAGTTGTTT-1,0,31,3,5200,1686 ATGAAGTGGACCCAGC-1,0,30,4,5080,1755 GAGAATCTCACGATCA-1,0,31,5,5200,1824 TATATCATTGATCAGT-1,0,30,6,5080,1893 AACTTTACGGGAGCTT-1,0,31,7,5200,1962 TTCTTGTGTCCATCAG-1,0,30,8,5080,2031 ACAATTTAGGAGGCTC-1,0,31,9,5200,2099 ATACTTGTTCTCGAGC-1,0,30,10,5080,2168 CACGGGATTGAGGGTT-1,0,31,11,5200,2237 GTTAATGTCTATCTTA-1,0,30,12,5080,2306 GCGTTATATTTGGAAC-1,0,31,13,5200,2375 CGTCAAGGCTATAAAT-1,0,30,14,5080,2444 TTAGCTCTGTAATCCG-1,0,31,15,5200,2512 AATGGTCCACCGTTCA-1,0,30,16,5080,2581 GTCATTGCATTGACCC-1,0,31,17,5200,2650 TGTCCGTGGCGCCTTT-1,0,30,18,5080,2719 TCAACTAACGTATAAC-1,0,31,19,5200,2788 TCCTCTCCAGTTGTCC-1,0,30,20,5080,2856 TGTGTTCGTATCCAAG-1,0,31,21,5200,2925 CCGCGTAGGTAAGGGC-1,0,30,22,5080,2994 CTGCGGGTGAAATGTT-1,0,31,23,5200,3063 TATCTACAGAGGTAAT-1,0,30,24,5080,3132 CTACTCTAGGCCCGGC-1,1,31,25,5200,3201 ACAAGCAGTGCCTAGC-1,1,30,26,5080,3269 TACAAGTCTCGTGCAT-1,1,31,27,5200,3338 TCGGAATGCGCTCTGA-1,1,30,28,5080,3407 TCGCGTCCAGAAGGTC-1,1,31,29,5200,3476 TATGGCCCGGCCTCGC-1,1,30,30,5080,3545 GCTGGCATATTCACCT-1,1,31,31,5200,3614 GTCAGAATAGTCTATG-1,1,30,32,5080,3682 GGCGTCCTATCCGCTG-1,1,31,33,5200,3751 CGGAGTTTGAGAGACA-1,1,30,34,5080,3820 AGCACTTAAGGACGCC-1,1,31,35,5200,3889 TCCACAATGGTTTACG-1,1,30,36,5080,3958 CCAACGATGCACTGAT-1,1,31,37,5200,4027 ATTTACAGTTTACTGG-1,1,30,38,5080,4095 CCCTGAAATGAGTTGA-1,1,31,39,5200,4164 CAAACGGTCGCACTTT-1,1,30,40,5080,4233 TGATTCGTCTATCACT-1,1,31,41,5200,4302 TCAGGTTCTTTGAGAA-1,1,30,42,5080,4371 CACGCAGCGAGGCTTT-1,1,31,43,5200,4440 TTAAGCGCCTGACCCA-1,1,30,44,5080,4508 CTTACACGGTATTCCA-1,1,31,45,5200,4577 AAGGCTGTGCTCATCG-1,1,30,46,5080,4646 GACCAGAGCCCTGTAG-1,1,31,47,5200,4715 TCCCAGGCTTAGCTAA-1,1,30,48,5080,4784 ATTGAAGATCTTAGTG-1,1,31,49,5200,4853 AGTTCCTACAGAATTA-1,1,30,50,5080,4921 GGGCTGGTTAGTCGCG-1,1,31,51,5200,4990 
GAAATGGCGGTGTTAG-1,1,30,52,5080,5059 TACGAACACGACTTCA-1,1,31,53,5200,5128 ACCACAAGTTTCTATC-1,1,30,54,5080,5197 ATATTTAACCCTCAAG-1,1,31,55,5200,5266 GATCATTCCAAACATT-1,1,30,56,5080,5334 TCCAGGCGAGTACGGT-1,1,31,57,5200,5403 GTTTGACCAAATCCTA-1,1,30,58,5080,5472 CACAGCACCCACGGCA-1,1,31,59,5200,5541 TGCAAGAATGACGTAA-1,1,30,60,5080,5610 GCGAAGCCATACCCGT-1,1,31,61,5200,5678 TCCTTTCTTACGCTTA-1,1,30,62,5080,5747 GCTGCTCTCCGGACAC-1,1,31,63,5200,5816 ACTGTCTTCTTTAGAA-1,1,30,64,5080,5885 TCAAACTTAGATTGTT-1,1,31,65,5200,5954 CTATGTCACTAGCCCA-1,1,30,66,5080,6023 TGCGCAAAGCATTTGG-1,1,31,67,5200,6091 TTAATGTAGACCAGGT-1,1,30,68,5080,6160 GGCGGTAGGATCATTG-1,1,31,69,5200,6229 GGCAATAGTCAATGAG-1,1,30,70,5080,6298 ACACGAGACTCCTTCT-1,1,31,71,5200,6367 GACACAAGGGAAGAAA-1,1,30,72,5080,6436 TCAGCAAATGCATCTC-1,1,31,73,5200,6504 GAGATCTGTCACTCCG-1,1,30,74,5080,6573 ATGCCGGTCTTGCATA-1,1,31,75,5200,6642 TTGGGCGGCGGTTGCC-1,1,30,76,5080,6711 TTGTTGTGTGTCAAGA-1,1,31,77,5200,6780 ACTGTACGATACACAT-1,1,30,78,5080,6849 TCCACTTTATCTAGGT-1,1,31,79,5200,6917 GGTCTGAGAATCTGGA-1,1,30,80,5080,6986 TAGAAAGGTGGCGCTA-1,1,31,81,5200,7055 TATGTCTCATTGTGCC-1,1,30,82,5080,7124 GGATTTCACTTCTATA-1,1,31,83,5200,7193 TGAGTGGTCCGTGACG-1,1,30,84,5080,7262 CGCTTTCTTGCATTCG-1,1,31,85,5200,7330 ACCCAACGCCCGTGGC-1,1,30,86,5080,7399 GAACGTCTCATGGTCG-1,1,31,87,5200,7468 AGGGTTCCCTTTGGTT-1,1,30,88,5080,7537 GTAGCTTCCTCTTGTT-1,1,31,89,5200,7606 GCATGAGGGACGCGGC-1,1,30,90,5080,7675 CTACCCTAAGGTCATA-1,1,31,91,5200,7743 TCACCGCTCGGCACTC-1,1,30,92,5080,7812 GGCTCGCGTTGAGGTA-1,1,31,93,5200,7881 CTAACGAAACTTGCTG-1,1,30,94,5080,7950 TTAAACTCGAATTCAT-1,1,31,95,5200,8019 TACTTTACTGAGCCGG-1,1,30,96,5080,8087 GCTTGGATCGATTAGG-1,1,31,97,5200,8156 CGGTTATCCAACAGTG-1,1,30,98,5080,8225 CAGACCTGTAAGTGTT-1,1,31,99,5200,8294 GACGGTCAATAGAAGC-1,1,30,100,5080,8363 CTGACTGCGCAGCTCG-1,1,31,101,5200,8432 CCATACCTTTACTTGT-1,1,30,102,5080,8500 GTAATAAAGGGCTCCC-1,1,31,103,5200,8569 GTGAACTCCCATTCGA-1,1,30,104,5080,8638 GTGGTTACTTCTTTCG-1,1,31,105,5200,8707 TCAGAACCTCCACAGG-1,1,30,106,5080,8776 TCCCACTCTCTTCCGG-1,1,31,107,5200,8845 ATCTTGACTTGTCCAA-1,1,30,108,5080,8913 TCGGGAACGTGCCTAG-1,1,31,109,5200,8982 GTTAGCCGTAAATCAA-1,1,30,110,5080,9051 ATTTACTAAGTCCATT-1,1,31,111,5200,9120 GGGTGCATATGAAAGC-1,1,30,112,5080,9189 TCCGAATGGTCCTGAG-1,1,31,113,5200,9258 TGATGGCTGTTTCTGA-1,1,30,114,5080,9326 AAATAAGGTAGTGCCC-1,1,31,115,5200,9395 CCACTATCCGGGTCAC-1,0,30,116,5080,9464 ACACCACATAATTAGC-1,0,31,117,5200,9533 CGCGGTAAGTCTAGCT-1,0,30,118,5080,9602 GCGGGCATTACGATGC-1,0,31,119,5200,9671 AGGATTGCTTACGACA-1,0,30,120,5080,9739 CTCGGGATAACACCTA-1,0,31,121,5200,9808 GTCGTCTGGTTGGCTA-1,0,30,122,5080,9877 GCAATTAGTCGCACCG-1,0,31,123,5200,9946 GTGACCTAAAGAATAA-1,0,30,124,5080,10015 CTGAGCGAGACTTATT-1,0,31,125,5200,10084 CAAGACTCAGAAGCGC-1,0,30,126,5080,10152 ACTTCGCTAGCGAGTG-1,0,31,127,5200,10221 CCATACTCGCCTCTCC-1,0,32,0,5320,1480 ACGATACCTATCCTGA-1,0,33,1,5440,1549 CTCACCAGTACAAGTG-1,0,32,2,5320,1618 CGAAGACGGTGAGTGC-1,0,33,3,5440,1686 AAATTAATAAGCGCGA-1,0,32,4,5320,1755 GGGCCCTTATCTATAC-1,0,33,5,5440,1824 CTGCCCACGAAGCGTT-1,0,32,6,5320,1893 GGACAAGTTGCAGTGA-1,0,33,7,5440,1962 GTCCGAGAGCAATCAT-1,0,32,8,5320,2031 ATGGCAGCCGAGAAAC-1,0,33,9,5440,2099 CCTCGGATGCTACCTG-1,0,32,10,5320,2168 ATACGGTGAAGATGCA-1,0,33,11,5440,2237 ACATCAGCTGGGACGC-1,0,32,12,5320,2306 GGTTGTGTAGCCTGGC-1,0,33,13,5440,2375 CCTGCGTTCTACGCTT-1,0,32,14,5320,2444 GAACGTTAGGAAGACG-1,0,33,15,5440,2512 CTGATAGTGTATCTCA-1,0,32,16,5320,2581 TTCTGCGAGCGCCCTT-1,0,33,17,5440,2650 CTTTGGCGCTTTATAC-1,0,32,18,5320,2719 
TCCGAACTTGGCTTAC-1,0,33,19,5440,2788 TAGATTCCTGGTTATT-1,0,32,20,5320,2856 CCGACAAAGGGAGTGC-1,0,33,21,5440,2925 CCATGGCCCTTGTACC-1,0,32,22,5320,2994 GAAATATCACCATCAG-1,1,33,23,5440,3063 ACGAAATGGGCGGCAC-1,1,32,24,5320,3132 GTGAGCGTGCTGCACT-1,1,33,25,5440,3201 CCGCGGGTACGAAGAA-1,1,32,26,5320,3269 TCCCTGGCTCGCTGGA-1,1,33,27,5440,3338 CAGCTTAGTAGGTAGC-1,1,32,28,5320,3407 CACGAAAGTTAGTCCC-1,1,33,29,5440,3476 ACCTAATCGACTTCCT-1,1,32,30,5320,3545 AAGTAGTGACGCGAGG-1,1,33,31,5440,3614 TCCGATTACATTGCCG-1,1,32,32,5320,3682 CCTCCGACAATTCAAG-1,1,33,33,5440,3751 GTTCACAGGAGTCTAG-1,1,32,34,5320,3820 CGAAGTTGCTCTGTGT-1,1,33,35,5440,3889 GTCGGATATCTCAGAC-1,1,32,36,5320,3958 CGCTCTCCGTAGATTA-1,1,33,37,5440,4027 CAAGCAACGTCGGAGT-1,1,32,38,5320,4095 CCATTCCCTGCCCACA-1,1,33,39,5439,4164 CTTTGGCTTTAGTAAA-1,1,32,40,5320,4233 GGCTATTAAGTTGTAT-1,1,33,41,5439,4302 CCATTAGCGATAATCC-1,1,32,42,5320,4371 TGTTCTTCCATTGACT-1,1,33,43,5439,4440 AGATAACTTCAGGGCC-1,1,32,44,5320,4508 ATAGACGAAGAGAAAG-1,1,33,45,5439,4577 GGCGGAGTAATATTAG-1,1,32,46,5320,4646 TGACCCACGTTAGACA-1,1,33,47,5439,4715 TCACAGGTTATTGGGC-1,1,32,48,5320,4784 TCACGCATTGTAGATC-1,1,33,49,5439,4853 TTGAAGAATTCCCAGG-1,1,32,50,5320,4921 AAATGGTCAATGTGCC-1,1,33,51,5439,4990 TAGTGCCCTCCAGAGT-1,1,32,52,5320,5059 GGTATTGCCGAGTTTA-1,1,33,53,5439,5128 CGTATTAAGAGATCTA-1,1,32,54,5320,5197 ACTGTCCAGGATTATA-1,1,33,55,5439,5266 CGGGCAGCTAAACCGC-1,1,32,56,5320,5334 TTGCTGATCATGTTCG-1,1,33,57,5439,5403 TATGGGTACGTATCGT-1,1,32,58,5320,5472 CAGCTCACTGAGACAT-1,1,33,59,5439,5541 GGGACTGCATAGATAG-1,1,32,60,5320,5610 ACGCATTCGTGAGTAC-1,1,33,61,5439,5678 CTCTGGACGCCTGGTG-1,1,32,62,5320,5747 AGGGTTTAGTTCGGGA-1,1,33,63,5439,5816 GGGAGAACTCACAGTA-1,1,32,64,5320,5885 ATCAATCTGGGCTGCA-1,1,33,65,5439,5954 TCTTCGATACCAATAA-1,1,32,66,5320,6023 ACGTAGATTGCTGATG-1,1,33,67,5439,6091 TCTTGATGCGTAGCGA-1,1,32,68,5320,6160 GGGCTGCCTAGGGCGA-1,1,33,69,5439,6229 CTCTCACAATCGATGA-1,1,32,70,5320,6298 CCAAGCGTAACTCGTA-1,1,33,71,5439,6367 ACAACAGCATGAGCTA-1,1,32,72,5320,6436 GTCCCAACGTAAAGTA-1,1,33,73,5439,6504 TCGGAGTACATGAGTA-1,1,32,74,5320,6573 GGGAGTTAATGAGGCG-1,1,33,75,5439,6642 CCGGGCGGTCTCGTCA-1,1,32,76,5320,6711 CCGTAAGTTGGTCCCA-1,1,33,77,5439,6780 GGAGGGCTTGGTTGGC-1,1,32,78,5320,6849 TCGGACGCCCAGCCCA-1,1,33,79,5439,6917 TCTGTGCCATCATAGT-1,1,32,80,5320,6986 GTACTGGAGTTAGACC-1,1,33,81,5439,7055 GGAATGCGCTAGCGTG-1,1,32,82,5320,7124 GTGTGAATAACTTAGG-1,1,33,83,5439,7193 GGTCGGCCAGGAGCTT-1,1,32,84,5320,7262 TAGCCGGCGGTCAGCG-1,1,33,85,5439,7330 CGGGTGTACCCATTTA-1,1,32,86,5320,7399 AGTGATTCAAGCAGGA-1,1,33,87,5439,7468 GTTGGATTGAGAACAC-1,1,32,88,5320,7537 CACACGCGCTGTCTTA-1,1,33,89,5439,7606 TAGACGCCCGTACCGG-1,1,32,90,5320,7675 GGTTTCAATCGGTCAG-1,1,33,91,5439,7743 AATCTGCGTTGGGACG-1,1,32,92,5320,7812 TTACGGATGGTTCGAG-1,1,33,93,5439,7881 CGGCAGGGTCGGGTTG-1,1,32,94,5320,7950 GCTTTCAGAGGAGGTG-1,1,33,95,5439,8019 TCTTCCCATGGGCACA-1,1,32,96,5320,8088 TACCGCGGACTTGCAG-1,1,33,97,5439,8156 AGAATTATGGATTCGA-1,1,32,98,5320,8225 ATTGATGAGTCCTAAC-1,1,33,99,5439,8294 TAGGTCGCCGGAACTG-1,1,32,100,5320,8363 TAACCTACCGTCCGAG-1,1,33,101,5439,8432 CTTAGTAGGCCTACAG-1,1,32,102,5320,8500 CTAGATGTGAGTGTAA-1,1,33,103,5439,8569 ACTCCCGAATTCGTTT-1,1,32,104,5320,8638 GTTCATCGTTTGGCTG-1,1,33,105,5439,8707 ACTTTACCCTCATGAA-1,1,32,106,5320,8776 GCGAGAGTTGCGTCCA-1,1,33,107,5439,8845 GTTCGGGCGTACCATT-1,1,32,108,5320,8913 CGACTTTGTATAGCCT-1,1,33,109,5439,8982 GCCATCGATGCTGCAT-1,1,32,110,5320,9051 GCATTTCCAAGGCTCC-1,1,33,111,5439,9120 ATGTAAGGCTGCTCTT-1,1,32,112,5320,9189 ACGTTCGCAATCAATT-1,1,33,113,5439,9258 
GTGACGAGGGTGACCC-1,1,32,114,5320,9326 ATTATAGCTACTTTAC-1,1,33,115,5439,9395 CGTGTGTTAAACCCTG-1,0,32,116,5320,9464 TTGGTATGGCTTGTGT-1,0,33,117,5439,9533 CATTCCCATTCCGTCG-1,0,32,118,5320,9602 TGCCGAAAGCGTATTC-1,0,33,119,5439,9671 CAACACATCTCCTGCC-1,0,32,120,5320,9739 CTGCCTCATATGCAAC-1,0,33,121,5439,9808 TCCCGCCTATGTGCGT-1,0,32,122,5320,9877 GGTTACCCGACACTTT-1,0,33,123,5439,9946 CCAGCGGGATCACCAG-1,0,32,124,5320,10015 ATGTTTCGGCCCGGAG-1,0,33,125,5439,10084 GCGTCTAACCTCCTAA-1,0,32,126,5320,10152 ATCAGGTAGCTGACAG-1,0,33,127,5439,10221 GGTATGAAAGAACTGA-1,0,34,0,5559,1480 GTGGCCTAATATCATT-1,0,35,1,5679,1549 CCTGTGAAACCGTAAC-1,0,34,2,5559,1618 GGCAGAGAGATCGGGA-1,0,35,3,5679,1686 TAGCGTCGAATATTGA-1,0,34,4,5559,1755 CGCCGACTATTCGCTA-1,0,35,5,5679,1824 TCTGGCGCAAGCCGGG-1,0,34,6,5559,1893 AGTGGTTGCGTATAGG-1,0,35,7,5679,1962 ATCGGTTACCTAGTAA-1,0,34,8,5559,2031 CCTGCCCGTTGTCTAG-1,0,35,9,5679,2099 GCACACGCCCATGGTC-1,0,34,10,5559,2168 AGTACGGCCCGTATCG-1,0,35,11,5679,2237 TATCTAGCCTAAAGGA-1,0,34,12,5559,2306 CACTCGGTTAGGAGGA-1,0,35,13,5679,2375 ATGTTCGTCGACCCAC-1,0,34,14,5559,2444 TTCCTCTGCCCGAATA-1,0,35,15,5679,2512 TTACTATCGGCTTCTC-1,0,34,16,5559,2581 GCCGCATTAGTCCGGC-1,0,35,17,5679,2650 TAAGGGCTGGGAGAGG-1,0,34,18,5559,2719 TAAGCAGGCGACACGC-1,0,35,19,5679,2788 AGCACTACCGGCCTGT-1,0,34,20,5559,2856 GAAAGCCCTTTGGACC-1,1,35,21,5679,2925 GACCGACTGAAGCGTC-1,1,34,22,5559,2994 CGGTGAAGACTAAAGT-1,1,35,23,5679,3063 CCCTGCGCTACGCATA-1,1,34,24,5559,3132 TACTGGACAGCTCGGC-1,1,35,25,5679,3201 TTAGTAGGGCGGCGGG-1,1,34,26,5559,3269 GAGGCTATCAAAGTCG-1,1,35,27,5679,3338 TTACCATTGATTACCC-1,1,34,28,5559,3407 ATACCACGGGCAACTT-1,1,35,29,5679,3476 TGTCCTAAGTCACCGC-1,1,34,30,5559,3545 AGGTAGGTACAAAGCT-1,1,35,31,5679,3614 GGCATACAGGTAGCGG-1,1,34,32,5559,3682 TGTAGTGATCTATAAT-1,1,35,33,5679,3751 TCCCGGGTGTGCTGCT-1,1,34,34,5559,3820 TACGATGTTGATCATC-1,1,35,35,5679,3889 CCTCTCTCCCATCTAG-1,1,34,36,5559,3958 GCAGGACTATAGAATA-1,1,35,37,5679,4027 CTAGTGAAGGACAGGA-1,1,34,38,5559,4095 TACGAGAACTTCACGT-1,1,35,39,5679,4164 CGTTGTTTCAATTCCC-1,1,34,40,5559,4233 GCAAATATTACGCTTT-1,1,35,41,5679,4302 CCAATAGTGCCGTCGA-1,1,34,42,5559,4371 ATTGCTGCTCCTCCAT-1,1,35,43,5679,4440 GAGATCTGCTTGGCAT-1,1,34,44,5559,4508 GCCGAAATTCCTACGT-1,1,35,45,5679,4577 GGCACTCCACTGGGCA-1,1,34,46,5559,4646 GGGTCACCGTGACGGT-1,1,35,47,5679,4715 CACTTAATCAGACGGA-1,1,34,48,5559,4784 CGTTTCGCTCATTACA-1,1,35,49,5679,4853 ATAAAGGCTCGGTCGT-1,1,34,50,5559,4921 CACTAAAGTTGCCTAT-1,1,35,51,5679,4990 GTGCTCAAGTACTGTC-1,1,34,52,5559,5059 CCATGCCTGTTTAGTA-1,1,35,53,5679,5128 TCTAGTTATCAGAAGA-1,1,34,54,5559,5197 TTGTAATCCGTACTCG-1,1,35,55,5679,5266 TCCCAGCTTTAGTCTG-1,1,34,56,5559,5334 CTACGCACGGAGTACC-1,1,35,57,5679,5403 AAATTAACGGGTAGCT-1,1,34,58,5559,5472 CGGCCACGCACAAAGT-1,1,35,59,5679,5541 GAAGCGTGAGGAATTT-1,1,34,60,5559,5610 ATATCTTAGGGCCTTC-1,1,35,61,5679,5678 ACGCGGGCCAAGGACA-1,1,34,62,5559,5747 GCGAGTTCTGCAAAGA-1,1,35,63,5679,5816 TATTCGTGCCAGAATA-1,1,34,64,5559,5885 AGGGCTGCAGTTACAG-1,1,35,65,5679,5954 CTAGCATAGTATAATG-1,1,34,66,5559,6023 TAGGTTCGAGTTCGTC-1,1,35,67,5679,6091 GAATTATAGTGAAAGG-1,1,34,68,5559,6160 CTATCGGGTCTCAACA-1,1,35,69,5679,6229 GCGCTAATTGAATAGA-1,1,34,70,5559,6298 ATGCGACAGTCCCATT-1,1,35,71,5679,6367 GGTAGTGCTCGCACCA-1,1,34,72,5559,6436 AAGCTCGTGCCAAGTC-1,1,35,73,5679,6504 TATTCAATTCTAATCC-1,1,34,74,5559,6573 TTCAAAGTCTCTAGCC-1,1,35,75,5679,6642 TTGAATATGGACTTTC-1,1,34,76,5559,6711 AAGAGCTCTTTATCGG-1,1,35,77,5679,6780 TTACTCCGGCCGGGAA-1,1,34,78,5559,6849 AAACGAGACGGTTGAT-1,1,35,79,5679,6917 GCTAAGTAGTTTCTCT-1,1,34,80,5559,6986 
ATAACGCCGGAGGGTC-1,1,35,81,5679,7055 GGATCCGGAATATACT-1,1,34,82,5559,7124 TGAAAGGACCTGACTC-1,1,35,83,5679,7193 TCCGCGGCAGCATCTG-1,1,34,84,5559,7262 TGCATATGTCTGTCAC-1,1,35,85,5679,7330 TGTAGGAGAAATTTCC-1,1,34,86,5559,7399 AGTGAGACTTCCAGTA-1,1,35,87,5679,7468 CCCAAACATGCTGCTC-1,1,34,88,5559,7537 GCTTATGAAGCAGGAA-1,1,35,89,5679,7606 TTCTAACCGAAGCTTA-1,1,34,90,5559,7675 GGATGTCCTTACCGCA-1,1,35,91,5679,7743 AGGGTGCTCTCGAGGG-1,1,34,92,5559,7812 AACTCTCAATAGAGCG-1,1,35,93,5679,7881 TCTGAATTCCGTACAA-1,1,34,94,5559,7950 GCGTGGTACTGGGTTA-1,1,35,95,5679,8019 CGTCGGATAGTGTTGA-1,1,34,96,5559,8088 ATATGTCTCCCTAGCC-1,1,35,97,5679,8156 TCTTTAAGACTATGAA-1,1,34,98,5559,8225 TCATTTAAGTCTCCGA-1,1,35,99,5679,8294 GATATTGAGATTGGCG-1,1,34,100,5559,8363 TGACATCGAGCGGACC-1,1,35,101,5679,8432 GCGTAAATGGCCATAA-1,1,34,102,5559,8500 ATTGTACAACTCGGCT-1,1,35,103,5679,8569 TACGCTATAGAAACCT-1,1,34,104,5559,8638 CACCCAAATCTTATGT-1,1,35,105,5679,8707 AGATGATGGAGTCTGG-1,1,34,106,5559,8776 CCACGGTGCCCGGTAG-1,1,35,107,5679,8845 TCAAGAAATACTAGCT-1,1,34,108,5559,8913 AGGTATAATTGATAGT-1,1,35,109,5679,8982 CAAGGTCCTATAGGCT-1,1,34,110,5559,9051 CCGGCACGACCGTTTC-1,1,35,111,5679,9120 ACCTCCGTTATTCACC-1,1,34,112,5559,9189 GCAGCCTATATCACAT-1,1,35,113,5679,9258 GGTATAGTGACACATA-1,1,34,114,5559,9326 AAATTCCAGGTCCAAA-1,1,35,115,5679,9395 TCTTTAGCAGGCGAAC-1,0,34,116,5559,9464 TATTGACATTTCTGCC-1,0,35,117,5679,9533 TCTGATCGGGTGCTAG-1,0,34,118,5559,9602 GGCCCGGAGCATGTCT-1,0,35,119,5679,9671 GGGCGCAGCGTTACTC-1,0,34,120,5559,9739 TTGGCGATCCGAATAT-1,0,35,121,5679,9808 CCACGTAAATTAGACT-1,0,34,122,5559,9877 TCTGATTGGAAATGGA-1,0,35,123,5679,9946 ATGGCGGAATAGTCGC-1,0,34,124,5559,10015 ATCGCTTTACGTCTCA-1,0,35,125,5679,10084 TACGTGCAAGGTTCCT-1,0,34,126,5559,10152 CAGGACAGCTGCCCTT-1,0,35,127,5679,10221 CAAACCAGGTCTGCAT-1,0,36,0,5799,1480 ACAAGCTATATGGAAG-1,0,37,1,5919,1549 TCGCCCACTGCGAGAG-1,0,36,2,5799,1618 AGCCGCAAATTCAAAT-1,0,37,3,5919,1686 TTAACGTTAAAGCCTG-1,0,36,4,5799,1755 CAGCGCCAACACGATA-1,0,37,5,5919,1824 ATCCAATGGTACCGAA-1,0,36,6,5799,1893 GTGCTGCAGATAAGGA-1,0,37,7,5919,1962 GGCCTTTGCAACTGGC-1,0,36,8,5799,2031 GTCGTACCTACGATTG-1,1,37,9,5919,2099 TAGAAATTCACGTATA-1,0,36,10,5799,2168 AGAATAAATCTTCAGG-1,0,37,11,5919,2237 CATTGCGAAATGGGCG-1,0,36,12,5799,2306 GTCTACTCAATTACAA-1,0,37,13,5919,2375 TGTAATGACCACAATA-1,0,36,14,5799,2444 AAAGTCGACCCTCAGT-1,0,37,15,5919,2512 TACTCGGCACGCCGGG-1,0,36,16,5799,2581 AGGTGTATCGCCATGA-1,0,37,17,5919,2650 TGTGCTTTACGTAAGA-1,0,36,18,5799,2719 AAACCTCATGAAGTTG-1,0,37,19,5919,2788 TATAGGGTACTCATGA-1,0,36,20,5799,2857 CCAGCTGATGGTACTT-1,1,37,21,5919,2925 AATATTGGAGTATTGA-1,1,36,22,5799,2994 GGCCCTCACCCACTTA-1,1,37,23,5919,3063 AACCAAGACTTCTCTG-1,1,36,24,5799,3132 TCGTATTACCCATTGC-1,1,37,25,5919,3201 ATTCGACGCCGGGCCT-1,1,36,26,5799,3269 GGCGCAGGACATCTTC-1,1,37,27,5919,3338 GTACTCCCTTATCGCT-1,1,36,28,5799,3407 TGGTCTGTTGGGCGTA-1,1,37,29,5919,3476 AATAACAACGCTCGGC-1,1,36,30,5799,3545 CATACCCGTACCCAGT-1,1,37,31,5919,3614 ACAATCCATTTAAACC-1,1,36,32,5799,3682 GTTACAATTGGTGACG-1,1,37,33,5919,3751 TTGCCCTGATCACGGG-1,1,36,34,5799,3820 CTAACCGCGCGCCCGT-1,1,37,35,5919,3889 CTAAAGAATGCCTACT-1,1,36,36,5799,3958 ACCCATCTTGAGGGTA-1,1,37,37,5919,4027 GATCTTTGCAGGGTAT-1,1,36,38,5799,4095 GGGTACTTCATGAACT-1,1,37,39,5919,4164 GCCGCTTGTGAGAAAC-1,1,36,40,5799,4233 CCTGTAAGACATGATA-1,1,37,41,5919,4302 CGACAGTTCGCGTTAT-1,1,36,42,5799,4371 ACGATGCATATGTTAT-1,1,37,43,5919,4440 TGTTCCGCTTCCATGA-1,1,36,44,5799,4508 GGATGACGCGAGTTTA-1,1,37,45,5919,4577 GAAGTTTCCACTCAAT-1,1,36,46,5799,4646 GCGAGGCCCGAGCAGA-1,1,37,47,5919,4715 
CATACTATGTAATTGT-1,1,36,48,5799,4784 CCAATGTCACAGCAAG-1,1,37,49,5919,4853 GTTGGATTTGCGTTGG-1,1,36,50,5799,4921 GGGAGGATGCCCGAAA-1,1,37,51,5919,4990 GATCGCGGGCTCTCCA-1,1,36,52,5799,5059 GTTCGCCATAAGTGCC-1,1,37,53,5919,5128 AGATTATAGGACGTTT-1,1,36,54,5799,5197 TCGAGACCAACACCGT-1,1,37,55,5919,5266 TATGGGACCGAGCAGG-1,1,36,56,5799,5334 GATGCGTCCTGCATTC-1,1,37,57,5919,5403 TATGGTCTGAGTAACA-1,1,36,58,5799,5472 GCATAGAGCACTCAGG-1,1,37,59,5918,5541 CTTCATTGTCAGTGGA-1,1,36,60,5799,5610 GCAGATTAGGGATATC-1,1,37,61,5918,5679 CCTGTCGCCCGTAAAT-1,1,36,62,5799,5747 CAATTTCGTATAAGGG-1,1,37,63,5918,5816 GTACACTTACCTGAAG-1,1,36,64,5799,5885 CCAGCCTGGACCAATA-1,1,37,65,5918,5954 ATGGAGCAGGCCGTGA-1,1,36,66,5799,6023 GTCATTAGAGCGAACG-1,1,37,67,5918,6091 AAGACTGCAAGCTACT-1,1,36,68,5799,6160 CTAGTCACGTCTTAAG-1,1,37,69,5918,6229 ACTCTTGTATAGTAAC-1,1,36,70,5799,6298 ATTAGGCGATGCTTTC-1,1,37,71,5918,6367 TTCGGGACTAATCGCG-1,1,36,72,5799,6436 TGGACTGTTCGCTCAA-1,1,37,73,5918,6504 AACGTGCGAAAGTCTC-1,1,36,74,5799,6573 CCACCCAAGGAAAGTG-1,1,37,75,5918,6642 CCGCACAAAGACCAAC-1,1,36,76,5799,6711 GCGATTGTTAACGTTA-1,1,37,77,5918,6780 ACTCGTCAGTAATCCC-1,1,36,78,5799,6849 GGTGATAAGGAGCAGT-1,1,37,79,5918,6917 AAGAGGCATGGATCGC-1,1,36,80,5799,6986 CACGTTCGTGCTCTAG-1,1,37,81,5918,7055 CTATTTGGTTACGGAT-1,1,36,82,5799,7124 GTACAGAGGCAAGGGT-1,1,37,83,5918,7193 GGGCCGGCCGAAGTAC-1,1,36,84,5799,7262 CCTGAACGATATATTC-1,1,37,85,5918,7330 CCGGGCTGCTCCATAC-1,1,36,86,5799,7399 TACTTGTTAGTAGTCC-1,1,37,87,5918,7468 CCTAGGCGTAGCGATC-1,1,36,88,5799,7537 CTGGCGCACAGGTCTG-1,1,37,89,5918,7606 ACTTATACTTACCCGG-1,1,36,90,5799,7675 GAAGTCTCCCTAGCGA-1,1,37,91,5918,7743 ACCGATGGTAGCATCG-1,1,36,92,5799,7812 CGAGTTTATCGGACTG-1,1,37,93,5918,7881 CATAACGGACAGTCGT-1,1,36,94,5799,7950 TGACGATGCACTAGAA-1,1,37,95,5918,8019 TAGGGAGCTTGGGATG-1,1,36,96,5799,8088 AGGGTCGATGCGAACT-1,1,37,97,5918,8156 TATATCCCTGGGAGGA-1,1,36,98,5799,8225 CATCTTACACCACCTC-1,1,37,99,5918,8294 GTGCGACAGGGAGTGT-1,1,36,100,5799,8363 CCGATCTCAACCTTAT-1,1,37,101,5918,8432 ACGATCATCTTGTAAA-1,1,36,102,5799,8500 GAAAGAACAGCGTTAT-1,1,37,103,5918,8569 CTAGGTCTGAAGGAAT-1,1,36,104,5799,8638 ATATCAACCTACAGAG-1,1,37,105,5918,8707 AAATAGCTTAGACTTT-1,1,36,106,5799,8776 GCGACATGTAAACATC-1,1,37,107,5918,8845 ATAAGTAGGGCGACTC-1,1,36,108,5799,8913 GCGAGCGCATGCTCCC-1,1,37,109,5918,8982 AGGGACCGGCTGCGTT-1,1,36,110,5799,9051 CCTATGAAGTGGTGCC-1,1,37,111,5918,9120 GCTTACGTAGTTAGTA-1,1,36,112,5799,9189 CATACTTAGGCAATAC-1,1,37,113,5918,9258 CCTGTCCCTCACGTTA-1,1,36,114,5799,9326 CAATGTGCCAACCCTT-1,1,37,115,5918,9395 GTTAAGTTAGAGTGGG-1,0,36,116,5799,9464 CTGGGATACGCTACCC-1,0,37,117,5918,9533 AACCTGTCACGGAATT-1,0,36,118,5799,9602 ACTGCGGACACACCGT-1,0,37,119,5918,9671 CCGTGAGGCATTCATG-1,0,36,120,5799,9739 GCCCAGATGCTGGAGA-1,0,37,121,5918,9808 TCTGGCCGTTCAAGTT-1,0,36,122,5799,9877 ATACGAAGGCTTTCCA-1,0,37,123,5918,9946 GATCCGAATATAAGTG-1,0,36,124,5799,10015 GTGGAGCATGTCGGCC-1,0,37,125,5918,10084 ACTCTTCAGCTCCCGC-1,0,36,126,5799,10152 CCCGATAGCCTCGCCT-1,0,37,127,5918,10221 ACAGGTGTGTTGTTGC-1,0,38,0,6038,1480 TGAACTGCTATGACTT-1,1,39,1,6158,1549 TGACATATATGACGAT-1,1,38,2,6038,1618 AAGTCTTCTGTGGCCT-1,1,39,3,6158,1686 ACCAGACCATAACAAC-1,1,38,4,6038,1755 TGAGACGTACCTCTCA-1,1,39,5,6158,1824 GACCGTTACATGCGAC-1,1,38,6,6038,1893 GGTTCGGATTATACTA-1,1,39,7,6158,1962 CCTCCTGAGCCCACAT-1,1,38,8,6038,2031 CCGCGATTTGGTAGGT-1,1,39,9,6158,2099 AATCTCTACTGTGGTT-1,1,38,10,6038,2168 ACTTTGACTGCATCCT-1,1,39,11,6158,2237 CCCTGACTAACAAATT-1,0,38,12,6038,2306 ACGCTGTGAGGCGTAG-1,1,39,13,6158,2375 GCATACGAGGTCTTTA-1,0,38,14,6038,2444 
TCGGGATTCAAACATA-1,0,39,15,6158,2512 GGGCCCTACGAAAGGG-1,0,38,16,6038,2581 CGCCAAGAAGCCGAGT-1,0,39,17,6158,2650 GGTACATCTGGGACGA-1,0,38,18,6038,2719 GGATGCTGGCGTTCCT-1,0,39,19,6158,2788 CCGATTCGAGGGACCC-1,1,38,20,6038,2857 CCACTGGTGGCTGGTT-1,1,39,21,6158,2925 GACAGGCACACACTAT-1,1,38,22,6038,2994 TCAACGCGACCGGCAG-1,1,39,23,6158,3063 CTACGACTAGCTATAA-1,1,38,24,6038,3132 CGGTTGACCTGGCATA-1,1,39,25,6158,3201 ATCCTGAATCGCTGCG-1,1,38,26,6038,3269 GTTTCATATCGTCGCT-1,1,39,27,6158,3338 ATAAATATTAGCAGCT-1,1,38,28,6038,3407 AAGAGGATGTACGCGA-1,1,39,29,6158,3476 TCCTGCGTTGATACTC-1,1,38,30,6038,3545 CGTGCATTGTCGACGC-1,1,39,31,6158,3614 CCGGGTTCGAGGTTAC-1,1,38,32,6038,3682 CCCAATTTCACAACTT-1,1,39,33,6158,3751 TGATTTATTAGCTGTG-1,1,38,34,6038,3820 TGGAAGAAGGGAACGT-1,1,39,35,6158,3889 GACGCTTGCTTCTAAA-1,1,38,36,6038,3958 GGGAACGGGAGGTTAG-1,1,39,37,6158,4027 GCGGCTCTGACGTACC-1,1,38,38,6038,4095 ACGTTAGATTTGCCCG-1,1,39,39,6158,4164 GAGAGGGCGCGAGGTT-1,1,38,40,6038,4233 GCGTCTCTGCATTGGG-1,1,39,41,6158,4302 GCAGCACACAGCCCAG-1,1,38,42,6038,4371 CAGGCCGTTTGGGTGT-1,1,39,43,6158,4440 AACTCAAGTTAATTGC-1,1,38,44,6038,4508 CTTCGTAGATAGGTGA-1,1,39,45,6158,4577 TGCAGAGTACCGAGCA-1,1,38,46,6038,4646 GAAGTGATTTATCGTG-1,1,39,47,6158,4715 CGCTACGGGACATTTA-1,1,38,48,6038,4784 CCACACTGAGATATTA-1,1,39,49,6158,4853 CGATCCGACCCAGTGC-1,1,38,50,6038,4921 CTGTACTTCTTAGCAT-1,1,39,51,6158,4990 ACTTATTAGGATCGGT-1,1,38,52,6038,5059 TAGTCCGCAGAGAATG-1,1,39,53,6158,5128 TTCACGAAAGGATCAC-1,1,38,54,6038,5197 TACATTTCTAACGTGC-1,1,39,55,6158,5266 ACCATATCCGCAATAA-1,1,38,56,6038,5334 CACTCAAGAGCTATGG-1,1,39,57,6158,5403 TGTACGAACAAATCCG-1,1,38,58,6038,5472 ATCATCCAATATTTGT-1,1,39,59,6158,5541 CGCTATTCTTAGGCTC-1,1,38,60,6038,5610 TGGCAGCAGTAATAGT-1,1,39,61,6158,5679 TCACGTGCCCGATTCA-1,1,38,62,6038,5747 CATACGGCGTCTGGGC-1,1,39,63,6158,5816 CACATGATTCAGCAAC-1,1,38,64,6038,5885 GCTAGTAGAGCTTGTA-1,1,39,65,6158,5954 TGCTGTTGAAGAACTC-1,1,38,66,6038,6023 CGGAGCATGGCGATCC-1,1,39,67,6158,6091 TAGCGTTGGGTCTTAC-1,1,38,68,6038,6160 GTAGCGGCTATACACT-1,1,39,69,6158,6229 TAACATACAATGTGGG-1,1,38,70,6038,6298 TCTTCGAATAGACGTT-1,1,39,71,6158,6367 GATCGTGACTGATATC-1,1,38,72,6038,6436 GATCCGGGAATTAACA-1,1,39,73,6158,6504 TTATATACGCTGTCAC-1,1,38,74,6038,6573 GTCGCGTAACCCGTTG-1,1,39,75,6158,6642 AGCTCTAGACGTTCCA-1,1,38,76,6038,6711 GTCAAGCGGACTCGGG-1,1,39,77,6158,6780 CGAGGGACTGCGGTCG-1,1,38,78,6038,6849 AATCGCCTCAGCGCCA-1,1,39,79,6158,6917 CTTGTTGCTGAGTCAA-1,1,38,80,6038,6986 GATATGAGACACTAAC-1,1,39,81,6158,7055 TTATGATCTTAACGAA-1,1,38,82,6038,7124 CGCCGCCCATGCCTGT-1,1,39,83,6158,7193 CTGGGATAAATAATGG-1,1,38,84,6038,7262 GTGCCCGTTCGGATTC-1,1,39,85,6158,7330 TTCAATACTCTGAATC-1,1,38,86,6038,7399 CGCACATGTCCACTAC-1,1,39,87,6158,7468 AGAAGAGCGCCGTTCC-1,1,38,88,6038,7537 GATAACTCGCACTGTG-1,1,39,89,6158,7606 AGTCGACGGTCTCAAG-1,1,38,90,6038,7675 GTGACCGCACACTACG-1,1,39,91,6158,7743 GTATGTGGGTCTAGTT-1,1,38,92,6038,7812 CTTGAGTTAGGGTAAT-1,1,39,93,6158,7881 TTAGCTGATTTGCCGT-1,1,38,94,6038,7950 GCTGTTGCTACCGAAC-1,1,39,95,6158,8019 TATTACCATCCTGCTT-1,1,38,96,6038,8088 TTGAATTCACGTGAGG-1,1,39,97,6158,8156 CCATCTCACCAGTGAA-1,1,38,98,6038,8225 CGCACGTGCGCTATCA-1,1,39,99,6158,8294 ACCCGGATGACGCATC-1,1,38,100,6038,8363 CGCTAGAGACCGCTGC-1,1,39,101,6158,8432 ATAGTTCCACCCACTC-1,1,38,102,6038,8501 GCAGACCCAGCACGTA-1,1,39,103,6158,8569 TAGACTACCTAGCGTT-1,1,38,104,6038,8638 GGTTCTACTCGTCTGA-1,1,39,105,6158,8707 GACTCACCCACGTGAG-1,1,38,106,6038,8776 AGCTCTTCGTAACCTT-1,1,39,107,6158,8845 ACTATCCAGGGCATGG-1,1,38,108,6038,8913 AAGGATCGATCGCTTG-1,1,39,109,6158,8982 
ATATCGGTAGGGAGAT-1,1,38,110,6038,9051 TTCCAGACGAGATTTA-1,1,39,111,6158,9120 GACCGACGTGAAAGCA-1,1,38,112,6038,9189 CCTGGAAACGTTCTGC-1,1,39,113,6158,9258 CCGGTAATGGCTAGTC-1,1,38,114,6038,9326 GCCGTGGAAGAAATGT-1,1,39,115,6158,9395 GTCTTGAGGAGCAGTG-1,0,38,116,6038,9464 TCCCAAAGCCCTAAAT-1,0,39,117,6158,9533 TTGAGCGCCACGTGAT-1,0,38,118,6038,9602 TTGAGTCCCGCTGCTG-1,0,39,119,6158,9671 ATGGAACCTTTGCACA-1,0,38,120,6038,9739 GCTAGCACCTGGGCCA-1,0,39,121,6158,9808 CGCCGTCTACCCATCG-1,0,38,122,6038,9877 GATAGGTGTCCCGGGC-1,0,39,123,6158,9946 AGGTATGCGGACATTA-1,0,38,124,6038,10015 TTGGTTCGCTCAAAGG-1,0,39,125,6158,10084 TCTGGAGCGTAAGAGT-1,0,38,126,6038,10152 TGCCTAAATTTAATAG-1,0,39,127,6158,10221 TAATTTCCGTCCAGTA-1,1,40,0,6278,1480 TCCTTCAATCCCTACG-1,1,41,1,6398,1549 TACCTATCCCTAGAGG-1,1,40,2,6278,1618 GCATGGGTACTGACGC-1,1,41,3,6398,1686 GTCGGGAACATGGTAG-1,1,40,4,6278,1755 GCAAATGAGGACACTT-1,1,41,5,6398,1824 GAATGGGCTTATCGAC-1,1,40,6,6278,1893 TGGTCGTGCAAGGCAA-1,1,41,7,6398,1962 CACCACGCCACACAGA-1,1,40,8,6278,2031 GAACCTCGACCTACAC-1,1,41,9,6398,2099 CAATACGCTCTGAGGC-1,1,40,10,6278,2168 TGGTAAGCAGGATTGA-1,1,41,11,6398,2237 AGTGGCTCCGTCGGCC-1,1,40,12,6278,2306 GATCGGATAGAACCAT-1,1,41,13,6398,2375 GCTACAGTACGGACCG-1,1,40,14,6278,2444 TCTATTACTAGAGGAT-1,1,41,15,6398,2512 TTCAGGCGTCAAAGCC-1,1,40,16,6278,2581 AGACCGGGAAACCCTG-1,1,41,17,6398,2650 AGAGATCTCTAAAGCG-1,1,40,18,6278,2719 CCCTGCCCAATCCGCT-1,1,41,19,6398,2788 GTGGCGGTCCCAGCGT-1,1,40,20,6278,2857 GCATTGTAATTCATAT-1,1,41,21,6398,2925 CCGTTCCGAATCTCGG-1,1,40,22,6278,2994 AGCTTGATCTTAACTT-1,1,41,23,6398,3063 CCTGTACTCACGCCCA-1,1,40,24,6278,3132 AAGTGACGACCGAATT-1,1,41,25,6398,3201 CTCACTTGGCTGGTAA-1,1,40,26,6278,3270 CGCCTGGCCTACGTAA-1,1,41,27,6398,3338 CCCGTAAGTCTAGGCC-1,1,40,28,6278,3407 TTGGACATGTGGCTTA-1,1,41,29,6398,3476 ATTACGCGCTGGCAGG-1,1,40,30,6278,3545 ACGCGCTACACAGGGT-1,1,41,31,6398,3614 TACGTTTACCGGCAAT-1,1,40,32,6278,3682 CGAAACGCAATTCATG-1,1,41,33,6398,3751 TAGTCTAACAACGAGA-1,1,40,34,6278,3820 TTGCATGCTGATCACG-1,1,41,35,6398,3889 TCTGGGTAGCGCTCAT-1,1,40,36,6278,3958 ACATCGGTCAGCCGCG-1,1,41,37,6398,4027 AGATACCGGTGTTCAC-1,1,40,38,6278,4095 GATTACTGAATTTGGG-1,1,41,39,6398,4164 TCCAACTTTAAATTCT-1,1,40,40,6278,4233 TCCTAGCAAAGAAGCT-1,1,41,41,6398,4302 GTCTATCTGAGTTTCT-1,1,40,42,6278,4371 GATGTTCAATCCACGA-1,1,41,43,6398,4440 AGTTAAACACTTGCGA-1,1,40,44,6278,4508 AGCTCTTTACTCAGTT-1,1,41,45,6398,4577 ATCCAGGATTCGTGAA-1,1,40,46,6278,4646 AGTCAACACCACCATC-1,1,41,47,6398,4715 CGATACCTCGCGGACA-1,1,40,48,6278,4784 TACAACGCACAACTCA-1,1,41,49,6398,4853 AATTAAAGGTCGGCGT-1,1,40,50,6278,4921 TACGCAGTTCTTTCCT-1,1,41,51,6398,4990 GACCGTGCTGACGGTG-1,1,40,52,6278,5059 GGCAAATTACTTTACT-1,1,41,53,6398,5128 GGTACAAACATGCTAT-1,1,40,54,6278,5197 CGGGCCTTCTTTGTAA-1,1,41,55,6398,5266 CGTGAAGTTAATTCAC-1,1,40,56,6278,5334 ATAGTGAAGCGTTCTC-1,1,41,57,6398,5403 TACGCCATATTCTAAT-1,1,40,58,6278,5472 GCCGGGTTAGGGTCGC-1,1,41,59,6398,5541 TACATAGGCATACACC-1,1,40,60,6278,5610 GCCGATTGGCCAAGCT-1,1,41,61,6398,5679 CTGCCATGCATCACAT-1,1,40,62,6278,5747 TTATGAATGAAAGGGA-1,1,41,63,6398,5816 GCTGAGGCGTGAGTAT-1,1,40,64,6278,5885 GCGCCGTTCCACGATA-1,1,41,65,6398,5954 CGCATGGTGCGATGCT-1,1,40,66,6278,6023 AGGTTTCACACACCTT-1,1,41,67,6398,6091 CAAGGATCGCATGTTC-1,1,40,68,6278,6160 ACGTTAATGTCGAAGA-1,1,41,69,6398,6229 TCCAGAGCACCGGTTC-1,1,40,70,6278,6298 GATTCGACGGTTCACG-1,1,41,71,6398,6367 GTTTCTGCAGTCTCCC-1,1,40,72,6278,6436 GCTGCACGGTTTCTTA-1,1,41,73,6398,6504 CGTGCAGACTGGGACA-1,1,40,74,6278,6573 GTGTTACTATGCGTCC-1,1,41,75,6398,6642 TCCTCGGGCTGGGCTT-1,1,40,76,6278,6711 
[Spot-position records elided. This span is the flattened body of a barcode/spot coordinate table; it appears to follow the standard 10x Genomics Visium tissue_positions_list.csv layout, with one record per capture spot and six comma-separated fields: barcode, in_tissue flag (0/1), array_row, array_col, pxl_row_in_fullres, pxl_col_in_fullres. In the original archive each record occupies its own line; here many records were run together on single lines during extraction, so only the column layout is reproduced.]
ACATTCGCGCGGAATA-1,0,73,27,10230,3338 CTATGAACACCTTGCC-1,0,72,28,10110,3407 CGAATGGTAGGTCGTC-1,0,73,29,10230,3476 TCGACATAGCGTAGCG-1,0,72,30,10110,3545 GACGTTGCTCGGCGGC-1,0,73,31,10230,3614 CTCGAGGCAAGTTTCA-1,0,72,32,10110,3683 GCTGGGAGCGCGTCAA-1,0,73,33,10230,3751 GACGATATCACTGGGT-1,0,72,34,10110,3820 CAGCTCTGGGCTCACT-1,0,73,35,10230,3889 TAGTCTGCGGCACATT-1,0,72,36,10110,3958 TGGTCGATATACCTCT-1,0,73,37,10230,4027 TTCAGTTCAAGAGGAG-1,0,72,38,10110,4096 GTCTGTAGGTTGAACA-1,0,73,39,10230,4164 GAGAGTCTCGGGAGAG-1,0,72,40,10110,4233 TTGTTTGTATTACACG-1,0,73,41,10230,4302 GCACAGCACGGGCCGA-1,0,72,42,10110,4371 AAACAGTGTTCCTGGG-1,0,73,43,10230,4440 TGGAGAATAATCGTCC-1,0,72,44,10110,4509 CGCCGCGTTCTGAACG-1,0,73,45,10230,4577 CATGCGTTGAGAGGAG-1,0,72,46,10110,4646 TACGCTCCTAGAACTG-1,0,73,47,10230,4715 CCTTGTGAACGTGGTT-1,0,72,48,10110,4784 TTAAAGGCGATGCTCG-1,0,73,49,10230,4853 TAAAGTGCACGTCTCG-1,0,72,50,10110,4922 TGTCCGCAAACAATTC-1,0,73,51,10230,4990 AGGCCTATCATACCAA-1,0,72,52,10110,5059 ACTGGGATGCCAGTGC-1,0,73,53,10230,5128 CGATCCACCATTGTTG-1,0,72,54,10110,5197 GGGCATGCATGTCGAG-1,0,73,55,10230,5266 TCCAGGCAGGACGATC-1,0,72,56,10110,5335 CAATGACCCTTAATTT-1,0,73,57,10230,5403 ATCAAGATCCCAGGAC-1,0,72,58,10110,5472 CTCAGAGCTAATGTCG-1,0,73,59,10230,5541 CTGCGATTTCGAGATT-1,0,72,60,10110,5610 ATGGTATTGGGAACCG-1,0,73,61,10230,5679 TAGGTTCTGCTGAGAA-1,0,72,62,10110,5747 GCCTGTCCCGGTGCAT-1,0,73,63,10230,5816 GGCCTGCTCTGATGTT-1,0,72,64,10110,5885 TAGAGGGAGTTTATCT-1,0,73,65,10230,5954 CTCTACATCCTGCGTG-1,0,72,66,10110,6023 ACACATTTCCGTAGAC-1,0,73,67,10230,6092 TGGGTGTTAAGTAGAA-1,0,72,68,10110,6160 GAGCTCAACATGAGCG-1,0,73,69,10230,6229 GCACACAGCTATTACC-1,0,72,70,10110,6298 GCGACGGTAGTCTCCT-1,0,73,71,10230,6367 GTATCAAGGTACTTCC-1,0,72,72,10110,6436 GGAAAGGGAATTGAGC-1,0,73,73,10230,6505 TACACAGCCGTGGTGC-1,1,72,74,10110,6573 CGGTTGGGCAGGGTCC-1,0,73,75,10230,6642 GCACGCCGATTCCCGC-1,1,72,76,10110,6711 CGCTTTCCGCCAAGGT-1,0,73,77,10230,6780 TGCCCGATAGTTAGAA-1,1,72,78,10110,6849 TCAGAACGGCGGTAAT-1,0,73,79,10230,6918 CCACTCAGATCCGCAA-1,1,72,80,10110,6986 GAAGGGTCATTAAGAC-1,0,73,81,10230,7055 GCGGTCTTGCTTTCAC-1,1,72,82,10110,7124 GGAAGGACACCGTATA-1,0,73,83,10230,7193 CGCGGGAATTCCTTTC-1,1,72,84,10110,7262 TCTATCCGATTGCACA-1,0,73,85,10230,7331 TACTCGTTTGAATCAA-1,1,72,86,10110,7399 GTCTGCCGACTCGACG-1,0,73,87,10230,7468 TACGGGTAATAACATA-1,1,72,88,10110,7537 CCAGTTCGGTAACTCA-1,1,73,89,10230,7606 CGTGCACACCACTGTA-1,1,72,90,10110,7675 CTTCAGTTGGACAACG-1,1,73,91,10230,7744 TGATCTATCACACTCT-1,1,72,92,10110,7812 TAGCAGATACTTAGGG-1,1,73,93,10230,7881 CACCGGGCATCACAAG-1,1,72,94,10110,7950 AGTGGCCCGCAAATGG-1,1,73,95,10230,8019 CTCGCTAGGTAAGCGA-1,1,72,96,10110,8088 GATAGATAGTACAGTC-1,1,73,97,10230,8157 CGAGACTACTGCTGCT-1,1,72,98,10110,8225 ACATTTGAAACCTAAC-1,1,73,99,10230,8294 GACACAGCCGGGACTG-1,1,72,100,10110,8363 ACGGAACACGAGTGCC-1,1,73,101,10230,8432 ACGCCAGTGCGTTTGC-1,1,72,102,10110,8501 GCACCTAGGCGAGTCC-1,1,73,103,10230,8569 TCACTATCCCTTCGGT-1,1,72,104,10110,8638 TGAGTGTAACAACGGG-1,1,73,105,10230,8707 ATTGGTTGTGCATTAC-1,1,72,106,10110,8776 CTGGCGATTTACATGT-1,1,73,107,10230,8845 TCCACCAAGACATAGG-1,1,72,108,10110,8914 AACGTCGCTGCACTTC-1,1,73,109,10230,8982 GTCTCCCGAGTCCCGT-1,1,72,110,10110,9051 AGCCCGCACTACAATG-1,1,73,111,10230,9120 GGCACGCTGCTACAGT-1,1,72,112,10110,9189 GCTTGAGTGACCTCTG-1,1,73,113,10230,9258 ATAGCCATAACAGTCA-1,1,72,114,10110,9327 TAGGAGGCTCGAGAAC-1,1,73,115,10230,9395 GTCGGGTGAAGTACCG-1,0,72,116,10110,9464 CACCCGCGTTTGACAC-1,0,73,117,10230,9533 TCTCAGGCTACTCGCT-1,0,72,118,10110,9602 TAGATTCTCTAGCAAA-1,0,73,119,10230,9671 
CCCATTATTGTATCCT-1,0,72,120,10110,9740 GTTAATAGCGTCATTA-1,0,73,121,10230,9808 TACTCATTGACGCATC-1,0,72,122,10110,9877 TAGTGACAAGCTCTAC-1,0,73,123,10230,9946 ATCGCCGTGGTTCATG-1,0,72,124,10110,10015 CTGCTCTCAACACACC-1,0,73,125,10230,10084 GCATGACACAAAGGAA-1,0,72,126,10110,10153 CAACGATCGATCCAAT-1,0,73,127,10230,10221 ATACAGCGTCCACTGA-1,0,74,0,10350,1480 TCACGATTAATACGTT-1,0,75,1,10470,1549 CGTGAACTGACCCGAT-1,0,74,2,10350,1618 TCGATGTTACGGCCGT-1,0,75,3,10470,1687 GACCGGTGATACTCTC-1,0,74,4,10350,1755 TCTGTTTAGATTGTTC-1,0,75,5,10470,1824 GCTACTCGGACGCAGA-1,0,74,6,10350,1893 AAGAAAGTTTGATGGG-1,0,75,7,10470,1962 CGCCAGTAGTACCTTG-1,0,74,8,10350,2031 CTCCCTTGTATCAAGG-1,0,75,9,10470,2100 ACTTTATACACCACTT-1,0,74,10,10350,2168 AGATCTGGAGAGGATA-1,0,75,11,10470,2237 TCATCCATCTGATCAC-1,0,74,12,10350,2306 AGAACACGGCGATGGT-1,0,75,13,10470,2375 CCGCTCCGGATAAGCT-1,0,74,14,10350,2444 TGGTCGGGTACAGGGC-1,0,75,15,10470,2513 ACGGACGCAGCGACAA-1,0,74,16,10350,2581 ACATGCTTACGGCAGC-1,0,75,17,10470,2650 CAGGGAGATAGGCCAG-1,0,74,18,10350,2719 TCGCCACCCGGATTAC-1,0,75,19,10470,2788 TCTTGGTCAATGATAC-1,0,74,20,10350,2857 CATAGGGACACTTGTG-1,0,75,21,10470,2926 CACGAGCAAACCAGAC-1,0,74,22,10350,2994 GGGATATTGATCGCCA-1,0,75,23,10470,3063 TAATGTCGGTTCATGG-1,0,74,24,10350,3132 CGATTTGTCATTAATG-1,0,75,25,10470,3201 TTAAGATACCCAGAGA-1,0,74,26,10350,3270 CGACCGTTGGTATTCG-1,0,75,27,10470,3338 ACTACCATCCGAGGGC-1,0,74,28,10350,3407 TCTACGCACGATCTCC-1,0,75,29,10470,3476 AAGCCACTTGCAGGTA-1,0,74,30,10350,3545 ACGTTGTCGTTGAAAG-1,0,75,31,10470,3614 ACCGCTAGTCATTGGT-1,0,74,32,10350,3683 CATGAGTCCATCACGG-1,0,75,33,10470,3751 ATGTCTTGTTTGACTC-1,0,74,34,10350,3820 TCCCAAACATCCTCTA-1,0,75,35,10470,3889 CAACGCGATGAGCCAA-1,0,74,36,10350,3958 GACACCCAAAGACGCG-1,0,75,37,10470,4027 ACGTTCACTATGCCGC-1,0,74,38,10350,4096 ATGGACTGCTTAGTTG-1,0,75,39,10470,4164 CCATGGCAAACGCTCA-1,0,74,40,10350,4233 TTGTCTCGGCAAGATG-1,0,75,41,10470,4302 TTCGAACGAAACATGC-1,0,74,42,10350,4371 GGAAAGTCTTGATTGT-1,0,75,43,10470,4440 ATCATAGATCGACGAG-1,0,74,44,10350,4509 GATTAGAAACAAGCGT-1,0,75,45,10470,4577 GCTGCGAAGAATTATT-1,0,74,46,10350,4646 TTCCAGTGGGTTTCGT-1,0,75,47,10470,4715 GCTCATTGATCATATC-1,0,74,48,10350,4784 ATACGAGGTTTGTAAG-1,0,75,49,10470,4853 GTCTATACACGCATGG-1,0,74,50,10350,4922 GATTTGCGCTAACACC-1,0,75,51,10469,4990 TGGTGCCCTGCCTTAC-1,0,74,52,10350,5059 TCGAATCGCAGGGTAG-1,0,75,53,10469,5128 GGTAGGCCAATATCAC-1,0,74,54,10350,5197 CGTAAATAACAAAGGG-1,0,75,55,10469,5266 GACTCTAGAGTTCCAA-1,0,74,56,10350,5335 ACTTGTTACCGGATCA-1,0,75,57,10469,5403 GGCATTGAACATCTCA-1,0,74,58,10350,5472 AACCCGCTGTATTCCA-1,0,75,59,10469,5541 TAACAGGTTCCCTTAG-1,0,74,60,10350,5610 GTTCTTGTAACTCAAT-1,0,75,61,10469,5679 GATTCTGTTAATGAGT-1,0,74,62,10350,5747 GGACACCTCGGTGTTG-1,0,75,63,10469,5816 CGATCGAGAAGCACCA-1,0,74,64,10350,5885 CTGCTCTGACGGCAAA-1,0,75,65,10469,5954 CCCAGGAAGAATTCGA-1,0,74,66,10350,6023 TGTCGTGGGTATAGGC-1,0,75,67,10469,6092 ATATGTCTAGAGCGTG-1,0,74,68,10350,6160 GCGGGCAGACGGGTGA-1,0,75,69,10469,6229 CGAAGATCAGTTTCAT-1,0,74,70,10350,6298 ACGCCACTCGAAACAG-1,0,75,71,10469,6367 CTTAGATGTTTCATCC-1,0,74,72,10350,6436 GCGAATGGACTAGCGA-1,0,75,73,10469,6505 TAAATTGTGGGTAAAG-1,0,74,74,10350,6573 ATTTATACTGGTAAAG-1,0,75,75,10469,6642 GCGTCGTAACATGGTC-1,0,74,76,10350,6711 CGGTATGGGCACTCTG-1,0,75,77,10469,6780 CTATCACAACGCTGGA-1,0,74,78,10350,6849 ACCACACGGTTGATGG-1,0,75,79,10469,6918 TCAGCGCACGCCGTTT-1,0,74,80,10350,6986 AATGTTAAGACCCTGA-1,0,75,81,10469,7055 CTTACATAGATTTCTT-1,0,74,82,10350,7124 AACATAGCGTGTATCG-1,0,75,83,10469,7193 AGCAGTCGAAGCATGC-1,0,74,84,10350,7262 
GTACTACGGCCTCGTT-1,0,75,85,10469,7331 TTGGCCAAATTGTATC-1,0,74,86,10350,7399 TCAGAGGACGCGTTAG-1,0,75,87,10469,7468 AGTAATCTAAGGGTGG-1,0,74,88,10350,7537 AATTATACCCAGCAAG-1,0,75,89,10469,7606 CCCTACCCACACCCAG-1,0,74,90,10350,7675 CTCTGTCCATGCACCA-1,0,75,91,10469,7744 TCGCCTCCTTCGGCTC-1,0,74,92,10350,7812 CCTGATTCGCGAAGAA-1,0,75,93,10469,7881 GACTTCAACGCATCAA-1,0,74,94,10350,7950 GCTTCCCGTAAGCTCC-1,0,75,95,10469,8019 AGAACTGTACTTTGTA-1,0,74,96,10350,8088 CCTTAAGTACGCAATT-1,0,75,97,10469,8157 GGCCAATTGTATAGAC-1,1,74,98,10350,8225 AGAATACAGGCTATCC-1,0,75,99,10469,8294 AATACCTGATGTGAAC-1,1,74,100,10350,8363 TAGAGTGTTCCGGGTA-1,0,75,101,10469,8432 GCTTCCATGTAACCGC-1,1,74,102,10350,8501 GGTTCGCATTTGCCGT-1,0,75,103,10469,8569 TCAATCCGGGAAGTTT-1,1,74,104,10350,8638 CCGGAAGTGCAATATG-1,0,75,105,10469,8707 CTCATAAATGTGTATA-1,1,74,106,10350,8776 GACGGGCATCGAATTT-1,0,75,107,10469,8845 GCTAAGCCCAGTATGC-1,1,74,108,10350,8914 GCTATAAGGGCCAGGA-1,0,75,109,10469,8982 GCGGATTACTTGTTCT-1,1,74,110,10350,9051 TACAACAGCGCATACA-1,1,75,111,10469,9120 TCTAACCTAGCCTGCG-1,1,74,112,10350,9189 TCTACCCGCATCATTT-1,1,75,113,10469,9258 GTCTTACCACGCCAAG-1,1,74,114,10350,9327 AATAACGTCGCGCCCA-1,0,75,115,10469,9395 CGTTTCGGTTATATGC-1,0,74,116,10350,9464 TACGCTGCACGGTCGT-1,0,75,117,10469,9533 CGTTAAATACGACCAG-1,0,74,118,10350,9602 TCGACTGACGATGGCT-1,0,75,119,10469,9671 ACGCTACTGAATGGGC-1,0,74,120,10350,9740 AAGGGTTAGCCATGCG-1,0,75,121,10469,9808 ACGTTTCGGTGCACTT-1,0,74,122,10350,9877 TTAACCCGAGGCGTGT-1,0,75,123,10469,9946 GCCCGTCAAGCCCAAT-1,0,74,124,10350,10015 ATTCGTCCCGAGGTTA-1,0,75,125,10469,10084 TCAAATTATGTTCGAC-1,0,74,126,10350,10153 AACGTTCTACCATTGT-1,0,75,127,10469,10221 CTGTCCTGCGCACTAC-1,0,76,0,10589,1480 GCCCAGCGACACAAAG-1,0,77,1,10709,1549 TATAGCTATTATCTCT-1,0,76,2,10589,1618 AAGCTATGGATTGACC-1,0,77,3,10709,1687 CCGAAACACGACCTCT-1,0,76,4,10589,1755 ATCAGAAGCTGGTTGC-1,0,77,5,10709,1824 TGAACCTGAATGTGAG-1,0,76,6,10589,1893 TCAGAGCATGTCAACG-1,0,77,7,10709,1962 ATACCCTCCCGGCCAA-1,0,76,8,10589,2031 GCCACCTTATTCGCGA-1,0,77,9,10709,2100 CAACGAGCTTATTATG-1,0,76,10,10589,2168 GATCCAACCTTTAAAC-1,0,77,11,10709,2237 TTGTGGAGACAGCCGG-1,0,76,12,10589,2306 AGATAATCACACCTAT-1,0,77,13,10709,2375 GGTTAGGGATGCTAAT-1,0,76,14,10589,2444 TCATGAAGCGCTGCAT-1,0,77,15,10709,2513 CCAGGCGAGATGGTCT-1,0,76,16,10589,2581 CGACTTGCCGGGAAAT-1,0,77,17,10709,2650 GTGCCAAACGTTTCGA-1,0,76,18,10589,2719 TGCTCAAAGGATGCAC-1,0,77,19,10709,2788 GCCTCAGGTACCGGTC-1,0,76,20,10589,2857 ACAAGTAATTGTAAGG-1,0,77,21,10709,2926 TTAGAGTATTGTCGAG-1,0,76,22,10589,2994 GTCATCCCAAACTCAC-1,0,77,23,10709,3063 CTCCTAGTAATCGTGA-1,0,76,24,10589,3132 CCCTCATCACAGAGTA-1,0,77,25,10709,3201 GGAAACAGAGCTTGGG-1,0,76,26,10589,3270 ACATCGCAATATTCGG-1,0,77,27,10709,3338 TTAGCCATAGGGCTCG-1,0,76,28,10589,3407 CGGATTCTGCCTTATG-1,0,77,29,10709,3476 TTGTTGGCAATGACTG-1,0,76,30,10589,3545 GGATCTACCGTTCGTC-1,0,77,31,10709,3614 GGTGGGATTAGGTCCC-1,0,76,32,10589,3683 AATAGGCACGACCCTT-1,0,77,33,10709,3751 GAGGTCCGTTCGCTGT-1,0,76,34,10589,3820 TGACAACTTAAAGGTG-1,0,77,35,10709,3889 AGCTAAGCTCCGTCCG-1,0,76,36,10589,3958 CTAGCCCGGGAGACGA-1,0,77,37,10709,4027 CCCAGATTCCCGTGAC-1,0,76,38,10589,4096 GCCCGTTCACACAATT-1,0,77,39,10709,4164 ACAAACTCCATCAGAG-1,0,76,40,10589,4233 GGTTCCGTACGACTAA-1,0,77,41,10709,4302 TTGCGGAAAGCTGCCC-1,0,76,42,10589,4371 GGAACTCGTGAATACG-1,0,77,43,10709,4440 GGTTACCGCTCCCTAC-1,0,76,44,10589,4509 AGCGACTTTGAAGACA-1,0,77,45,10709,4577 CCGTTTCCTTTCCGTG-1,0,76,46,10589,4646 TGGATGGCATCTTGGA-1,0,77,47,10709,4715 CAAGTCGTTGAAATCT-1,0,76,48,10589,4784 
TGCAAACGTACTAGTT-1,0,77,49,10709,4853 GGCTGTCCTACTGCGG-1,0,76,50,10589,4922 TACTGCATGATTAAAT-1,0,77,51,10709,4990 CGGACCTCTGTAGTTA-1,0,76,52,10589,5059 CCCTAGCTCTAAGGTC-1,0,77,53,10709,5128 GTAACAGGTTAACGGC-1,0,76,54,10589,5197 CCCGCAAATAATCATC-1,0,77,55,10709,5266 AATCCCGCTCAGAGCC-1,0,76,56,10589,5335 AACGTCATCCGGCTTG-1,0,77,57,10709,5403 CTCGTGGCACTGAAAG-1,0,76,58,10589,5472 CTTTATCCGACGCATG-1,0,77,59,10709,5541 AAATGACTGATCAAAC-1,0,76,60,10589,5610 TTCGTGCATGTTATAG-1,0,77,61,10709,5679 ATCAAAGAGCCGTGGT-1,0,76,62,10589,5748 ATGTCATAATAAACGA-1,0,77,63,10709,5816 CTTAATCGACTTAGTA-1,0,76,64,10589,5885 CTAAAGGATGAGATAC-1,0,77,65,10709,5954 GTGCTATCCAGCTGGA-1,0,76,66,10589,6023 GACAGAGGTCTTCAGT-1,0,77,67,10709,6092 TCGATTTACGAAACGA-1,0,76,68,10589,6160 AGGCTTAAGTTGCACA-1,0,77,69,10709,6229 TAGGATCTTAACCGCA-1,0,76,70,10589,6298 GACTTTCGAGCGGTTC-1,0,77,71,10709,6367 CCCTGGGAGGGATCCT-1,0,76,72,10589,6436 TAGTAGTTGCCGGACA-1,0,77,73,10709,6505 GCCCTAAGTGCAGGAT-1,0,76,74,10589,6573 ACTTGGGCTTTCGCCA-1,0,77,75,10709,6642 CATGATACGGTGAAAC-1,0,76,76,10589,6711 TCCTGCAGCCGCCAAT-1,0,77,77,10709,6780 GTCCGTTAGAGGGCCT-1,0,76,78,10589,6849 GCTTTATTAAGTTACC-1,0,77,79,10709,6918 AATCGAGGTCTCAAGG-1,0,76,80,10589,6986 GTGCTGTTAGAACATA-1,0,77,81,10709,7055 GGCTTCTCGTGGGTGG-1,0,76,82,10589,7124 CCTACAGTTGAGGGAG-1,0,77,83,10709,7193 TATCCCTCGATCTGCA-1,0,76,84,10589,7262 CCGGTATCTGGCGACT-1,0,77,85,10709,7331 CTCTAACACCGGCAGC-1,0,76,86,10589,7399 CTTCATCACCAGGGCT-1,0,77,87,10709,7468 GGCTGGCAATCCCACG-1,0,76,88,10589,7537 CAAACCATAAGCGTAT-1,0,77,89,10709,7606 CATGTGGGCTCATCAC-1,0,76,90,10589,7675 CCCGTGACAGTGCCTT-1,0,77,91,10709,7744 GATTATCTTGCATTAT-1,0,76,92,10589,7812 GAGTCCACCAGGTTTA-1,0,77,93,10709,7881 TGTACTATCGCTCGTT-1,0,76,94,10589,7950 TTAGCGAATAGATAGG-1,0,77,95,10709,8019 ACATACAATCAAGCGG-1,0,76,96,10589,8088 ATTCGTGTACCCATTC-1,0,77,97,10709,8157 GCTAGCAACGCACCTA-1,0,76,98,10589,8225 ACCTCAGCGAGGCGCA-1,0,77,99,10709,8294 TCCGGGCCACTAACGG-1,0,76,100,10589,8363 TCCCAATATCGACGAC-1,0,77,101,10709,8432 GGATTGAAGTAGCCTC-1,0,76,102,10589,8501 GGCGCACAGTTTACCT-1,0,77,103,10709,8570 CCCGGCACGTGTCAGG-1,0,76,104,10589,8638 AGTAAGGGACAGAATC-1,0,77,105,10709,8707 TTGCGCACAACCACGT-1,0,76,106,10589,8776 CCTGCTACAACCATAC-1,0,77,107,10709,8845 CTGGGCTACTGGAGAG-1,0,76,108,10589,8914 AGCTACGAATGGTGGT-1,0,77,109,10709,8982 ATTCGTTTATCGTATT-1,0,76,110,10589,9051 GGCATTCCCTCCCTCG-1,0,77,111,10709,9120 AGTTATTCAGACTGTG-1,0,76,112,10589,9189 CCACTTTCCTTCTAGG-1,0,77,113,10709,9258 CGCAGTTCTATCTTTC-1,0,76,114,10589,9327 AGGAGCGTTTATTATC-1,0,77,115,10709,9395 GACAGGTAATCCGTGT-1,0,76,116,10589,9464 TTCCCAAAGTACTGAT-1,0,77,117,10709,9533 CTGCAGGGTGACGCTC-1,0,76,118,10589,9602 TCATGGAGGCCTTTGT-1,0,77,119,10709,9671 ATGGCCCGAAAGGTTA-1,0,76,120,10589,9740 CGTAATATGGCCCTTG-1,0,77,121,10709,9808 AGAGTCTTAATGAAAG-1,0,76,122,10589,9877 GAACGTTTGTATCCAC-1,0,77,123,10709,9946 ATTGAATTCCCTGTAG-1,0,76,124,10589,10015 TACCTCACCAATTGTA-1,0,77,125,10709,10084 AGTCGAATTAGCGTAA-1,0,76,126,10589,10153 TTGAAGTGCATCTACA-1,0,77,127,10709,10221 Seurat/tests/testdata/visium/spatial/scalefactors_json.json0000644000176200001440000000024413712563445024050 0ustar liggesusers{"spot_diameter_fullres": 96.3607212981978, "tissue_hires_scalef": 0.17211704, "fiducial_diameter_fullres": 144.54108194729673, "tissue_lowres_scalef": 0.051635113}Seurat/tests/testdata/visium/filtered_feature_bc_matrix.h50000644000176200001440000070561313712563445023631 0ustar liggesusersHDF  `TREEHEAPXmatrixHHhTREE80HEAPXHbarcodesdataindicesindptrshapefeaturesSNOD Hh  deflate 
[binary HDF5 payload of filtered_feature_bc_matrix.h5 omitted: a root 'matrix' group holds deflate-compressed 'barcodes', 'data', 'indices', 'indptr', 'shape', and 'features' datasets; the readable fragments in the raw dump are cell-barcode strings from the 'barcodes' heap]
AAGACGAAGGA-1TCCCACTCTCTTCCGG-1TCCCAGCTTTAGTCTG-1TCCCAGGCTTAGCTAA-1TCCCGCGTACTCCTGG-1TCCCGGGTGTGCTGCT-1TCCCGTCAGTCCCGCA-1TCCCGTCGCGTCATAG-1TCCCGTGTGCAATTTG-1TCCCTAGATCAATAGG-1TCCCTGGCGTATTAAC-1TCCCTGGCTCGCTGGA-1TCCCTTAGATTACTCG-1TCCGAAGTAGTCACCA-1TCCGAATGGTCCTGAG-1TCCGATAATTGCCATA-1TCCGATGACTGAGCTC-1TCCGATGGTGCGACAT-1TCCGATTACATTGCCG-1TCCGCCTGTCTACAAG-1TCCGCGGCAGCATCTG-1TCCGCGGCCCAATGAA-1TCCGCTGTCATCCCGG-1TCCGCTTATCCCATTA-1TCCGGAGGAAGGGCTG-1TCCGGCCTAGCGTACA-1TCCGGGCTTGACGGGA-1TCCGGTTCGTCCGGTC-1TCCGTTAAGCTAATAT-1TCCGTTTAGCCTTGAA-1TCCTAAAGATTCAGAC-1TCCTAAATTGGGAAGC-1TCCTAACCGTCGGGCA-1TCCTACATCCACGGCC-1TCCTAGCAAAGAAGCT-1TCCTCCTAAGACATTC-1TCCTCGGGCTGGGCTT-1TCCTCTACGAGATGGC-1TCCTGCCAACTGGAGA-1TCCTGCGTTGATACTC-1TCCTGGCGCTGCCTGG-1 TCCTTACGACGGTCCG-1 TCCTTCAATCCCTACG-1 TCCTTGTCCTTTAATT-1 TCCTTTAAATCCGCTT-1 TCCTTTCTTACGCTTA-1TCGAAATTTAGGACCA-1TCGAAGAACCGAGCAC-1TCGAATATCCCGCAGG-1TCGACAACTGAACCCG-1TCGAGACCAACACCGT-1TCGAGTCTACGATTCG-1TCGCAAAGATGCATTT-1TCGCACCAGGAGGCAG-1TCGCATAAAGGGCGCA-1TCGCCGAAGTTGCGTC-1TCGCCGACATATTCGC-1TCGCCGCACCGCGTGA-1TCGCCGGTCGATCCGT-1TCGCGTAGCAGTGTCC-1TCGCGTCCAGAAGGTC-1TCGCTAAACCGCTATC-1TCGCTACTGGCTTTGA-1TCGCTCGATATATTCC-1 TCGCTCGGCACCAGCG-1!TCGCTGCCAATGCTGT-1"TCGCTGGGCGGATTGT-1#TCGCTTTAAACGTTTG-1$TCGGAATGCGCTCTGA-1%TCGGACGCCCAGCCCA-1&TCGGAGAGTATCGGGA-1'TCGGAGTACATGAGTA-1(TCGGCGAACCCAAACC-1)TCGGCGTACTGCACAA-1*TCGGCTTGTATCGACG-1+TCGGGAACGTGCCTAG-1,TCGGGAGACAGCGTAC-1-TCGGGCCGTCGTGGTA-1.TCGGTCCCGACAATAG-1/TCGGTGACCGCTCCGG-10TCGTAAGACGACATTG-11TCGTAAGCTCCGAGGA-12TCGTACCGACGTCAAG-13TCGTATTACCCATTGC-14TCGTCAAGTACGCGCA-15TCGTCTTAGGCGTTAA-16TCGTGTACTATGGATG-17TCGTGTCACGCTGACA-18TCGTGTTCGACCACAA-19TCGTTAGGAGTCCCTA-1:TCGTTGCTATCCGGTC-1;TCGTTTACGCGACCCT-1<TCTAAAGAACAGTCTC-1=TCTAACCTAGCCTGCG-1>TCTAATACTGCCTCAG-1?TCTACCCAATAGAGAG-1@TCTACCCGCATCATTT-1ATCTACCGTCCACAAGC-1BTCTAGCAATCTCCGCC-1CTCTAGCATCTTCGATG-1DTCTAGCATGCCCAGAA-1ETCTAGGTGGCGACGCT-1FTCTAGTGATATCGTGG-1GTCTAGTTATCAGAAGA-1HTCTATAGGTGGGTAAT-1ITCTATCGGTCGCAACA-1JTCTATGCTATAACGAC-1KTCTATTACTAGAGGAT-1LTCTCAAATCAATCGGG-1MTCTCATGAGATAGGGT-1NTCTCCAACGTAGGTTA-1OTCTCCACAAGTTGAAT-1PTCTCCCTGGGCAGCGT-1QTCTCGAACGAGGTCAC-1RTCTCGACGTATCGCCG-1STCTCGAGGAGGTTCGC-1TTCTCGTGTTACGAGGA-1UTCTCTAATAGCTGGTA-1VTCTGAACTCGTACCCG-1WTCTGAAGCACGTGGTC-1XTCTGAATTCCGTACAA-1YTCTGAGCAATTGACTG-1ZTCTGATGTATTCTGTC-1[TCTGCATACCTTGCTT-1\TCTGCCAGAAACTGCA-1]TCTGGGAACCTTTGAA-1^TCTGGGTAGCGCTCAT-1_TCTGTGCCATCATAGT-1`TCTGTTACCCAGCATA-1aTCTTACCGGAACTCGT-1bTCTTACTTATGCCTCT-1cTCTTAGAGCTCCAATT-1dTCTTAGAGTGAACTCT-1eTCTTCCCATGGGCACA-1fTCTTCGAATAGACGTT-1gTCTTCGATACCAATAA-1hTCTTCTATAACCCGCC-1iTCTTGATGCGTAGCGA-1jTCTTGCTCCCGATACT-1kTCTTGGTAACACCAAA-1lTCTTTAAGACTATGAA-1mTCTTTAGAGTCTAACA-1nTGAAAGGACCTGACTC-1oTGAACTGCTATGACTT-1pTGAATACCGACGCGTA-1qTGAATATGCTATAAAC-1rTGAATGTCAGCCGGCC-1sTGAATTTCACTTGCCT-1tTGACACTTCTCTTTGC-1uTGACAGGACAAGTCCA-1vTGACATATATGACGAT-1wTGACATCGAGCGGACC-1xTGACATGTAACGTGAC-1yTGACCAAATCTTAAAC-1zTGACCCACGTTAGACA-1{TGACGAATATTTCCCT-1|TGACGATGCACTAGAA-1}TGACTATAATCCTTTC-1~TGACTCCGAATCATAC-1TGAGACGTACCTCTCA-1TATGGGTACGTATCGT-1TATGGTCTGAGTAACA-1TATGGTTAGTGGGAGA-1TATGTAGAAACCCGGC-1TATGTCAAGACCGACT-1TATGTCTCATTGTGCC-1TATTAACACCAAAGCA-1TATTAACCTGACCGCG-1TATTACCATCCTGCTT-1TATTATGTTTGCCTGC-1TATTCAATTCTAATCC-1TATTCCACTCAGCTCG-1TATTCCGAGCTGTTAT-1TATTCCTCCGCCCACT-1TATTCGTGCCAGAATA-1TATTGCCGGGCTTGTA-1TATTTAGTCTAGATCG-1TATTTATACCGAGTAG-1TATTTGTTACCCTTTA-1TCAAACAACCGCGTCG-1TCAAACTTAGATTGTT-1TCAAAGAGCTATCTGT-1TCAAATTGTTGTGCCG-1TCAAATTTGAGACTCA-1TCAACAAAGATAATTC-1TCAACATAGCGCCCTA-1TCAACATCGACCGAGA-1TCAACCATGTTCGGGC-1TCAACGAGGAGACAAA-1TCAACGCAGGAAATAA-1TCAACGCGACCGGCAG-1TCAACTGCAGAGTCAG-1TCAA
GAAATACTAGCT-1TCAAGCGCGGACGGTA-1TCAAGGTTACTACACC-1TCAATACGCCGTCATG-1TCAATCCGGGAAGTTT-1TCACAAACCGAGGTAC-1TCACAGCAAACTCGAA-1TCACAGGAGAATAAGA-1TCACAGGGAATCGCAA-1TCACAGGTTATTGGGC-1TCACCCTCTTAAGATT-1TCACCGCTCGGCACTC-1TCACGATGTCCGTGGA-1TCACGCATTGTAGATC-1TCACGGTCATCGCACA-1TCACGTGCCCGATTCA-1TCACTACGACCAATGC-1TCACTATCCCTTCGGT-1TCACTCGTGCAACGGC-1TCAGAACCTCCACAGG-1TCAGACGCTATAGAAG-1TCAGCAAATGCATCTC-1TCAGCCAATCCGTAAA-1TCAGCTTGAGCTTTCG-1TCAGGGTGTAACGTAA-1TCAGGTTCTTTGAGAA-1TCAGTACTGACCCGCG-1TCAGTAGGGACTATAA-1TCAGTGTATACGTCAT-1TCATATGAGCTTTGTT-1TCATCCTCAGCTGCTT-1TCATCGACGACCGTCG-1TCATCGATGGTCCCAA-1TCATGCAGGTTCTCAT-1TCATTTAAGTCTCCGA-1TCATTTAGAAGTGTGA-1TCCAACTCAGCTATCT-1TCCAACTTTAAATTCT-1TCCAAGCCTAGACACA-1TCCAATAAAGGCTACC-1TCCACAATGGTTTACG-1TCCACATCGTATATTG-1TCCACCAAGACATAGG-1TCCACCTCTAGCCTTT-1TCCACTTTATCTAGGT-1TCCAGAGCACCGGTTC-1TCCAGATGTACGCCAA-1TCCAGGCGAGTACGGT-1TCCAGGGTATATACGA-1TAGGGTGTTTCAAGAG-1TAGGTCGCCGGAACTG-1TAGGTGACGATAACCT-1TAGGTGAGCCCTACTC-1TAGGTGCTCGCCTAGC-1TAGGTGTTCCACAGAT-1TAGGTTCGAGTTCGTC-1TAGTACCACAACTTTC-1TAGTAGCTTATACCAG-1TAGTCCGCAGAGAATG-1TAGTCGATCACGGGTT-1TAGTCTAACAACGAGA-1TAGTCTGTGACGTTGC-1TAGTGCCCTCCAGAGT-1TATACACAGACGCCTT-1TATACGCGTCATCACT-1TATAGATGGTCGCAGT-1TATAGCGCACGTTATC-1TATATATCGAGAAATG-1TATATCCCTGGGAGGA-1TATATTACAAATGTCG-1TATCACCCAACCGACC-1TATCACTTCGAGTAAC-1TATCAGTGGCGTAGTC-1TATCCAATTGGTTATC-1TATCCATCTCGGTTAG-1TATCCGCACCGTCGGG-1TATCGATCTATGCATA-1TATCGATGATTAAACG-1TATCTACCACAGCGGG-1TATCTGAGCCGATATT-1TATCTTGCAATACAAC-1TATGAAGAATTAAGGT-1TATGACCTTGCGCTGG-1TATGATCCGGCACGCC-1TATGATCTTCTCTTTA-1TATGCTCCCTACTTAC-1TATGGATGTGCTACGC-1TATGGCCCGGCCTCGC-1TATGGGACCGAGCAGG-1TAGATATGGACTGGAA-1TAGATGGTTCCTTACT-1TAGCAGATACTTAGGG-1TAGCAGTATGACTAAA-1TAGCCATTTCAAAGTC-1TAGCCGGCGGTCAGCG-1TAGCGTCCCTCGATTG-1TAGCGTCCGGTGTGGT-1TAGCGTTGGGTCTTAC-1TAGCTAAGTCCGGGAG-1TAGCTAGAAGGCATGA-1TAGCTAGTGATGATGG-1TAGCTCGCCTGATAAC-1TAGCTGATGTGAAGCG-1TAGGAGGCTCGAGAAC-1TAGGCATGTTACGCCA-1 TAGGCCTATATAGTCT-1 TAGGCGATGAGGTCTC-1 TAGGCTAAAGTGGCAC-1 TAGGGAGCTTGGGATG-1 TACTTTACTGAGCCGG-1TACTTTCCGCACGCCA-1TAGAAAGGTGGCGCTA-1TAGAATAGCCGATGAA-1TAGACGAAACGCCAAT-1TAGACGCCCGTACCGG-1TAGACTACCTAGCGTT-1TAGAGATCATGCAACT-1TAGAGGTTCTACTTGT-1TAGAGTCTAAGCGAAC-1TACTGAGGGAAGAAAG-1TACTGCAATCAATTAC-1TACTGGACAGCTCGGC-1TACTTAAACATGTACA-1TACTTGTTAGTAGTCC-1TACTCTTACTTTACTG-1TACTCTTTCGTCTTCA-1TACTGAACAGATTTAG-1TACTCTCCGAACAAAT-1 TACTCGTTTGAATCAA-1!TACTATGGTTCCTCAG-1"Gene Expression#Gene Expression$Gene Expression%Gene Expression&Gene Expression'Gene Expression(Gene Expression)Gene Expression*Gene Expression+Gene Expression,Gene Expression-Gene Expression.Gene Expression/Gene Expression0Gene Expression1Gene Expression2Gene Expression3Gene Expression4Gene Expression5Gene Expression6Gene Expression7Gene Expression8Gene Expression9Gene Expression:Gene Expression;Gene Expression<Gene Expression=Gene Expression>Gene Expression?Gene Expression@Gene ExpressionAGene ExpressionBGene ExpressionCGene ExpressionDGene ExpressionEGene ExpressionFGene ExpressionGGene ExpressionHGene ExpressionIGene ExpressionJGene ExpressionKGene ExpressionLGene ExpressionMGene ExpressionNGene ExpressionOGene ExpressionPGene ExpressionQGene ExpressionRGene ExpressionSGene ExpressionTGene ExpressionUGene ExpressionVGene ExpressionWGene ExpressionXGene ExpressionYGene ExpressionZGene Expression[Gene Expression\Gene Expression]Gene Expression^Gene Expression_Gene Expression`Gene ExpressionaGene ExpressionbGene ExpressioncGene ExpressiondGene ExpressioneGene ExpressionfGene ExpressiongGene ExpressionhGene ExpressioniGene ExpressionjGene ExpressionkGene ExpressionlGene ExpressionmGene 
ExpressionnGene ExpressionoGene ExpressionpGene ExpressionqGene ExpressionrGene ExpressionsGene ExpressiontGene ExpressionuGene ExpressionvGene ExpressionwGene ExpressionxGene ExpressionyGene ExpressionzGene Expression{Gene Expression|Gene Expression}Gene Expression~Gene ExpressionGene ExpressionGene ExpressionGene ExpressionGene ExpressionGene ExpressionGene ExpressionGene Expressionmm10mm10mm10mm10mm10mm10mm10mm10mm10mm10mm10mm10mm10mm10mm10mm10mm10mm10mm10mm10mm10mm10mm10mm10mm10mm10mm10mm10mm10mm10mm10mm10mm10mm10mm10mm10mm10mm10mm10mm10mm10mm10mm10mm10mm10mm10mm10mm10mm10mm10mm10mm10mm10mm10mm10mm10mm10mm10mm10mm10mm10mm10mm10mm10mm10mm10mm10mm10mm10mm10mm10mm10mm10mm10mm10mm10mm10mm10mm10mm10mm10mm10mm10mm10mm10mm10mm10mm10mm10mm10mm10mm10mm10mm10mm10mm10mm10mm10mm10mm10ENSMUSG00000025935ENSMUSG00000025937ENSMUSG00000067813ENSMUSG00000025932ENSMUSG00000054493ENSMUSG00000025930ENSMUSG00000032769ENSMUSG00000092083ENSMUSG00000025925ENSMUSG00000032719ENSMUSG00000067795ENSMUSG00000043716ENSMUSG00000025921ENSMUSG00000100868ENSMUSG00000025920ENSMUSG00000089982ENSMUSG00000025939ENSMUSG00000079658ENSMUSG00000097744ENSMUSG00000025940ENSMUSG00000025779ENSMUSG00000101640ENSMUSG00000042686ENSMUSG00000100110ENSMUSG00000025777ENSMUSG00000099899ENSMUSG00000067780ENSMUSG00000100053ENSMUSG00000085125ENSMUSG00000025776ENSMUSG00000099895 ENSMUSG00000025774 ENSMUSG00000073735 ENSMUSG00000067773 ENSMUSG00000089787 ENSMUSG00000042596ENSMUSG00000025927ENSMUSG00000100538ENSMUSG00000043760ENSMUSG00000089914ENSMUSG00000099906ENSMUSG00000025929ENSMUSG00000041872ENSMUSG00000041859ENSMUSG00000102121ENSMUSG00000097934ENSMUSG00000025931ENSMUSG00000041809ENSMUSG00000041779ENSMUSG00000099971ENSMUSG00000067879ENSMUSG00000099827ENSMUSG00000025912ENSMUSG00000045210 ENSMUSG00000097893!ENSMUSG00000025915"ENSMUSG00000046101#ENSMUSG00000098234$ENSMUSG00000099032%ENSMUSG00000025916&ENSMUSG00000087199'ENSMUSG00000025917(ENSMUSG00000056763)ENSMUSG00000067851*ENSMUSG00000042501+ENSMUSG00000048960,ENSMUSG00000057715-ENSMUSG00000097171.ENSMUSG00000101314/ENSMUSG000000169180ENSMUSG000000259381ENSMUSG000000994982ENSMUSG000000424143ENSMUSG000000058864ENSMUSG000001014765ENSMUSG000000259056ENSMUSG000000337747ENSMUSG000000259078ENSMUSG000000900319ENSMUSG00000087247:ENSMUSG00000033740;ENSMUSG00000051285<ENSMUSG00000097797=ENSMUSG00000103067>ENSMUSG00000025909?ENSMUSG00000061024@ENSMUSG00000025911AENSMUSG00000025903BENSMUSG00000104217CENSMUSG00000033813DENSMUSG00000002459EENSMUSG00000085623FENSMUSG00000033793GENSMUSG00000025902HENSMUSG00000104328IENSMUSG00000033845JENSMUSG00000102343KENSMUSG00000025900LENSMUSG00000089699MENSMUSG00000051951NTram1OLactb2PXkr9QEya1RGm9947SMscTTrpa1UKcnb2VTerf1WSbsponX 4930444P10RikYRpl7ZRdh10[Gm28095\Stau2]Gm7568^Ube2w_Eloc` D030040B21RikaTmem70bLy96cGm28376dJph1eGm28783fGdap1gGm28784hPi15iGm28154jGm16070kCrispld1lGm28153mCrisp4nDefb18oDefb41pGm15825qTfap2drTfap2bsGm28340tPkhd1u 4930486I03RikvGm28653wIl17axIl17fyMcm3zGm28065{ 6720483E21Rik|Paqr8}Efhc1~Tram2Gm28287 3110035E14RikGm29520Mybl1Vcpip1 1700034P13RikSgk3Mcmdc2Snhg6Tcf24Ppp1r42Gm15818Cops5Cspp1Arfgef1Cpa6Prex2 A830018L16RikGm17644Gm29663Sulf1Slco5a1Gm29283Prdm14Ncoa2Gm29570Oprk1Npbwr1Rb1cc1 4732440D04RikAlkal1St18Pcmtd1Gm26901Gm30414Sntg1Rrs1Adhfe1Lypla1Gm37988Tcea1Rgs20Gm16041Atp6v1hSox17Gm37323Mrpl15Gm37381Rp1Gm1992Xkr4+  deflate%z]XTREE#din,s Bx(I}0%8@HȐPݕX`hpxgx[:-    deflate%z]XTREE T(08S@ lHuPXl`!h&p ,7x16za<A_GMRX ^c   deflate@%%z]XTREE0 Rg?s  deflate.%z]XTREE%w68TREE:(HEAPX09feature_typegenomeidname(d deflate;%z]PSNODp9C 8M(xVTREEcd 
deflateE%z]PTREEbd deflateHN%z]PTREEad deflateW%z]PTREEa_x^RQ/d%H"I@9#9$ ^u;U +z%DQNŸcMس{H=D 7ž=~ ?@0LP?)9üs̻=c?IB~f?G~YF~%_E|o·Bп)#_O)W#W: Ϙ_z-a= ߀7ތ- vxGy D+w^xx"?'a0G0o(""""""""""""""""""""""""""""+x^еVCQ-אۿ]-`jʙݞ8pJ_>xޅu8o O3,< "/+*p; np< <O|S >/K o[G ~_W ~?O8\I8b.rjN1\ p7p c_?umx^Ra@ϙ|i[ ,-v]gwB D!D_#?/+~g9~_W5~w=>/ox[6{x~x^x / «x=ހ7Mx3ނmx;ށw]x7ރ}x?>C0> #!D~g~o;~'>O,| | _ <s</K2  o g=x>\8· B.KR .+8\ W k:n&n[6;$w=p/ <#(<$<OG~gffffffffffffffffffffffffffff7Łsx^鮤5 a71]ґď$u{< Ɵfs 9V2>[mS_>H>}9uT7/ڔWF@oox MZv_}V͙-|ƥ۵[`W銥<%.O}_QZ}_>| 9ENmvmյm){Ҝ_Uҋ_uBLWy/!V1x[ViOcxԧ[9:ߊ+gΙv4Fٷ^qI'#ƪw+O<(o[7 1y+mATךFwИTϱww׿,l2WK^@kõ {hg\V\dgwۣS}x)_}гovlzvUkH.vX]{:q/]kXY}|2]kP1I_.7֕(zyvnnǭڤoߜWV;ևn`n۳`}7밺ѥc^iJ[o+;WWs(W^ѹ\GV/h>9;[md~*ϻoe}87iKs3r-Ms߱yߠهu6oک{o8_vW9u_;1aW_ykjO#/4cob[SkV^L74}wA]ͭ6bcW}7z߭}t}i19:۷1۾`n`eZul{SgrCR+wkm]:7^[UA'K/=l[="Nulǫ|xomt kؠ7}΋Vg=P"x^mE5("R ^L4a >~w/G|228{Q;OP]|_գ]yzN';_DX[*7x}Q:Fm)yk0_c׼7Gmrh +wѱNsLkm`dKw g?ykzؾ AXݭlvG>"/~_/6q0)Mk/gW/oLƸ<}zg{6beWƜ&W {.ةy&w伴nqJk;r6vca{I_z6_|y4]]gy{*ԗ @]o[gB|bәH# i{`fЏ틕wwNպ:=Ys*z_=z+W^ ]Qe{Hw6}s( m{<6@?]g< mT*4ÇN4gDwoo.oY뵾~;`cO~#X2oj]cjпճ>z]l-JƯ)e|=.6^߷K_gPWiv=|omyz'GS/VO|9Fk#p{vzg빻/~[/֨>lur#]Ko^r׾7o9xPo%%ۏ-ksй=6}^Zک tx=.K=|Rޞե,ǾWWeeS`swrl{gM{yXuK@׋[K~h/^=.M~u럯h{|}~T~s{BeF_k־i9~.;O=G/+WIulݿñkt_@+'UN .;>iQڥz̙h.]}o+|l|{Bȋv772# ":/cǑ(۷.{>RO+@93ƈLd(3kS7oo&\^'vW/UA\k8[ڑwqʧ=֫Q?S?DgOwޫ6ѹouc{: kx^TEFE %+9+|zSU{jӯǫoQܷX Nxi\צ4ϟyA~(M YkOY7׸9 5`czchBuihΤ5Niɣ|k2x.ZO+q P?74e٘ԥyk}G_Ap[SֵQ6u`T_m~uUk`ss'S6[k(кk9nohk9z~INh_9r\T<y`N{o}8gnNz9Q"6b2Ncwӯۼioy}y4to-YxկY+c,>v}zVߝ\ytx [߉f;yum㵹T?u>orc\9F(ӽx8õ +n<=7i.̻=\ROc߹6e))]1[ijtmm5յ_㳺k}3fm2hog㕯 qGN uO^ovܾKٯ{q1{ ^{v =)tʧ^y!k|S1|,ߥʣ-wG}sh~|Os\ߏ}Dgm~W[0MbV4Wܜߣжh`^]Wcfl[iqe孿rdjc\w6iwW4uְz|'`~&׷7@s+<};Yٮ9joam9WwgW:/lnn~wGm k}P9뷠Vo7IZݲguPۭ9;m'{~Z}?ٯҚщz4{>,ÞqwGoxmػS+Usl5'gl}'/|S_niܫ[~k:o-)?wZUZsOרmk\{/8VWuo|'^ơ-;8km c\}S]8K+Ou-YY:Z [q}{x^٪$G x; ࠾~RjR'||\>dJmo.wwK^o~=ח><dmLqn_>U<#'7K|Z_W~c/?s}pxo}`chG-/{xU_cV~eʿtǑgey~U4sZVO.PN]~oV5&Qys>)g~k_2Si}^_ګ~>jy'zg*yz>k~{3#']8ꟸ|K8wq\XƲ^p":;3.g.>+l KgϞ:[͇C}͹u}}ʰJ^vvI4s5fywW}8y58goq,o ..s}κX0N_cټ ^>lΚ=Bo}Cۧ8z{hoU{<4Ve߽=/@k/>?'ϮͳkG8;һrl}Ҟ::1]3CߖWhX`k=}L΋W_>+{[y=]Y{V7st͗v5ދ=9~Ӛ='}}9y/yޫGߪ˿)\]Tyk KO56OssqtYhھRs<ꅆ_=[}l .}q(< KՎXȣ#)K&1vmVoӳ-6Οӧ]gQ6~Mk׹="009ʯWwwtv-h%7Ь3yc~{S=9Zڥ{y{xs%rƷv]g Z _K'gy ^Z`Ζ9]+@K|ڗs\۪8s}Tw`t]<=?k}e5q|ch=g z'җ Aoi>iǚ\p.wz^6?;ޑDy%38wm9tdנ2;W({߬/FLkJ^QKẈskkko5ھB۳ վ0mu}E;yQ;<@w}sim{ֺ< mSyOK/W=zvSƦ/^ŭ]yx{EZ;K^l-mmv?6:wZ]>|kz&C}^/Gc{Կiges=W D?|hwsZ_șsչmcsC1mk{uQϫ[yZ z39妺]w+ּ{U=<~Rovi&y{{>{|'/kν7N{1^ǝpwR{ho40kz!!~,NaOp=l=k}9/88೜c^9;"uat?.u:YWocgzk[c[߯aϖPS5G=wlĠ.Ƚs/{7:m@O:x:/כ|f>ˍ߿3f\0=rWqn8dKmL/zF_{ 6zl[c݌fmwg@s/}āwÚ]]_-Yk>X?χ|ǽ~ܵqU Q7ϯ5OlƝ}#? 
D>WԀwt_s;Aڲ8n\Lk ;ֶw;tJOuofyFl[13qֳ_sZ܅gp>Zoso;{ܱpj6wWخkbTًϋʃ_o+|e/^ |<7F0U Q`zۛ[n˔ݿ|49ݯUw{sr}dgCF/u;^:H,'(^ǚ߲yֳoM2]?;gkmOw-^*㩿>~]ϋܶoh;)^g96Clõzc־]iַyXeԺp=<y=-Ğs~ {oɒ }<~5 1/~ݳFW[[+ ෎`ƻj\^11%;Zݡ.g{Լ)斷:};&oJۺad_c{4ifߠcm~h>5L3G[9oܽ8qSg?ǵzvAR;/~o;;f즆}ɏ>^+smj祏&v6 ɱ19O#xO>X=ϗM~[x^ْ]5`6ygp3"CtԭE|~{{ݫo1r+gNe Ww9+o9j3]rcg_@\~fy-.Y>|X'A7ם¼_;[3}}qL\qͱ]/ڲ'҆Q~oyqgiI+,G۞2ba%:_2/Z=cյ6u5m U=竖j pZkn*j3rNV{)Q<_3[*wk@\\]x7sU{6߻;-b~nZY}cSͻ6k]Ucc۽x\]gr'[|Stso}h*/~;:{pcs=~|ڨVWony; Ph>sԼm>u?g9wGYw,W6捜6wwr9:N9nh|+oc] ] 2x.y7s+kNxAY{mgvԥ5K~ͯ"5wL =5a׹ӽ?v>ao^꭯S;֦k]\qoe9v'\s|Svufl9|x;gWustSg&_ yfcg`k:|[K|_.a;ֳйyxw;zmϓk<] >-,w{5o)4&۾z8~!V%c[]7=ܼx^ٮ7 ,iR@R8\?Hp3>G_=g+kyHdmt9c#wo7_lI d>#?F&^?DqI`v9uoI_|~yQLycGs1[s17yosMAHm6^q/:<ԮXˍ^jzV ƟʠO5'͟>[??k0b9(Y.|ߜ8J^e~W>4V|sR;ՑϜ{E,h->*ޫ/b9VnācxQ{:93okw?==l-uo+_ݒ0NSgs|[_Oi1h>t﹤7:{mnl =/;x9/^ll>:+}69s}=7W`XvԞyTfLWjo*u:Żlns$_֯>*߽H=y)[΍>\]ߔ=SM>ˮKڮ{xԾ1yn~j-J^ߦ|(cW_+cGֵgu 9yn̟{/G=ߢi 73|YNcZRc~+Ƴakqٽr{Fq}]ȍ>3v1{Kvޯ-Y}4bTx$U~_ϟ( ]g7ŽlW{y/fso:Y}dn.=3=klP9Co9gKթ~mUAj|Λ*sۣ덳~16SGjx^׮dU s@%{@c7b\g]X^4tQkVr?3b<uz~f^X_Ʒ8L'z}Fs;9;Ε9=+21qNk6ONó>q0fs3v׺<Ϛ8WL?IXofC|Ӳ篼7lkqAba>7:y~1c:}60mCu47ڽ"_{-|uZq=x-FraxqWZ~vS<Ye/cmo0g]w&ctS[Y^[fFqvWegk/}ߥ&M`g2FxGZ+%yƮؾ<ߔE 迯[B*3f{CdwXw$|w^qunx4^siKݚ{FeGmZI#?}|^gp{^o,!W]>zϭ:İwRutBjK{ޓ #dҫ5e#G?c{{11ٟްmjxm.Z6B}m.uzۚW9?~ךx|n-c]tz12ZyFsǿ7.ǹZoF>̳5YaG=!dY|/-vy߳ͿE6e>2706X=aYh]@:Qk8kh(u=b=+/E}~{oKA?qfDy^H6p/321^;X:# {]-?-:'kObl]ax^ۮ7E!S+9K%-mr]˗y~zzz~ їy?soc>\^uhm>ګ c8uYٝyx5ՅOmX~7fKe[1wƬ\S u<=t}G͋!96[So!~xU% Q‡5 wɚGㅰ)g@ewf⼺+g'd;9LVpcSŠ~>V, {N;}83*ӳVםcy{yVkܽOS派ުbI;bb*SAתQsrq~h=רƧcgwy [#BvkdI{wX׿ܠomL:Nq^/7!ړz>K?˓[rqº{ŘIez&?bhϨ%fz_4K_ x^G1sin@ i !ѳLd 7mBs1Yr"́?فo7J=jw߃5xD8׽Y1.cMM ϺxÞ]_Uv6tVe'UL_-R[xXio}8l篧mj鬽Ϲ.[qmܺh>Η;wZcd|: ΰsG׃si O79r޶Kf41y+lc{j.b3m\yky#ڰ_=ٴc]C9}g W5 {tL{?;Ղ5j]l<5^u&Omп=vw6*W1/s|x6ݍ8>mO;˳>Mh;Q{5U_]o㩆mG^On9VZO6{:'Gkw3v!7:}w۶[e~nG~,s3"7KeO6ѻ'Nk>GUGcǃehW-?n? S״mWk K9G,]Pzq1a}' }{N6^ЯܑaoŮKa?Tnx5=ڟsϛ=^Fo},|wm{{9[~s#F'W]RvmN7ych=|h=>F}odl+~'cPt,TbN1tI{^Wq'6~7 h{p}O1W!6Ϡ=_>ޣږ{m\^gqnSߩz5Sb2V6Ίk/װ}i_^bȹos?^]k'7tbot֖o~kǚz6ק[WW/Yt6TS,خcfZ Aw8w~z/1zo<,]\,F5@ԝv;/27)o]v `osy֞rgo>ܸOj0wYףnÖv}@|.x^ɎA }pPVLv:]Uxi=x6ۿ|zoO>ٯ֔u{{99Ks?YnukccI^ݳпo7{4`^k92Llad^aVeq>zN*cnlܜq <[7t֮kD| kd}_j йػ|qB3ˀm>g_ͳ5-8VA~pf{txY\7GH,ۚ׾w ;8\ї u(bb1o:+0{o]} SAYbam};{ly~@p1w0 .gq^7ABۥywNt>/|b  xwѶNba؟j\&s2vټ~!FvW-،mu-ǜ ~A޿C[sFbx^ dE[]Qt:Td#}><Ϟyç=eW"t:ƴ GV1W߽O%ֵ#gbYco!~_6>0pL&G(r$.|cT׌ŵgxCbfOOm5N[7|N}_LiGlqOeVDګ^TR~ksŘ+]Ǯ׳y>sB>}}n=7>99Rlwk=jmv !/X^Iw\ƿqU1x׶(wGwS{t7%s˷pص/TfsVs_.ګή3J+ٵ[3˫MFߒjr K7ybP,Wvb_{5޻sp{֡vMm=נ.s/i |g۳K>{Q+ ko[D1Mr寽ݟ/i[O8u>+cCK* ݳ:W+_+wπE=p>ouU-6ƨzW7y6k1owt_9mkuWƔ]յ^']o?*'N΃m[5Dky;=ߺo>;ʷǹ{xMq;7}'kַ_re\oЅ7eֿ}nׯd17￧k x^ٮ7E$)',KYr_.%v vwxB/. 
5}?^ceὺƵ!~AK~$d 2\9Ɓߥ_l|3o \ƺꗑZ5g}s9^>udjA>jyn=ܿdk|.} |sU2wp-L s Y?q_} Qlk_ʯLey_LįA:泹#v䊯ώݨl뽵2_ؽZ.Y%';dmδ>[WV_E}}3~gQ,Re>b.7^X\WƼX\,E>sRrm/\[Mչ}ݥN׸?e9/ރث/sv70ijwGu;ڼSk3^\=N*5;6ޕO>}OgcglS_g}{Ρڬ-Zƻ$)i,=y"1w2-;כWD6/[q1)MHus}K&;_^bgsnUSn_b#m{>NkWFGcgjm4g7of1!U5dOr}^-?rލMvj}ևz<7]G;Vs[y*˼gvj0kcWJ򍃵yұkz|5ڬ/2/Gb1{ƺ=$﹋ƛ3Kx^W$7{7 l(5􎦺?^g?k>v g^OhXm|9ܾ}ymol.z_ Xp}j._~_Xym 5~ּ:m_pOQ[qٶsۈ]Sር4sTc}Pg}#51מ6.ѧ`?{x'w͹S2\{~k!B+/!>kOZgl[?sb}8E;zƺkW K|a|ͫc,v<ַ楧_8=t7IJ֕)8'e]wgQuhCu26몏[X}7_n;ؚmsqwuضq@fo<[}2p.ԁ33u#ϛ>o_^肎wֶǻ\ImFҌ7;?oy}h*b3e=|#Gulpg~3^_e_bc>9/fT->yһsdloz2wM:o^7ؼgstN3{]"_=z{<#= 追 ;b[dzӿfWl˾{~ހ>u8WL>gl U31w.y/\9y_ro|LbkvOvdb SPr,sٷ~pؼn>9ޱ4i]ճ>h̋hg+?۔\]1hэWUso[̇=p˵{ҷه˗ҽU C;[v>ͳ7 Bw7|'-۩>Ywc@`5+4 Lk\L7mr4n<;VO~7oN6A߻~#wwz{i٧aَm_gHcmkpN}f3LGk`9||&]ZpӶx^T7 J;C[t;V_̵[k1k$obkԖ/TL+0]c+9DgݮCǝ_;܃u7?=)gvI;H\s1gz;mbWoNS٫1Ծw~{ʳF'epyo.WlcbZwB u.o-z<'Ěډ)\Uw;wާUaiٝ L~9N7u(~9w_c|w^~[̟|ӷw=keݔmu16W\͍Vcs+}<ɫ)w 6<'o7ksoD.S {ヰs1U.S9&O}/v孟{]揄Χw%AxƬƮ%ʵzwЮosX[ksSL~/h Xi7lƧ+{'/!~G^3_vxQ?ڙ=/ku,fɳ~<{;^;.;S~ 5xFgpeg{̵>r7I)l}Od/Ɏ=o']sE98:tvmloo|um;gu=zg+b6N[Z;_K+/]ou37z8r{H3}u(v}kSx&CǾ6VʑuQCS]5?)ߖ=:n~x6,o8#kMo0Ofs7 "/nۯ  긏sRƥ\{Zػhmcm.;ƶ*x^n1 $ qSH$j6)J??><\a7Ǐ~ѷ_sF]l_j?v^CB>([_Wc}Os#T["_`:dA`xIj]C\ՃC.|6lGIkg!wX9\]6wS:x3;?Ƃ#lc78NZ ~<'q*?8''L؏lk^{4^7{'?`n=brz:'k5Zweql8~gl5jlqr`o;`m˜ݏw7G1vLyxpc[΍KS9Z0G>Ĺ-zlxw4ػ_[G:9c}:.l7 o9mψ} S?rQ>e^g}vڮMm=vO<]AzwL]g8|]g8_>~0zFXז9q0׺s;;1c;'훇 yq6|3`>K}xt0ξ51抮6 /8`+7oC`qǛ Y/I}2ta΁[B=O}7wo5\E8/?k:}p;͘8r1G̶տwIn݆_B'o,۝#w9r\N{e=+u¶Mv\x-63?>;+u0-ݩwʭ6BbSsu3뉽-\ZϝU|?LpPV377b%Qy~k7ϛsyۏζ3~0dC~j=Y+x6vC焜S'>;qm-6+l=>.rh;ui9q^-x^E`6 >XH\xW:S/}w_d/|YKc ^ó;*gU)eLE^ᓴOq=xn~Ѐo\usz^zc>笍Ch ҭI}8M=iq}Ur} ' v㶟d6Q]K!vmU+W~y 浲:= UCocy{ q>b7W|'>%OYڱ|\OXOݥU>͟ki=#樺k{su>:|䛰cNk8ܟl.{+q;9[fYgT>>@ֻbᓚ@:s M}ʛ9+W{=Kd;*w))N`<+¯.K,z{Ƶ͗8^oY#۾B9!3e^Og?S1sWhMckОi6Hxa5|y hܫwZAV Mwԟ7scmSsx{ԟ  YQwBgcyo:y⋼5O'?sl |^Pk=_t&VO_{T'ge_5%/Oco%+TuưzƿN3st~f]έۜ<[˕ 3>w='zکoySO2RGכj_pfnY'~"W9cjsV+Ϛмۼ;۳臧_e^_ ݽߪ*>7v~cߑg.gEkK*r]^e^^>{O^[[)@?W_m7ձ~-?qt.js[w xA_<I_}k|Gչܞ(tg%we[oƽ pKkO}OY}un;cIYgиgl8kO^pU}]3G{4Mx^G /K H%iJJ=7Vo^nwϼ|</ӫTS)57=gja|Nw_V}l0tpypWk\wuVz'v)>ktM9l-Øζsa'2Fh9=ۥo3,w~ZuwVu𯸠 I?Lã?s9V^j7s;xm=lA9C׉]B>,-kwF=:ó<;= qnN|>m:['='Dݫ;Յž=a):?gv׶/W]zkk9s5MLvsԎڲ_e:&Ȝ2|8M\:~頎z}!~2pvޑo\푓NӿW똺EMLKn4zOզ:V{{w:9xlu]4Y%C}ꛖ|un(޷93>W^={1x5O%7\~+Y7x/F;w`Aqgc3^~-Ns_9TO=]l]Y],k=;Z禎Nu\Cy7Q[̛7~q~oQ=w9;.t'||fybr:# <8~{wEuyvߜ7o!qf߭[/wmΌv膗VONq-sU}tݍ{> 4Oiw֜"\ssaᄉmw:kgG}ylkΟIXG݃z:^}X%}f,`?WyRvZ_з qkzמ޵fY}kͻ}9}Ͽ_/:N#ș0Ųl&boۑkM7ֳwKJK.=7{9ƾ56|xecY|'2&RYOqbkmeou2>|jy77ZR}u{66&gxgm9ad5CXqy |,8gks>#:Hw} ;q|/x2m$ⷭZ]ĦydZ+ox;7s Zs@<̾z+ƹ9'{?;_5GnZ][ZϮsv:lzWfpw_{}k_ 6gka`}s/d˅Y_f=}w?yrrZ;|ӑsgz\kCyVNߗs8z^cʶ!5nͽ~gŶo۵['sU۬o=n2|7ϝe;C#gpg\Xnkcy l;5'|s%ޟx{֪mcva6d{g]Nχmosmo-+lα̉5}plw v{N9{rxMF-<oy +2m ؎k+㜟ޞn?-2&͕}4r|ҵ֩N點jy%׹rh?>s~go楺{Veݭ3͍]ccϝYU>H>g铏h럵Fg^o#kѵ-W_{/"W9Xh<-.]7_ig>ڗ+_fÇ#sxSIlkd l68Nkgjݶ^p[#=W^'_.Q/Ζh\w}͜|bm Ď-5Fc}LL}wk4p<dCi|zՇ6hg9f12gܵNkD>ۖ/O>3>|fM.kNޝdev6Vϑ}\B:Fxxp吸&{x|ΫI+cl5UkxWLk-F|gzrZǶig\?b%?ָb3ȶ| l#s 7.Xmkz}M\{>r|u _Y=ث<˚؍zm{7}d'+/ɺc;ƙ/峮}yoZ_|>z=a_rdrYx+2摒j؍4J#gk9nbs[w93u;]eM{'m[{w. 
3"xsrglm$_m`o;a=?h[=,z]y!c^l>_`r8=OrWڗ{x{՟A׿ >zLzrݿ[g߲lϿY{z׃o,=Q*}=hzFR>w/5D}g76b\s丳sn{wzieG=[&|z\;jE-s0'/?9DZ[wxԮX]V' u_}d:y7y<^͌yExC5)n OE&bq.}>@[d<0߈gw~0wCwOUz꿭ۧ&֧8Pbjt{G=Y߽:j?#6gYi>0b |7&oBw7Ηm> m~hbb#([ X/Qx^V7@ !@%R¨[J;̏v/.Oӌw{=:\$z;-ig)7[-|q4Xĸ9QV>~YbgOCڂql̍K [o/$[ZAXWqYkm,:WY]r59r[`n^j;-Ky9\[ye/Ci׋q3n?@Y;O̱g )_,ͣ#R|1oއkԟgr%?[/ڻ򛳒͙v٧O1ŵxfc z:?w.ĝx¼vTjNe &qom;%3S|]^Vo ܯp䘛寍]K|)U;)8ۿ|GPrԾu+]1~|g#Z(v7ͣgT?wk8[ӕY[^uʳfϋy8ꨧ߸8<.Y_mTNwӓSx|;vOcT;ŹN;YߏSN[}@t^j<}:6s7ueij<>WNymṽ__ڳO\/.t/=Zylh<'FK;~eguż5BkUcҷ=\}*WnqVVO\70s=Gmo/@>wԓo,o͟o\}?߮e}1Sӹ>U=Ӌ%N6͉~9wͿֽ57/Q;c;պ{o3Tߕџfһ6֒9^{·wX̣5Z9oXaIhߴj]>/uֿ63g?;Zս>>>k~ μ򭁱gbp}+a_sB{gc[wyNw+[e}gg~͟m8W{vNds`~{ֻx低);l\+'wo|[t,ڲcy??i:-^sPycAlIYL}ڡ;|X^ww֗k{K˹[g2^}x.߬yOY'^qDƨ1lT]%U{ӚQjZy2IwY;~vjx^ɮ\E`0ر@wD 5TUe;}mIכ/uV=-7x,ZM胝̡o)vc|1E#eU gcw4A ;o0y_;}-nZjwTdy?(π|׹ Oqgd$Ϝwmn~YLW}`ncn (݀>n^K_r92ȑCW׮mCѹF~33 <.9Zx]?]Jp9fkhYf_66ȴ3rY;[%Zdo;|ٟvї[k>}Wx..zp?x]C^7w q ~bYon5{`y"y_Y726뵮˟8c]S{6CsEc{i3~~R?p>Ct7^gWl5v.>g~,owSjWn-'[ߓh|Z5O8Ww6܏':_yz= Bx^ n0 6om H!璢l'mnts'v"ytO{fk_7k 6mJeݾ&nlrנe%1ѷONyp6c:wmrdԘz87f?9fMm $&z_s3։}o k2o ~z?9C}{Wz޼geבQ7ĉmp:Wk$|]S]2}sDwɖy3sۺ~۱z{{޹7DMs6>9 񬌍{g??:7?ك}({bg]a ;pŹP/5:"t[zقS-!?)45O<5!1u_&VGyC#lK\:'רu?pы}ܵ=kn}b(X#3L?l"v=.sp<ӹ9F?};w ,Լ'l?Y,~OfCQ~g }xٓGq~!缌Y}˯8Lͻ39?q2;VKwLx#-97;㚄?)c?=0np5Xc5Xc5XcО x^X[UQ/(:,j$jA1냯Gx|.>3{Λ'||ΏKoǿ·oѱ~>&A{sAߠVxz{o.:'Aޞ+[9מLZx9S3+L 6 Uy4G|.p>Ԅ?ʛXpǽIP hWKy_[.;=VΞujz [gNjcno,ڬfzy_c5WH~ sVyf̜ޫq'\CjU\Ιz?}Ok~>{G }D,RmHƨ70%JJb1 /DvY#`1$?}Vܼnߝϑ{$1u k~2)Ο聪3寬JwF Wҝzo8>˚׺W 8KNމL޾9 /7qa+m>HiF@ ;kVd^Ws?{Lʿ!m56.r8~Ccl>94cŻ߉C]f}ݰ7?يGsgη?]^yj.䡝^ f~{Q?zvM > ̇zS{;}7?̊# 7u_б]Oqs9prfv%!&1{/G]#w8b$jK7KΦsVyW9%mxHbm8ji[ܷ5NYƪD>#z$meEx^Yۊ%EEQt ʲiчyk?gāz:UYu<d[~ojzn[/mm=zg[˶>Oe/!?o;:Gzcv1YYc?c~9nz^∭<8 v,rK# @;j6$'Ǭ%佊sk}3 *{#|asG ]>'=Ul~u%_+qnԩྫγ^\K3 8/QfYk1Tc/W7Sj(WF^L6\'q5y.XwQO5?uwƊFydUxMM}y "sjÈs{==>Sg!>z$Q~ǜn>COS>=0\CcN?g۵78h<(_gUsg+wac2ֻ;6~i 9uRGlyAr?A^>~^pAs<-~/> 9k_ʟde/33;{坚8^<̟goDw⊺i0Ct/bG:9KrTok\fɁx/W+b~'S]K,'z7F`UY%MgV{b>wf]U#>|&I1j؋>gCzxYE׸җ>-+i.d O4x^X[EQ/A(&BPNCQ}NSv9y 3.3~1oxs[3ޞތf|4ˌ/f|9_fw3x=/XnjR -Vb>AIX1o⓯r?'p~VsK-c_u?Ìk^;ɗXPkX9 lS}` #s]<}l{LlzKsINܪB3Enw}4>.Or93Tuo^$w؋\q^9q`?Y@'YOs_u3GWruyrƅ{❘|_aS|'X~~<tO+Ȃ-o=ge.:z哪}H͚`m=O/Y39NړkM4rՂ<#f:oMwa=zsw9q0G 1. gJ=xDzrCMKuFf"7znܥTonQYԷk|sLڪ{w71k8\OXz3|Z3{DؕG3Gܽz jN3g. лg|RтNR5kNXǿXsB{vXQMSoy_Jb y{#&5+||LWf7sw& ՙ5iW?Ps>}~z!k{@3{>fr1O- # CZ'}ooZy 6W`_%ϲ&םc8j&D=g\+9O͕#kz|wmNW7SqwT; 58.iuV[b+ϓF7s~gWX3{~˼?7o:_{X1rs7O)19kM|άvL[yP8a=k'W#yGLI_}͙wLbg<|A=gy;.W5 ~sZt=!\b|6AlR0ylx,s.>%{0u gůuv~%3n;ȟHdx^XۊdEEQ,*^PdUViQp|? 
N'*N֙q`驊̪ű^ձ^뽱XX?uױ~f?ƺoXox_Xߌx{_C 'ذ?sl1asW!r?ϩ8xVLٟqy]#b5HjPWz9nԕxb/1ExaU$k9J3nYuoU^p+/cw#K 3ơ]̸a&JۇXZ4;Ԛs5'\kUy]g}} lu׵CJץ!nn0kAʝ74K#$wW|ӌyrݧ\^\Uȃ˞O+<杳:3 {JGuyOγد]Jgȣ˜VyT:dj=/Up\L*5w2~I}gޥ~>SgTJ/{/u5'cdZ]PϮ[̜t4̽|]n0|=[*?{Ƀ՝9ӵO:wg0oXjF%lظj~QG=7i ޿lG{s\qR8QoaH N\Z u|ul|CW̵afuGPK%N/5b?vQoGH'μXȏyߣck*LYʳ7 YՓXVq6c;>Z~"Gbl9[Hܵr:jU|:vn^j>'>g꫎cvcݢ#׆sb_|^Ug~%wi~o]۬O/B{C]k14˻ɝܯMy)?3Es틇uhuV sɭf?og9gy8Zг+=4/c<OPψ5t0_~`Į HK}1Mjnf sWwfyטPSb{Z|+ZG>ޑGS;ϏC1zWx)7Ky,l6uNU#afW+L<1]ꠞ<)<]r๮sro<>V{x} S0Mecާ; S= :s,y&O0j 纪8){ɼQ]ϰ z bceV3Trr&7sQ{*WFȫ^%5pOϡ~j~_tO?'Hޟ0m}5 ڑ'qC'i"߯S\'0jrGH)uާ&5|ςGg %u;2OLϙLY ~ؚ qVF|z\a{5h5߃MLsp5|{OǮausw+ƙgCs2$.3WXKu!蝟QG9}if_735@-;%M7gX϶gUJϹ'=`uf~GVk8.wsއEhH|f,knw]ʏj{PabdT+^yĔw6zV3,_ҭ1xH{wڃ '=wkyp>"'JNUoKr5fU ;{{90ߕeLM565~=|g{=Gߑ<ƾջ(^gLipϚ>_LlO+ |'}|vT P?#sA}PV(ԄN+a/i<"^ϩK}<~|zyE&=BZiWX$2*K+eV+qOut33}gl_*/+;9X7b^cjwԡܐߟ?^n 1] 23ړ'԰(5ϳ}_}^H爇9ìEN/A'=7e }f׳3%p/Wy=S7-u7aU]gҼ%>F\|UZzjȌ{#m{Qm8޻/Z{zGG`x^X]%5EQ\?P4 ʲrP"]9T:s yIuuuu's~[?nm^Kze[omѶ>WzCq,5g{_uyq>|5X*?j(Uqs f?jV3=^|Qzo^էysd^sMLkor%ǧ1tug3_W^R[ϧ&<|9xCͩ55u}qsFdV GꂽN3??4T+sPs}*k'=k0=\Oȹ&~W_0ʱ+?k# YUayprUU3\)ן=p-y {N٬w}ċ}:->O3*~DZ/4w+m仪7TZx]35;=Ӷ!˿`o88b[jܛc,־:f9?5uUM|1Ovꛣ1a1.c7Kp]'[gt_8Y?; {-E{f9渚:K\g'JSᐗ޹uG1{<0ceoR;$f-σZg=Xc9ˤ{c8kXlhư:n=~ Z݊7u|AW` 9'cq=X% 8>c}T3qz#1jN>h3cqs~Y3<(~uMݟ2nc=:jg{wvs]uq-',~,/匽9_Z |{HɺHK(?5K!KvLiN,!H47v-{_>V7OLzD!z/==wM?a6voU 0ztx5?wPܻKmxe]x5^>95Ԗꂏ]=RG53(6~S^>9R_:'ԋN۵~^SATU=3'ae>y}R0ox߉˟uΓZs/+ߥv=$yznj8b6V:C1UwXs]G[άkfuȝ=]z{O>7QJ{FCsWx2779{/g?;~G1׋YԎ+,T_qٹԹf^!V~b^ݹwf ^\I}E9+u{k!LjM5_3̞tL&\M~Xͤ?\ }0]ZuYyY^%k-3>^[.׿u_j^eT^$3c0Wl|W9+fU: QCc'7s<Oz?=\fuH < Y8/]/<Dj^w]tKLo >}O|&i ֣:l\Uԅ.qn|]b?w~o9W=8x9Iiby{5\9:tqg.::fky Ͼ\q_y u3MxkmWö9,p1qi(3/ϛCqԢɛĬk:穙v [d|~qߤf:U1o0:;c`h"gݏc,=%QYﺬ!%Of]w'2jFfA}-Ņ#Ula&wsVI7yn6GϿ?䍻Y ]ٰrQګ}w1Z17Sޗwa?kUm*.u{r}w|+k b~Z;?2?cxm\ώ|F̗ʘU^Nv3Oru}̌zJ Wf?yw׫ƨޥ'0GUw:zŕwN}r:c5zk4x{y<\T+?{9ϓ?yg^Gӂz17Z幆]Y?ylo' mKϡaI,OͿ 'ڟGdW > * G3wAVK̏3뗆mgsZSzf<žvsv'NmmUƚ7)䑵"' 13U?yuw7糦wAru~aOOd|o㚞/pYʓ'ls[ w^zg ٷ>+m܇ĦFUǙ>w~&6qtV{GU\闳(Gq{%}Ͱg>}?\{x̖8g(E^)ky]ϑ3Ϲ'>tW}1خέ>WiJ_+Oɯ8gʝy'қo +?Q1ORJx^YMEE0(&?!BPTLTҍ{0[tMO&wNsoh}}}}}wl|}k}׃^[}ml|'}}g}}W}}qxg&&'=:X!ob GZծgXԕ0̰۾dž#ҮzX)FӰc\5>bsߪ5ɷνua {xb8}nOZfͬ ̇=${ݩgj$Oe3ݳԈgJyfaӰaWJ%G,JE&3\g]M>?_zR*mϴQ_+ٙUϥ oqaú6׉=* see8WxT礦g̤ci8N]rZxFr'{|w Oö{Re35{b6WsEmka9ԘX?x|>?{3ރGoew [zUZ%n|$w; 6z޼­g:9[͊}{UJj稉S2WD9y#oƮsS+ksJdzy'o&0ΠM<֤p 3?AY~⸎9TA8zא70U܃wg}ha߇'~yo{F3r&/ z9/S/U4RՕqw/!we Gj7Ik<>V:TۻUf|}W3ߥwZs612ܗrٜ}_d+U㊁I<)s_yp4 {5G9qyM3{遗z7ۧksu^"} 3&nwgw~{gZT\YSZSG`^TǩszYwL̅ϷƆ ٨cTp|3{;Gޔćo]y8w j`éwRG>D5\7i_z;]Zԣ0=ҏϊKzܓ}T oηg┹L/Ovld]zOPDWg\UߏrW&_jGg `ʻ:J/?`ጻwYUiO5ϲ]a/p++Wk|/]aynZz7zwzz֥O +k1?hCVL}cx?a++m#H,|kXK[p{+;X#l }:5fUǙpC=v0 { W^/Uq;lf;a [`3-O'vXӶ1&DGn仿|'D~Ö߱{'qt0Fsȋk}\y'{\K8,lr{639qlטj~_?Ƒn'gpx{4 g4Ol:LrMU}u?qnf>49͵3]xyGl>9GY4?z zq\L3gԼ 93Ι9]]=T~yFSU-r(󟽓F+n~kF~f_^`[/l|15#sB/9^;{-xӜ [|Ћ'Tu:{ajfCbr#[z\'>OݩaĔgTW_{s7D썰;ys0[#Un`S<%#/׾e7ڛZ'a>ġ%w<;Qw?0SU|=/ҥųk[+% V' T+}\ݢ[ 7lԽ[?QWH~z;أzF;?|:Rz*=Ts7[UHO Ƀ^_u#^z<^ng;=~忊 Kwo8^aQ9C{5?&#8}f}k\q89O2w~glHhNQ;uܦzyܢ֪ԻwK寬E/ظñ'Q r;F/#zwJU/2VϹK|x^X]\E(~EG+p1DY/H^ķũSg.C5=뇱X~뷱~`Wzm7z{z8'c}:VKlx֟8bzc}j¼u(GO 5U<#lg58\ʛqc\SZI<;Z [JDZa .#O\}zWk>&G1kF?}ձϛjݗKGMOVW%yV= ^#S-jn`tUsVNm3! 
t7ϻg㵔O}|~Y5u0 lJ;|p>Xy<{]ڴ^sP]Gꔵ{_ɜs.5lr=kǖg}]J<ޮ;=;y+Ls5E}ֱ+SqU ?;^z/jftL\Q|+iA,b|+ɿF?a+VV8>sN6ԾϹyaG^3LMU|98gjVJ{9LOAzeݫwTsT ͑ ;691_i{FoFs;>R5 ɿ%>uYSL;>晘9c*=,}şɩ~Ngհ_i\Tϳ~K S~P-#- ػU}#->QUvzix/gdUû9{νy /}9PyI<;3xySU GMcr0KycƊ4wԂzN|Uq<p*/IS4Ĺ@g-tkx;QsWqVҏ#gkք9ۮ oasM5WHW>ރeN'ﬓxzC Ͷ גXʵ}eg*mɹ]Ϫw@zy(bX }'՛P\DALjqk!=Q>ڼO[q,V.zgWu&cV<p_+-&޿s^ڰU5;}'Oy*.f>-:{hf<=#?^ z_{|y{==wcriشajz'U:xM佦Ygq6ުurO|>84*^oak8rr=*ϤQզA <[:vHwx^XK&E(D QWAAwI8ȂxoamPQY=@櫨~`Bzoxǻ=~g=Uo{[{WEw$~&y$[8pxx/s- 3&Ez#cq|KsɳP?ERSuL.k1:,0w*w>¹6wvwzk̘U*܊td+Ly抧jD1|Ua;_zotQ~k3~gv̺oyuQZqɅg|jJ憹6;q>xmXac9噜 ռ1{߆Qj<5SOy~Ҷꡬ5p.W;m0;ff{J^T5?s~׵ìrqU;CeW#sKO}䩪~q> Y}V8r%\|uWogp\oֿ뷘S>fgʼn|8՞g/ru^UՇ)^q~e鑆2% 5E0ɹaYsP/$rzxz<soVUᒛg:9S>e;|swp#{  Ꙝ>kZxQz"{LZ5H-ɵz%^OކzĽ j?z{|<{9;;sgyFkYQ"ű#rxI9-{$Mz\og1~P_~Ix^XU,0(J5J4(: `rQ\į8q~q8osǙy2|9η K,8bo&96/8V"G|<'Xqqj&I\#Xj3,ssK,'HCڇ#=-yI?rs}{nաyPPi>J;KYczь/1Xj2WWvilTURKrV=|NXTخ]5ݮbXr<i_7K%Lx7RyjQ0&5MO_+y-=@]Smc_G\'y7sv¾a3}Ͻ$?coXiHW}Q9JM#Lu3< t3Zrf? gl{Gwތntͻ$g_uw5W[y>^p0gεx*n&#䵣9뮍NҾf=ݮN#G{['NarzSP'hR;?i3~t׳L<'wqbĪc;T;޷X~O}Mb,-rbT)|^8^gs" ٧W0sܠw+Fz3yJsϻz5u]Ǫ H'׈4}zM@ؕ+!\očXwUCq8aoXOJ+1G3zo{vwih_^<#;~WV{:=V.bflH.9<y|Ҟ3i$O/n|o0HSmL|afl= CuU=GWwߙ;IΊjyyqNy]>;.9.gc^;ǥ&zc{҇wYuJW'Եi^󩹨4.BoWN=:L; tK=b7o|.^<18YvoW:&mP\Ww*U9a[ߨ>V|ŧs^֟F~eGqœk1 iAP;3L~Ws/VZWӻwzwگmUDx^Y˪%E(@G-/>AA/.VP78Yg{1z|O=~K'{<xǣ/xk=zAz|bFN\1r>&|b9xb ܉%',i"k]֞:OkM:ů%7i/FJl'9爫Zy8wrad=#gSk_Ɨ6wWxfzL}%|^ƞ#yVJ~1p;<+džf8wAS&Q}'wZT;{ݓ䜺 WVZkWZ'<.]wko8ΟLp٣[oXfnK{\{ʱ#=ƞg5y+U;3DXvK5s=M?gO/{:\}yԚμ{s9>Kќ!yL<: jN^Y3نkPCv/wLU qTu͑{=@lY.j'V驜\ g\q\BXCoWtgʫzþ+m}{-ǹq/a`c+ [ |y_z+[|^x7s,FoOCgᯇ9~^KNSouf{A|S|+lXckf3N+K*7~ozI{[jmMu]+g1ϩ\^ύ k6+v>U.*Mt~_UFPLќ{s =` P>|yN{Ϸ?Ix^[]EAKEA(Q6AɋN1~Z{9%Y'y\UOz|;w \aPjZ>ڲ]}Z/~OaPW7gܽqoț{x_jiz~L=%w^L+Ͷw;(v_.u o"9 Ay?kr_eU n>C#Sog%stRNj3;V;Zt0tu%5Cz?GUx^Xۊ%EQWU\EF x_YqE'#7:gz늌̬3OKΫvnvv>lvE;_S;k;O;^āy8>ŽI/q9ֆ3#^r &y~ v|i1Mq>1'v̟q:\Oi$T?j+U ̪'.c/8t!VaI[%Ǚ/ {'⨜B%/kcD}s>+\o}7i;x㞭tU^NS:If_֮>z3Gg-}|:~6*49ql?@rG+r5<9șwGKmϾWs|fu}KY yIu|sıQ;ew;b`>AzsqҳzJFzz,e#*6Gj`#FJ5i5k6k$h-8p }/n]HE^Vݳμ5wM}ڣ;gyt"weޮ(~#}kvxc/gwxG[ RiK f0w>3lq<ޠD_<ӥzWM%gb=}f:f|~i{K/ w)4VXޘqM^xzRM/jzGq-t־P3jr'F^9ub}ڐKUrW:TLXI\a-Z2FvXqkc6oE?{qO T\,pN1{{W1=(k[iW9Tkc2ciN'sN,W*sHu]qe8}{DN*ަ+{|.6y0]黡gnJ~z v^̙;{m8c㈟pGqN'UCy߰ۨO}~rjƜY?q*/Yc}yOS4G5x;sH fC<Ƃ#8qt_~ 跴em\['W!YF=ڛ,ȝ9-kor><ƙ{!ņK1/@ߢ-K,i5wڞμ5rg 6r E97.sOz?Ϫ~ jCsڵ7{Ś"cQzEx^X]]5bQXtS bbiTD냯c0fس'+9UȃޝZf&9>[_;}ݾ^;냾i_U_og}]zU?DšH ;g8X㬾?cװ~ƬNb?FpԚ}K8ܻח<~֯qvN>SY}|L3V #%~9&!/]x₵82}l4kap2qjgAC=p6qڞe ~0j¼3keCͅ49;7ii*V3&,tFJ7՝xby%b4w>3ٰ?)06^_/ekF/fcU_<_\7߬b]Ir"<GVE-5uÎe_Uv棞4: c^3e}=wͼhb&^ڪ33"sp]eʕ_\k3g׆[tuU5u~,sX^p$W[/=K -^iS|zhc6;翪)5븳ukr7{owwb(]ivӲUc߾}Ege ✽ؙyO/WoU4J]q5R|?Hx^X[ED F!DQPEE2/"yra.+6:UuNcX5X/z52c=뭱ñ>{Wc}=wXq믱ބO<}~ۗ_ΐ}#='YpCu>yvcMj%8bP?dž!NI A {ƫ“ W# ֤=/&?[\œ6qOr=٪.3kMlxɄ̑XUwdtǞFI̒Q'c&bKI҂Ǘ9Q͙Gjծxo^{?fe[Wz)]߄:g,= YQIGuAGcf&&qFϱ^>ȾԛqVי^[s_q^晩zIK|bwꞒ'񈡼y.[Q}zFw)}:WW3;Gؿa\yiF^> Ҝjܟdϲ}*9_\cvm{ͬߘp}CVu*?]=9 &kP^:\dŠouIIXyÞwyծ3PSy帝W'ӧ[;,{3['g{Z}4]*K3{xvUsv<^ɬtoYR/X#{1q'6qs6׶eW7wp@nV ,8pOQzߢ於X5 q2wg_Ϛɉ8(}u^#\]xWύ<|V ^#]`) ~5O8¸-D7ԙv;}Wo^~oxLg.JLbBg}g2q3uHx^X۪$EYQ\=*.D2(DV΋X3LWdfddTR_/J_z7Z:k_w}51a_6,b|p~G_Ɗ<K}6g(C'X+sc۾~^3ySΟ`+nX1j^O~Xw;kڽʋc5qo0ǖn^6l~ ?X+\d>(?<ӉtYs-,W9Sx^V=sݨߑ&WKMͼ'vUC;9q3_~Uz-ϱ_U3vy-U|x<}[s1K*_jjȡbs*üMl1ِƨj6חX$5}m,/\?hrQ3.?+1^2K=I[!5IL-=/7G:K}>6'~36lgC̳{׷X+omߋ2;zlwjys'>q”sP֝Ȝ̛'9V~==ay6Cª4!@'ͯ4|Gz(,1kfQĬĻz[UZsӣ󝵤G&vNO3c`CX0 7Xg wrK^='CrIeLNnS=~rը^[>kv>T:Йy#LzZp<rMϪ^q煝d.ڗ=Lf杞O|o'x:w]{1fP-gpC^WWL1ӘtmgKƞV<3yd'~fɘ)޳|.y?ϸ.>bU8[ݥf-nz N| ;H#JG[O㴾vV ?'+]]qN|z}<|Oqj_Y270/΄*z6?zNڗG`ߓjcU޵yyW{Fp>twٻ qե;_ziĽņ9W^Oůr=oO|͟c&1'WJ{K FNg^ګ9]0r\ Aϴ@?PladV'謞@LGFFDf ~~^a?a?U??k?}~~=Y?_c{gwlk?w~†.6^ c qWoM\~>†)670nk~]'=VcVOǾT9 5~j 
Y#$]'{1v}oQ}g̎|yHC͋X̡g//|!SS>+ {#0X=YkB@##略mݝŦup- WK=ϐ/k;ꋟ]?r_fB9WnņH#q̄Y>r>g{Pz9bջ3$mC+nwܷ<ƨ?rO6 3@Î^+kϾzf+ř7q,7;O[  -k^FlbǼV<87Y}W)ərU19aݜ;eS_= >3]y?ϝ!{^Un:٨+ =Or7h^f cM|h(ڃ ji54?<$O+I r9Ϧņ=U;B:^el&܏Q|?Ùi3sH#z_Z8ֹVw8=>էWTw~9_[8Đ/Ou7a7.8􃘼H+kűO:p WWwpt`3Fr_@nwwނcyg5L'_93һᨵgVoQrM<ǚqs_<5iru-?n|;?&{w= F.㌦!b2{1$];b$'ϴS|}`uZQK'3ZK窱j: Sy;K٪]Z[| |xe-> 7Vs}%;.1ԯgx+{%0qf~S#{` Q<}5Mq}{h]w%ey p݂jr*_uL>ޛy/ {^Y;s2<{b0bGwCĊ:Ӟ '7^l i65/LX݃ŗ;PzxV%^$yx;>U7DYk:Hx^XMdEâ(.*~W .* x"j? /^n.=UQ3|gk}k}ַk~Zf_u}?֚kX=l8< <`zezk;kևk}ǜs<<a UclGlq{ $'qT/{zF9M901nY3Ϝnqb+l=ng8szU+5PkHqYk9c ;ׅr}-ϯ{/=?j>&3I|hF;>W9OO9Y>S1fm[wAtg'v)iӽ0u$ לpSGz|7koK>7yfޕ6 \’4s/=*n>t8V޳΋JGœ8-uM<ϳ:|\5Ys~szϋƼyfkϷs>]kiW3c%y`s9szyuwk.g ŪbLԾM{{`#ԯϿkz^`{o*qrDu 9gž'1~8beO1Kj-ңUZSN%Wzqnq1.|wO1/E[9qή0Flzh5_=aש<5gI]=*KCS,\:$N=Fy=ν#MMAkZunъq?Yzi6e☃4zq!oqO}.j7UJZW98q^yg[",iOTogdb)E3v5C+f5K:޽M8cכ#gҕ׽Ÿq4>GkIkx^YYED<΃ċE1y>\SlD |3=_nxzWzz}}ק}}׏}}׎&k/cr?P^L_1:.o]c^DgqJ'*?b} 9J{'65/fG}#-Yשahus~q3 9d M/*;/ߗrQP0𬼘%?퓴M.\-:}o/`y 8+k,^~N`?qj\c(}PyJ(Imy55aͯz{Y{X'/3&tfL~}z,y9-\[r\yfMRSzz?o#I|Uw{6 Đ^ڟ~ks;ֳgYş|p iM>r+^Pqج97"r?j.K)=#w_F4̚ɹ^[CTsck(<|r GUٓ/gC%@+/z<'5q,̻ sUѼq4/ȍܳj87b7ICc%uiWyoùOG|sF1jUWlsO!ë^Cnܯ||Ú_3@;kQ|o_̿CWaq,wwΫY؞?nAuU\I*Gֆ`'=zù'59gn/^\GaKq޽ɸz7|s֗c}=ַc}7ӱ~k뷱~ϱͱX;c\b+;/qŎM>m&.y>IOQ9:uops;󶪝g9OOl0;ּuI:{j^T+q+5My-e/q6'8yscŒ?5%~vyy!X=arհ_;k?ϫcHm/N%VVSMSױςĽײ75$ߪ< Uu!{cbtj֘+Dِ^ LbUO) |8⹯VT]ל7G7HbIXqA7 V7XUx͙}7wM^/wu~Ȏ7vMa罂~Zki9JOJ&˜҃wͻ8zQ @1.LJ˜Ϫӳibm8zKyJ:lf[W񌊿^KL{yǺK?;S}ϓNOG}SJwgu5+#_ý }C|?kqn8~ǥ1q+Mjg^޺Gը'U.tu~Zg9y>*gfWU^U^kˌ9α;ns+ϭpr(&}s{M }BQ%GyaG>?WJ]P8{'?Vw. Ry!3_vRo?召**?~L/s^;z=@wy+yQ|h >QtI8q!OXAߠz63UYu:oc>֟c^륱^덱>X_Xߍ38/2vױ58pvOtϱ5֓~c_qYq;P G1yn?oy(Wʛ93Ʒc}#N^R^<1WܪzC$qp {|Si&ܲo{VOqH4_VV2٪`Cǹ?Q[k ;aUR:TTky.7~rsam8]Ñq֯p+ޣε,Lj[͓;vx3> X`'Mr~H+՜grlqUW$Os1ݗ^}g{vnǎC Ճ{Tی#fO9~Q:2=1=gz̟H=ܗ_XU]saT 㾾8cKÎ~x>z'{^ms8UV3X{ιW=(_k(9.m+K>I3w/ɭU9w޵{ g+^fj9bٛQ)?W@nb>3~5ˉKWws 1g]_H=lj" s+3Lp*^ GV7CΨp[Xҏyf~=I~% 鋫䛴W}Qk>T=֭zSf yk8Wx gf MD䆚{&uH /pѬLN<^UEv^y)71r&KPWCZjIOUH7}x>_譜ηʳ^J-WS*L 3pb=Y7jnP?JXn8dQujyu{ s7}}%َBΔ*8sq-f=T[8ɘ+crׇ^c~ы>\|3򃴨t=0&g̜'5u;k+3>1΁uS\G+~jNWoVt*|oJ߫yx_^}⽹z?8p'ቛޫ[sM=m8 qڈ'h?p|4U~Wɕ1O7gpVHx^WˊÃ(@6DQPVi= fPTLtuϮ*NFEFFfEčw3g<2g.Sl|RoDC룘|ƨ?j s3~駟~駟~駟~?"5x^-ux/-u Zצ.5Cr*5Bh )!0EJ+p:~|}= I^>]ֲ&MIYA1fSr 7(Fuo#"c;yDtAԤf< ,c3wɒ>^ hF;bqeK8]D`%99lR0:xd5[8JLAH#:1 氖1_5hN7$2e(Hyѝ1L+-OɝEO9Ӟ2U$soB}:3 a ;ynق!#zo氞p;9xԢ e,s.s4RJ #GRK2-Pm2ɤޥ gc:~W%WLQђHbg%H'Nq'doԤ1]&Ds4/ڛT!Lg853Oo oS4'ij#!c~9E)ҁLa [9@H~Sv#ֱ$cAT1]Nn~8xNBF]IH` ?r<#OaAeӖHF26\99!N+2q|jvsY7_BcD41g 9J΢AԦ9]I,YF"U,Js0V REPЖNds]"TlHaЖhbbD HWR~QJԢ3g+G8]Җ w(KujҀOa I*7I҄fM$'ʈ!4hd99++(DijҊn a:sYqT2S Mg15MA*Ҙd8&S%/SPZA|b9E9ԣ#t='d%KQ bRD2,%C\yPяQd-{H6TO\ybԠ-#VauG)ќO4!p |hI4\iK2M+VQsGh !]DbkIW3R&t/&r<%GGF3lԻGiҜp1lb=}(F{HX6qǤmfޤe1n& #^b*Y9MҴ0QPZх^e4d;GlioИ`l(OrI *Sc`ǸNZS2҈f,sYnR/iÂ5Ptg8,c;ǹJ6)F%҉>0$r lm6ŨI4f8;\,)C}ӋYA2Gs^VRD1yecr3Bu>+CbYxBmhuInsLG~Ҁp3%l`+G92ttǼCY@&2e`7gOEI?q.JRpֳ3"m'uEQjӎ.$'s=(LyсLd`?sdQJ?1|A_\.W PtfSXsTtT)QđrvrKs˼A~S6D16I~A{hJgF25dxHr|Иpz2e[8nGUhM3,b+Ǹ3 F{Iސ"d&sYͷ !V|DƲDqd)E(-`0$sIrwQf8%TD0x^wubTB!GI 51gQ\24'vB1\f=b\ʽ R-JCD|<~ׇ߯!ԝBcڑƑ"J8 ĵ9!412CT  z5]f N3WMygI#L<ֱ#@!JcJ_3N Y.r!ԧ O҉^ c}O;cgQdѐ'j*q[^iEd1%5UM'@2ɥ-;.S>xA:o3U|A%Rk%ya2sY.*!ɣ'CGKYvsjbWp-Ha Y{Z垈=(O1[89:xeSYrQI5khiO/).Ps=AsО[Le)a3#1yiEgz3Lal+Pc/&X@!|I%Ps9 I >d0\)UbvI)0,f=Tq:!ҕ> &l>}lp4'ƑEcPɍ41%L&elfs MI+|J){8i.Ss3i m)`;Wʝ) $ӓt2G,a9OMSڒB:ɧRvsјDR;d>+/`x^A 0{s^%m_tx^]4v[mm۶m7m۶mvdKX>/p_'o;~w=?O3{>~G1~g9~_W5ރ}x?>C0>c8>OS4>s#(>$>O3,> "/+*&o;.!~')~%~_7-~#?/+'q Aq0!q(aq8'oGđpdGptıplp|ND81Nd89NST85Nt8=΀3L83΂l8;΁s\87΃|8?. B0Eq1\%q)\eq9\Wq%\WUq5\5q-\uq=\7 q#7Mq3-q+mq;wq'w]q7=q/}q? 
Seurat/tests/testthat.R0000644000176200001440000000007013712563445014651 0ustar liggesuserslibrary(testthat)
library(Seurat)
test_check("Seurat")
Seurat/src/0000755000176200001440000000000014170333640012305 5ustar liggesusersSeurat/src/stats.cpp0000644000176200001440000000227714156670503014165 0ustar liggesusers#include <Rcpp.h>
using namespace Rcpp;

// the following code in parts taken from sparseMatrixStats (http://www.bioconductor.org/packages/release/bioc/html/sparseMatrixStats.html).

// [[Rcpp::export]]
NumericVector row_sum_dgcmatrix(NumericVector &x, IntegerVector &i, int rows, int cols) {
  NumericVector rowsum(rows, 0.0);
  int x_length = x.length();
  for (int k=0; k<x_length; k++) {
    rowsum[i[k]] += x[k];
  }
  return rowsum;
}
[remainder of stats.cpp and the tar entry header for Seurat/src/ModularityOptimizer.cpp garbled in extraction]
#include "ModularityOptimizer.h"
#include <algorithm>
#include <chrono>
#include <fstream>
#include <iostream>
#include <numeric>
#include <sstream>
#include <stdexcept>
using namespace ModularityOptimizer;
using namespace std::chrono;

JavaRandom::JavaRandom(uint64_t seed) {
  setSeed(seed);
}

void JavaRandom::setSeed(uint64_t seed) {
  this->seed = (seed ^ uint64_t(0x5DEECE66D)) & ((uint64_t(1) << 48) - 1);
}

int JavaRandom::next(int bits) {
  // Only 31 bits ever used.
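  // 48-bit linear congruential generator matching java.util.Random:
  // each call computes seed <- (seed * 0x5DEECE66D + 0xB) mod 2^48 and
  // returns the top `bits` bits of the updated state.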
seed = (seed * uint64_t(0x5DEECE66D) + uint64_t(0xB)) & ((uint64_t(1) << 48) - 1); return static_cast(seed >> (48 - bits)); } int JavaRandom::nextInt(int n) { if (n <= 0) throw std::out_of_range("n must be positive"); if ((n & -n) == n) // i.e., n is a power of 2 return static_cast((static_cast(n) * static_cast(next(31))) >> 31); int bits, val; do { bits = next(31); val = bits % n; } while (bits - val + (n - 1) < 0); return val; } IVector Arrays2::generateRandomPermutation(int nElements, JavaRandom& random) { IVector permutation(nElements, 0); for (int i = 0; i < nElements; i++) permutation[i] = i; for (int i = 0; i < nElements; i++) { int j = random.nextInt(nElements); int k = permutation[i]; permutation[i] = permutation[j]; permutation[j] = k; } return permutation; } Clustering::Clustering(int nNodes): nNodes(nNodes), nClusters(1), cluster(nNodes) {}; Clustering::Clustering(IVector cluster) : nNodes(cluster.size()), cluster(cluster.cbegin(), cluster.cend()) { nClusters = *std::max_element(cluster.cbegin(), cluster.cend()) + 1; } IVector Clustering::getNNodesPerCluster() const { IVector nNodesPerCluster(nClusters, 0); for(const int& clust: cluster) { nNodesPerCluster.at(clust)++; } return nNodesPerCluster; } std::vector Clustering::getNodesPerCluster() const { std::vector nodePerCluster(nClusters); IVector nNodesPerCluster = getNNodesPerCluster(); for(int i =0; i < nClusters; i++) { const int cnt = nNodesPerCluster.at(i); nodePerCluster.at(i).reserve(cnt); } for(int i=0; i< nNodes; i++) { nodePerCluster.at(cluster.at(i)).push_back(i); } return nodePerCluster; } void Clustering::setCluster(int node, int cluster) { this->cluster.at(node) = cluster; nClusters = std::max(nClusters, cluster+1); } void Clustering::initSingletonClusters() { for(int i=0; i < nNodes; i++) { cluster.at(i) = i; } nClusters = nNodes; } void Clustering::orderClustersByNNodes() { typedef std::pair ipair; // holds numNodes, cluster std::vector clusterNNodes; clusterNNodes.reserve(nClusters); IVector nNodesPerCluster = getNNodesPerCluster(); for(int i=0; i&a, const std::pair& b) { return b.first < a.first; }); //std::greater()); // now make a map from old to new names IVector newCluster(nClusters, 0); int i=0; do { newCluster[clusterNNodes[i].second] = i; i++; } while (i < nClusters && clusterNNodes[i].first > 0); nClusters = i; for(int i=0; icbegin(), edgeWeight->cend(), this->edgeWeight.begin()); if (nodeWeight != nullptr) { std::copy(nodeWeight->cbegin(), nodeWeight->cend(), this->nodeWeight.begin()); } else { this->nodeWeight = getTotalEdgeWeightPerNode(); } } Network::Network(int nNodes, DVector* nodeWeight, std::vector& edge, DVector* edgeWeight) : nNodes(nNodes), nEdges(0), nodeWeight(), firstNeighborIndex(nNodes + 1, 0), neighbor(), edgeWeight(), totalEdgeWeightSelfLinks(0) { if(edge.size() != 2 || edge[0].size() != edge[1].size()) { throw std::length_error("Edge was supposed to be an array with 2 columns of equal size."); } IVector neighbor(edge.at(0).size(), 0); DVector edgeWeight2(edge.at(0).size(), 0.0); int i = 1; for (size_t j = 0; j < edge[0].size(); j++) if (edge[0][j] != edge[1][j]) { if (edge[0][j] >= i) for (; i <= edge[0][j]; i++) firstNeighborIndex.at(i) = nEdges; neighbor[nEdges] = edge[1][j]; edgeWeight2[nEdges] = (edgeWeight != nullptr) ? (*edgeWeight)[j] : 1.0; nEdges++; } else totalEdgeWeightSelfLinks += (edgeWeight != nullptr) ? 
(*edgeWeight)[j] : 1; for (; i <= nNodes; i++) firstNeighborIndex.at(i) = nEdges; this->neighbor.resize(nEdges); std::copy(neighbor.begin(), neighbor.begin() + nEdges, this->neighbor.begin()); this->edgeWeight.resize(nEdges); std::copy(edgeWeight2.begin(), edgeWeight2.begin() + nEdges, this->edgeWeight.begin()); if(nodeWeight == nullptr) { this->nodeWeight = getTotalEdgeWeightPerNode(); } else { this->nodeWeight = *nodeWeight; } } double Network::getTotalNodeWeight() { return std::accumulate(nodeWeight.cbegin(), nodeWeight.cend(), 0.0); } DVector Network::getNodeWeights() { return nodeWeight; } IVector Network::getNEdgesPerNode() { IVector nEdgesPerNode(nNodes, 0); for(int i=0; i< nNodes; i++) { nEdgesPerNode.at(i) = firstNeighborIndex.at(i + 1) - firstNeighborIndex.at(i); } return nEdgesPerNode; } std::vector Network::getEdges() { std::vector edge(2); edge[0].resize(nEdges); for(int i=0; i < nNodes; i++) { std::fill(edge[0].begin() + firstNeighborIndex.at(i), edge[0].begin() + firstNeighborIndex.at(i + 1), i); } edge.at(1) = neighbor; return edge; } IVector Network::getEdges(int node) { return IVector(neighbor.begin() + firstNeighborIndex.at(node), neighbor.begin() + firstNeighborIndex.at(node + 1)); } std::vector Network::getEdgesPerNode() { std::vector edgePerNode(nNodes); for (int i = 0; i < nNodes; i++) { edgePerNode[i] = IVector(neighbor.begin() + firstNeighborIndex.at(i), neighbor.begin() + firstNeighborIndex.at(i + 1)); } return edgePerNode; } double Network::getTotalEdgeWeight() { return std::accumulate(edgeWeight.cbegin(), edgeWeight.cend(), 0.0) / 2.0; } double Network::getTotalEdgeWeight(int node) { return std::accumulate(edgeWeight.cbegin() + firstNeighborIndex.at(node), edgeWeight.cbegin() + firstNeighborIndex.at(node + 1), 0.0); } DVector Network::getTotalEdgeWeightPerNode() { DVector totalEdgeWeightPerNode(nNodes, 0.0); for (int i = 0; i < nNodes; i++) { totalEdgeWeightPerNode[i] = getTotalEdgeWeight(i); } return totalEdgeWeightPerNode; } DVector Network::getEdgeWeights(int node) { return DVector(edgeWeight.cbegin() + firstNeighborIndex.at(node), edgeWeight.cbegin() + firstNeighborIndex.at(node+1)); } std::vector Network::getEdgeWeightsPerNode() { std::vector edgeWeightPerNode(nNodes); for (int i = 0; i < nNodes; i++) edgeWeightPerNode[i] = getEdgeWeights(i); return edgeWeightPerNode; } // Skipping unused Network creators // Network createNetworkWithoutNodeWeights() // Network createNetworkWithoutEdgeWeights() // Network createNetworkWithoutNodeAndEdgeWeights() // Network createNormalizedNetwork1() // Network createNormalizedNetwork2() // Network createPrunedNetwork(int nEdges) // Network createPrunedNetwork(int nEdges, Random random) // Network createSubnetwork(int[] node) // Network createSubnetwork(boolean[] nodeInSubnetwork) // Network createSubnetwork(Clustering clustering, int cluster) std::vector Network::createSubnetworks(Clustering clustering) const { std::vector subnetwork(clustering.nClusters); auto nodePerCluster = clustering.getNodesPerCluster(); IVector subnetworkNode(nNodes); IVector subnetworkNeighbor(nEdges); DVector subnetworkEdgeWeight(nEdges); for (int i = 0; i < clustering.nClusters; i++) subnetwork[i] = createSubnetwork(clustering, i, nodePerCluster[i], subnetworkNode, subnetworkNeighbor, subnetworkEdgeWeight); return subnetwork; } // Network createSubnetworkLargestComponent() // Network createReducedNetwork(Clustering clustering) Network Network::createReducedNetwork(const Clustering& clustering) const { Network reducedNetwork; reducedNetwork.nNodes = 
clustering.nClusters; reducedNetwork.nEdges = 0; reducedNetwork.nodeWeight = DVector(clustering.nClusters); reducedNetwork.firstNeighborIndex = IVector(clustering.nClusters + 1); reducedNetwork.totalEdgeWeightSelfLinks = totalEdgeWeightSelfLinks; IVector reducedNetworkNeighbor1(nEdges); DVector reducedNetworkEdgeWeight1(nEdges); IVector reducedNetworkNeighbor2(clustering.nClusters - 1); DVector reducedNetworkEdgeWeight2(clustering.nClusters); std::vector nodePerCluster = clustering.getNodesPerCluster(); for (int i = 0; i < clustering.nClusters; i++) { int j = 0; for (size_t k = 0; k < nodePerCluster[i].size(); k++) { int l = nodePerCluster[i][k]; reducedNetwork.nodeWeight[i] += nodeWeight[l]; for (int m = firstNeighborIndex[l]; m < firstNeighborIndex[l + 1]; m++) { int n = clustering.cluster[neighbor[m]]; if (n != i) { if (reducedNetworkEdgeWeight2[n] == 0) { reducedNetworkNeighbor2[j] = n; j++; } reducedNetworkEdgeWeight2[n] += edgeWeight[m]; } else reducedNetwork.totalEdgeWeightSelfLinks += edgeWeight[m]; } } for (int k = 0; k < j; k++) { reducedNetworkNeighbor1[reducedNetwork.nEdges + k] = reducedNetworkNeighbor2[k]; reducedNetworkEdgeWeight1[reducedNetwork.nEdges + k] = reducedNetworkEdgeWeight2[reducedNetworkNeighbor2[k]]; reducedNetworkEdgeWeight2[reducedNetworkNeighbor2[k]] = 0; } reducedNetwork.nEdges += j; reducedNetwork.firstNeighborIndex[i + 1] = reducedNetwork.nEdges; } reducedNetwork.neighbor = IVector(reducedNetworkNeighbor1.cbegin(), reducedNetworkNeighbor1.cbegin() + reducedNetwork.nEdges); reducedNetwork.edgeWeight = DVector(reducedNetworkEdgeWeight1.cbegin(), reducedNetworkEdgeWeight1.cbegin() + reducedNetwork.nEdges); return reducedNetwork; } Clustering Network::identifyComponents() { std::vector nodeVisited(nNodes, false); IVector node(nNodes); Clustering clustering(nNodes); clustering.nClusters = 0; for (int i = 0; i < nNodes; i++) if (!nodeVisited[i]) { clustering.cluster[i] = clustering.nClusters; nodeVisited[i] = true; node[0] = i; int j = 1; int k = 0; do { for (int l = firstNeighborIndex[node[k]]; l < firstNeighborIndex[node[k] + 1]; l++) if (!nodeVisited[neighbor[l]]) { clustering.cluster[neighbor[l]] = clustering.nClusters; nodeVisited[neighbor[l]] = true; node[j] = neighbor[l]; j++; } k++; } while (k < j); clustering.nClusters++; } clustering.orderClustersByNNodes(); return clustering; } // private: // double generateRandomNumber(int node1, int node2, const IVector& nodePermutation); Network Network::createSubnetwork(const Clustering& clustering, int cluster, IVector& node, IVector& subnetworkNode, IVector& subnetworkNeighbor, DVector& subnetworkEdgeWeight) const { Network subnetwork; subnetwork.nNodes = node.size(); if (subnetwork.nNodes == 1) { subnetwork.nEdges = 0; subnetwork.nodeWeight = DVector(1, nodeWeight[node[0]]); subnetwork.firstNeighborIndex = IVector(2); subnetwork.neighbor = IVector(0); subnetwork.edgeWeight = DVector(0); } else { for (size_t i = 0; i < node.size(); i++) subnetworkNode[node[i]] = i; subnetwork.nEdges = 0; subnetwork.nodeWeight = DVector(subnetwork.nNodes, 0); subnetwork.firstNeighborIndex = IVector(subnetwork.nNodes + 1); for (int i = 0; i < subnetwork.nNodes; i++) { int j = node[i]; subnetwork.nodeWeight[i] = nodeWeight[j]; for (int k = firstNeighborIndex[j]; k < firstNeighborIndex[j + 1]; k++) if (clustering.cluster[neighbor[k]] == cluster) { subnetworkNeighbor[subnetwork.nEdges] = subnetworkNode[neighbor[k]]; subnetworkEdgeWeight[subnetwork.nEdges] = edgeWeight[k]; subnetwork.nEdges++; } subnetwork.firstNeighborIndex[i + 1] = 
subnetwork.nEdges; } subnetwork.neighbor = IVector(subnetworkNeighbor.cbegin(), subnetworkNeighbor.cbegin() + subnetwork.nEdges); subnetwork.edgeWeight = DVector(subnetworkEdgeWeight.cbegin(), subnetworkEdgeWeight.cbegin() + subnetwork.nEdges); } subnetwork.totalEdgeWeightSelfLinks = 0; return subnetwork; } VOSClusteringTechnique::VOSClusteringTechnique(std::shared_ptr network, double resolution) : network(network), resolution(resolution) { clustering = std::make_shared(network->getNNodes()); clustering->initSingletonClusters(); }; VOSClusteringTechnique::VOSClusteringTechnique(std::shared_ptr network, std::shared_ptr clustering, double resolution) : network(network), clustering(clustering), resolution(resolution){}; double VOSClusteringTechnique::calcQualityFunction() { double qualityFunction = 0.0; for (int i = 0; i < network->getNNodes(); i++) { int j = clustering->cluster[i]; for (int k = network->getFirstNeighborIndexValue(i); k < network->getFirstNeighborIndexValue(i + 1); k++) if (clustering->cluster[network->getNeighborValue(k)] == j) qualityFunction += network->edgeWeight[k]; } qualityFunction += network->totalEdgeWeightSelfLinks; DVector clusterWeight(clustering->nClusters); for (int i = 0; i < network->nNodes; i++) clusterWeight[clustering->cluster[i]] += network->nodeWeight[i]; for (int i = 0; i < clustering->nClusters; i++) qualityFunction -= clusterWeight[i] * clusterWeight[i] * resolution; qualityFunction /= 2 * network->getTotalEdgeWeight() + network->totalEdgeWeightSelfLinks; return qualityFunction; } bool VOSClusteringTechnique::runLocalMovingAlgorithm(JavaRandom& random){ bool update = false; double maxQualityFunction, qualityFunction; DVector clusterWeight(network->getNNodes(), 0); IVector nNodesPerCluster(network->getNNodes(), 0); int bestCluster, j, k, l, nNeighboringClusters, nStableNodes; if (network->getNNodes() == 1) return false; for (int i = 0; i < network->getNNodes(); i++) { clusterWeight[clustering->cluster[i]] += network->nodeWeight[i]; nNodesPerCluster[clustering->cluster[i]]++; } int nUnusedClusters = 0; IVector unusedCluster(network->getNNodes(), 0); for (int i = 0; i < network->getNNodes(); i++) { if (nNodesPerCluster[i] == 0) { unusedCluster[nUnusedClusters] = i; nUnusedClusters++; } } IVector nodePermutation = Arrays2::generateRandomPermutation(network->nNodes, random); DVector edgeWeightPerCluster(network->getNNodes(), 0.0); IVector neighboringCluster(network->getNNodes() - 1, 0); nStableNodes = 0; int i = 0; do { j = nodePermutation[i]; nNeighboringClusters = 0; for (k = network->firstNeighborIndex.at(j); k < network->firstNeighborIndex.at(j + 1); k++) { l = clustering->cluster[network->neighbor[k]]; if (edgeWeightPerCluster[l] == 0) { neighboringCluster[nNeighboringClusters] = l; nNeighboringClusters++; } edgeWeightPerCluster[l] += network->edgeWeight[k]; } clusterWeight[clustering->cluster[j]] -= network->nodeWeight[j]; nNodesPerCluster[clustering->cluster[j]]--; if (nNodesPerCluster[clustering->cluster[j]] == 0) { unusedCluster[nUnusedClusters] = clustering->cluster[j]; nUnusedClusters++; } bestCluster = -1; maxQualityFunction = 0; for (k = 0; k < nNeighboringClusters; k++) { l = neighboringCluster[k]; qualityFunction = edgeWeightPerCluster[l] - network->nodeWeight[j] * clusterWeight[l] * resolution; if ((qualityFunction > maxQualityFunction) || ((qualityFunction == maxQualityFunction) && (l < bestCluster))) { bestCluster = l; maxQualityFunction = qualityFunction; } edgeWeightPerCluster[l] = 0; } if (maxQualityFunction == 0) { bestCluster = 
unusedCluster[nUnusedClusters - 1]; nUnusedClusters--; } clusterWeight[bestCluster] += network->nodeWeight[j]; nNodesPerCluster[bestCluster]++; if (bestCluster == clustering->cluster[j]) nStableNodes++; else { clustering->cluster[j] = bestCluster; nStableNodes = 1; update = true; } i = (i < network->nNodes - 1) ? (i + 1) : 0; } while (nStableNodes < network->nNodes); IVector newCluster(network->getNNodes()); clustering->nClusters = 0; for (i = 0; i < network->nNodes; i++) if (nNodesPerCluster[i] > 0) { newCluster[i] = clustering->nClusters; clustering->nClusters++; } for (i = 0; i < network->nNodes; i++) clustering->cluster[i] = newCluster[clustering->cluster[i]]; return update; } bool VOSClusteringTechnique::runLouvainAlgorithm(JavaRandom& random) { if (network->nNodes == 1) return false; bool update = runLocalMovingAlgorithm(random); if (clustering->nClusters < network->nNodes) { VOSClusteringTechnique vosClusteringTechnique(std::make_shared(network->createReducedNetwork(*clustering)), resolution); bool update2 = vosClusteringTechnique.runLouvainAlgorithm(random); if (update2) { update = true; clustering->mergeClusters(*vosClusteringTechnique.clustering); } } return update; } bool VOSClusteringTechnique::runIteratedLouvainAlgorithm(int maxNIterations, JavaRandom& random) { bool update; int i = 0; do { update = runLouvainAlgorithm(random); i++; } while ((i < maxNIterations) && update); return ((i > 1) || update); } bool VOSClusteringTechnique::runLouvainAlgorithmWithMultilevelRefinement(JavaRandom& random) { if (network->nNodes == 1) return false; bool update = runLocalMovingAlgorithm(random); if (clustering->nClusters < network->nNodes) { VOSClusteringTechnique vosClusteringTechnique(std::make_shared(network->createReducedNetwork(*clustering)), resolution); bool update2 = vosClusteringTechnique.runLouvainAlgorithmWithMultilevelRefinement(random); if (update2) { update = true; clustering->mergeClusters(*vosClusteringTechnique.clustering); runLocalMovingAlgorithm(random); } } return update;} bool VOSClusteringTechnique::runIteratedLouvainAlgorithmWithMultilevelRefinement(int maxNIterations, JavaRandom& random) { bool update; int i = 0; do { update = runLouvainAlgorithmWithMultilevelRefinement(random); i++; } while ((i < maxNIterations) && update); return ((i > 1) || update); } bool VOSClusteringTechnique::runSmartLocalMovingAlgorithm(JavaRandom& random) { if (network->nNodes == 1) return false; bool update = runLocalMovingAlgorithm(random); if (clustering->nClusters < network->nNodes) { std::vector subnetwork = network->createSubnetworks(*clustering); auto nodePerCluster = clustering->getNodesPerCluster(); clustering->nClusters = 0; IVector nNodesPerClusterReducedNetwork(subnetwork.size()); for (size_t i = 0; i < subnetwork.size(); i++) { VOSClusteringTechnique vosClusteringTechnique(std::make_shared(subnetwork[i]), resolution); vosClusteringTechnique.runLocalMovingAlgorithm(random); for (int j = 0; j < subnetwork[i].nNodes; j++) clustering->cluster[nodePerCluster[i][j]] = clustering->nClusters + vosClusteringTechnique.clustering->cluster[j]; clustering->nClusters += vosClusteringTechnique.clustering->nClusters; nNodesPerClusterReducedNetwork[i] = vosClusteringTechnique.clustering->nClusters; } VOSClusteringTechnique vosClusteringTechnique2(std::make_shared(network->createReducedNetwork(*clustering)), resolution); int i = 0; for (size_t j = 0; j < nNodesPerClusterReducedNetwork.size(); j++) { for (int k = 0; k < nNodesPerClusterReducedNetwork[j]; k++) { 
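// Node i of the reduced network is one of the sub-clusters found within original
// cluster j, so its initial assignment on the reduced network is cluster j.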
vosClusteringTechnique2.clustering->cluster[i] = static_cast(j); i++; } } vosClusteringTechnique2.clustering->nClusters = nNodesPerClusterReducedNetwork.size(); update |= vosClusteringTechnique2.runSmartLocalMovingAlgorithm(random); clustering->mergeClusters(*vosClusteringTechnique2.clustering); } return update; } bool VOSClusteringTechnique::runIteratedSmartLocalMovingAlgorithm(int nIterations, JavaRandom& random) { bool update = false; for (int i = 0; i < nIterations; i++) update |= runSmartLocalMovingAlgorithm(random); return update; } int VOSClusteringTechnique::removeCluster(int cluster) { DVector clusterWeight(clustering->nClusters); DVector totalEdgeWeightPerCluster(clustering->nClusters); for (int i = 0; i < network->nNodes; i++) { clusterWeight[clustering->cluster[i]] += network->nodeWeight[i]; if (clustering->cluster[i] == cluster) for (int j = network->firstNeighborIndex[i]; j < network->firstNeighborIndex[i + 1]; j++) totalEdgeWeightPerCluster[clustering->cluster[network->neighbor[j]]] += network->edgeWeight[j]; } int i = -1; double maxQualityFunction = 0; for (int j = 0; j < clustering->nClusters; j++) if ((j != cluster) && (clusterWeight[j] > 0)) { double qualityFunction = totalEdgeWeightPerCluster[j] / clusterWeight[j]; if (qualityFunction > maxQualityFunction) { i = j; maxQualityFunction = qualityFunction; } } if (i >= 0) { for (int j = 0; j < network->nNodes; j++) if (clustering->cluster[j] == cluster) clustering->cluster[j] = i; if (cluster == clustering->nClusters - 1) clustering->nClusters = *std::max_element(clustering->cluster.cbegin(), clustering->cluster.cend()) + 1; } return i; } void VOSClusteringTechnique::removeSmallClusters(int minNNodesPerCluster) { VOSClusteringTechnique vosClusteringTechnique(std::make_shared(network->createReducedNetwork(*clustering)), resolution); auto nNodesPerCluster = clustering->getNNodesPerCluster(); int i; do { i = -1; int j = minNNodesPerCluster; for (int k = 0; k < vosClusteringTechnique.clustering->nClusters; k++) if ((nNodesPerCluster[k] > 0) && (nNodesPerCluster[k] < j)) { i = k; j = nNodesPerCluster[k]; } if (i >= 0) { j = vosClusteringTechnique.removeCluster(i); if (j >= 0) nNodesPerCluster[j] += nNodesPerCluster[i]; nNodesPerCluster[i] = 0; } } while (i >= 0); clustering->mergeClusters(*vosClusteringTechnique.clustering); } std::shared_ptr ModularityOptimizer::matrixToNetwork(IVector& node1, IVector& node2, DVector& edgeWeight1, int modularityFunction, int nNodes) { int n1_max = *std::max_element(node1.cbegin(), node1.cend()); int n2_max = *std::max_element(node2.cbegin(), node2.cend()); IVector nNeighbors(nNodes); for (size_t i = 0; i < node1.size(); i++) if (node1[i] < node2[i]) { nNeighbors[node1[i]]++; nNeighbors[node2[i]]++; } IVector firstNeighborIndex(nNodes + 1); int nEdges = 0; for (int i = 0; i < nNodes; i++) { firstNeighborIndex[i] = nEdges; nEdges += nNeighbors[i]; } firstNeighborIndex[nNodes] = nEdges; IVector neighbor(nEdges); DVector edgeWeight2(nEdges); std::fill(nNeighbors.begin(), nNeighbors.end(), 0); for (size_t i = 0; i < node1.size(); i++) if (node1[i] < node2[i]) { int j = firstNeighborIndex[node1[i]] + nNeighbors[node1[i]]; neighbor[j] = node2[i]; edgeWeight2[j] = edgeWeight1[i]; nNeighbors[node1[i]]++; j = firstNeighborIndex[node2[i]] + nNeighbors[node2[i]]; neighbor[j] = node1[i]; edgeWeight2[j] = edgeWeight1[i]; nNeighbors[node2[i]]++; } if (modularityFunction == 1) return std::make_shared(nNodes, firstNeighborIndex, neighbor, &edgeWeight2); else { DVector nodeWeight(nNodes, 1.0); return 
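// Alternative modularity (modularityFunction == 2): every node gets unit weight.
// For the standard function (1) no node weights are passed above, leaving the
// Network constructor to derive them (typically each node's total edge weight).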
std::make_shared(nNodes, &nodeWeight, firstNeighborIndex, neighbor, &edgeWeight2); } } std::shared_ptr ModularityOptimizer::readInputFile(std::string fname, int modularityFunction) { std::ifstream f; f.open(fname, std::ios::in); if(!f) { throw std::runtime_error("File could not be opened."); } std::string line; int nLines = 0; while(std::getline(f, line)) { nLines++; } f.clear(); f.seekg(0, std::ios::beg); IVector node1(nLines); IVector node2(nLines); DVector edgeWeight1(nLines, 1.0); for (int j = 0; j < nLines; j++) { std::getline(f, line); auto splittedLine = split(line, '\t'); node1[j] =std::stoi(splittedLine[0]); node2[j] = std::stoi(splittedLine[1]); if(splittedLine.size() > 2) { edgeWeight1[j] = std::stod(splittedLine[2]); } } int n1_max = *std::max_element(node1.cbegin(), node1.cend()); int n2_max = *std::max_element(node2.cbegin(), node2.cend()); int nNodes = std::max(n1_max, n2_max) + 1; return matrixToNetwork(node1, node2, edgeWeight1, modularityFunction, nNodes); } std::vector ModularityOptimizer::split(const std::string& s, char delimiter) { std::vector tokens; std::string token; std::istringstream tokenStream(s); while (std::getline(tokenStream, token, delimiter)) { tokens.push_back(token); } return tokens; } #ifdef STANDALONE void writeOutputFile(std::string fname, Clustering& clustering) { int nNodes = clustering.getNNodes(); clustering.orderClustersByNNodes(); std::ofstream f(fname, std::ios::out); for(int i=0; i < nNodes; i++) f << clustering.getCluster(i) << std::endl; f.close(); } template void input(std::string msg, T& value) { std::cout << msg << std::endl << std::endl; std::cin >> value; } int main(int argc, char* argv[]) { std::string msg = "Modularity Optimizer version 1.3.0 by Ludo Waltman and Nees Jan van Eck"; std::vector args; std::string inputFileName, outputFileName; bool printOutput, update; double modularity, maxModularity, resolution, resolution2; int algorithm, i, j, modularityFunction, nIterations, nRandomStarts; unsigned long long int randomSeed; for(int i=0; i 0); if (printOutput) { std::cout << msg << std::endl << std::endl; } } else { std::cout << msg << std::endl << std::endl; input("Input file name: ", inputFileName); input("Output file name: ", outputFileName); input("Modularity function (1 = standard; 2 = alternative): ", modularityFunction); input("Resolution parameter (e.g., 1.0): ", resolution); input("Algorithm (1 = Louvain; 2 = Louvain with multilevel refinement; 3 = smart local moving): ", algorithm); input("Number of random starts (e.g., 10): ", nRandomStarts); input("Number of iterations (e.g., 10): ",nIterations); input("Random seed (e.g., 0): ", randomSeed); int tmp; input("Print output (0 = no; 1 = yes): ",tmp); printOutput = tmp > 0; std::cout << std::endl; } if (printOutput) { std::cout << "Reading input file..." << std::endl << std::endl; } std::shared_ptr network = readInputFile(inputFileName, modularityFunction); if (printOutput) { std::printf("Number of nodes: %d\n", network->getNNodes()); std::printf("Number of edges: %d\n", network->getNEdges()); std::cout << std::endl; std::cout << "Running " << ((algorithm == 1) ? "Louvain algorithm" : ((algorithm == 2) ? "Louvain algorithm with multilevel refinement" : "smart local moving algorithm")) << "..."; std::cout << std::endl; } resolution2 = ((modularityFunction == 1) ? 
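// For standard modularity the resolution is divided by
// (2 * totalEdgeWeight + totalEdgeWeightSelfLinks), matching the normalisation
// applied in calcQualityFunction; the alternative function uses it unchanged: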
(resolution / (2 * network->getTotalEdgeWeight() + network->getTotalEdgeWeightSelfLinks())) : resolution); auto beginTime = duration_cast(system_clock::now().time_since_epoch()); std::shared_ptr clustering; maxModularity = -std::numeric_limits::infinity(); JavaRandom random(randomSeed); for (i = 0; i < nRandomStarts; i++) { if (printOutput && (nRandomStarts > 1)) std::printf("Random start: %d\n", i + 1); VOSClusteringTechnique vosClusteringTechnique(network, resolution2); j = 0; update = true; do { if (printOutput && (nIterations > 1)) std::printf("Iteration: %d\n", j + 1); if (algorithm == 1) update = vosClusteringTechnique.runLouvainAlgorithm(random); else if (algorithm == 2) update = vosClusteringTechnique.runLouvainAlgorithmWithMultilevelRefinement(random); else if (algorithm == 3) vosClusteringTechnique.runSmartLocalMovingAlgorithm(random); j++; modularity = vosClusteringTechnique.calcQualityFunction(); if (printOutput && (nIterations > 1)) std::printf("Modularity: %.4f\n", modularity); } while ((j < nIterations) && update); if (modularity > maxModularity) { clustering = vosClusteringTechnique.getClustering(); maxModularity = modularity; } if (printOutput && (nRandomStarts > 1)) { if (nIterations == 1) std::printf("Modularity: %.4f\n", modularity); std::cout << std::endl; } } auto endTime = duration_cast(system_clock::now().time_since_epoch()); if (printOutput) { if (nRandomStarts == 1) { if (nIterations > 1) std::cout << std::endl; std::printf("Modularity: %.4f\n", maxModularity); } else std::printf("Maximum modularity in %d random starts: %.4f\n", nRandomStarts, maxModularity); std::printf("Number of communities: %d\n", clustering->getNClusters()); std::printf("Elapsed time: %d seconds\n", static_cast((endTime - beginTime).count() / 1000.0)); std::cout << std::endl << "Writing output file..." 
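// The clustering written out below is the best one across all random starts,
// i.e. the run with the highest modularity tracked in maxModularity above.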
<< std::endl; } writeOutputFile(outputFileName, *clustering); } catch (std::exception a) { std::cout << a.what() << std::endl; } return 0; }; #endif Seurat/src/fast_NN_dist.cpp0000644000176200001440000000400414005656653015373 0ustar liggesusers#include using namespace Rcpp; // code in-parts taken from https://gallery.rcpp.org/articles/parallel-distance-matrix/ // Assumption: the end position of vector2 is implied by the end position of vector1 // generic function for euclidean distance template inline double euclidean_distance(InputIterator1 begin1, InputIterator1 end1, InputIterator2 begin2) { // value to return double rval = 0; // set iterators to beginning of ranges InputIterator1 it1 = begin1; InputIterator2 it2 = begin2; // for each input item while (it1 != end1) { // take the value and increment the iterator double d1 = *it1++; double d2 = *it2++; // update the distance rval += pow(d1-d2, 2); } return sqrt(rval); } // [[Rcpp::export]] List fast_dist(NumericMatrix x, NumericMatrix y, List n) { // extracting the number of element in the knn graph size_t ngraph_size = n.size(); if (x.nrow() != ngraph_size) { return List(); } List distances_list = clone(n); // looping over the neigbors for (size_t i=0; i #include #include #include #include #include using namespace Rcpp; //---------------------------------------------------- Eigen::SparseMatrix RunUMISampling(Eigen::SparseMatrix data, int sample_val, bool upsample, bool display_progress); Eigen::SparseMatrix RunUMISamplingPerCell(Eigen::SparseMatrix data, NumericVector sample_val, bool upsample, bool display_progress); Eigen::SparseMatrix RowMergeMatrices(Eigen::SparseMatrix mat1, Eigen::SparseMatrix mat2, std::vector< std::string > mat1_rownames, std::vector< std::string > mat2_rownames, std::vector< std::string > all_rownames); Eigen::SparseMatrix LogNorm(Eigen::SparseMatrix data, int scale_factor, bool display_progress ); NumericMatrix Standardize(const Eigen::Map mat, bool display_progress); Eigen::MatrixXd FastSparseRowScale(Eigen::SparseMatrix mat, bool scale, bool center, double scale_max, bool display_progress); Eigen::MatrixXd FastCov(Eigen::MatrixXd mat, bool center); Eigen::MatrixXd FastCovMats(Eigen::MatrixXd mat1, Eigen::MatrixXd mat2, bool center); Eigen::MatrixXd FastRBind(Eigen::MatrixXd mat1, Eigen::MatrixXd mat2); Eigen::VectorXd FastExpMean(Eigen::MatrixXd mat, bool display_progress); Eigen::VectorXd FastRowMean(Eigen::MatrixXd mat, bool display_progress); Eigen::VectorXd FastLogVMR(Eigen::SparseMatrix mat, bool display_progress); Eigen::VectorXd FastExpVar(Eigen::SparseMatrix mat, bool display_progress); Eigen::VectorXd SparseRowVar(Eigen::SparseMatrix mat, bool display_progress); NumericVector SparseRowVar2(Eigen::SparseMatrix mat, NumericVector mu, bool display_progress); NumericVector SparseRowVarStd(Eigen::SparseMatrix mat, NumericVector mu, NumericVector sd, double vmax, bool display_progress); NumericVector RowVar(Eigen::Map x); template std::vector sort_indexes(const std::vector &v); List GraphToNeighborHelper(Eigen::SparseMatrix mat); //---------------------------------------------------- #endif//DATA_MANIPULATION Seurat/src/integration.h0000644000176200001440000000175414005656653015021 0ustar liggesusers#ifndef CORRECT_EXPRESSION #define CORRECT_EXPRESSION #include #include using namespace Rcpp; //---------------------------------------------------- Eigen::SparseMatrix FindWeightsC( NumericVector cells2, Eigen::MatrixXd distances, std::vector anchor_cells2, std::vector integration_matrix_rownames, 
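// (remaining FindWeightsC arguments: the per-cell matrix of neighbouring anchor
// indices, per-anchor scores, and the min_dist / sd parameters of the
// distance-based anchor weighting)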
Eigen::MatrixXd cell_index, Eigen::VectorXd anchor_score, double min_dist, double sd, bool display_progress ); Eigen::SparseMatrix IntegrateDataC( Eigen::SparseMatrix integration_matrix, Eigen::SparseMatrix weights, Eigen::SparseMatrix expression_cells2 ); std::vector ScoreHelper( Eigen::SparseMatrix snn, Eigen::MatrixXd query_pca, Eigen::MatrixXd query_dists, Eigen::MatrixXd corrected_nns, int k_snn, bool subtract_first_nn, bool display_progress ); //---------------------------------------------------- #endif//CORRECT_EXPRESSION Seurat/src/ModularityOptimizer.h0000644000176200001440000001323513712563445016527 0ustar liggesusers#pragma once #include #include #include #include #include #include #include #include typedef std::vector IVector; typedef std::vector DVector; namespace ModularityOptimizer { class JavaRandom { private: uint64_t seed; int next(int bits); public: JavaRandom(uint64_t seed); int nextInt(int n); void setSeed(uint64_t seed); }; namespace Arrays2 { IVector generateRandomPermutation(int nElements); IVector generateRandomPermutation(int nElements, JavaRandom& random); } class Clustering { private: int nNodes; public: // Note: These two variables were "protected" in java, which means it is accessible to the whole package/public. // Although we could have used friend classes, this allows for better mirroring of the original code. int nClusters; IVector cluster; Clustering(int nNodes); Clustering(IVector cluster); int getNNodes() const {return nNodes;}; int getNClusters() const {return nClusters;}; IVector getClusters() const {return cluster;}; int getCluster(int node) const {return cluster[node];}; IVector getNNodesPerCluster() const; std::vector getNodesPerCluster() const; void setCluster(int node, int cluster); void initSingletonClusters(); void orderClustersByNNodes(); void mergeClusters(const Clustering& clustering); }; class Network { friend class VOSClusteringTechnique; protected: int nNodes; int nEdges; DVector nodeWeight; IVector firstNeighborIndex; IVector neighbor; DVector edgeWeight; double totalEdgeWeightSelfLinks; public: Network(); Network(int nNodes, DVector* nodeWeight, std::vector& edge, DVector* edgeWeight); Network(int nNodes, std::vector& edge) : Network(nNodes, nullptr, edge, nullptr) { }; Network(int nNodes, DVector* nodeWeight, std::vector edge) : Network(nNodes, nodeWeight, edge, nullptr) {}; Network(int nNodes, std::vector& edge, DVector* edgeWeight) : Network(nNodes, nullptr, edge, edgeWeight) {}; Network(int nNodes, DVector* nodeWeight, IVector& firstNeighborIndex, IVector& neighbor, DVector* edgeWeight); Network(int nNodes, IVector& firstNeighborIndex, IVector& neighbor) : Network(nNodes, nullptr, firstNeighborIndex, neighbor, nullptr) {}; Network(int nNodes, DVector* nodeWeight, IVector& firstNeighborIndex, IVector& neighbor) : Network(nNodes, nodeWeight, firstNeighborIndex, neighbor, nullptr){}; Network(int nNodes, IVector& firstNeighborIndex, IVector& neighbor, DVector* edgeWeight) : Network(nNodes, nullptr, firstNeighborIndex, neighbor, edgeWeight) {}; int getNNodes() {return nNodes;}; double getTotalNodeWeight(); DVector getNodeWeights(); double getNodeWeight(int node) { return nodeWeight.at(node);}; int getNEdges() {return nEdges / 2;}; int getNEdges(int node) {return firstNeighborIndex.at(node + 1) - firstNeighborIndex.at(node);}; IVector getNEdgesPerNode(); std::vector getEdges(); IVector getEdges(int node); std::vector getEdgesPerNode(); double getTotalEdgeWeight(); double getTotalEdgeWeight(int node); DVector getTotalEdgeWeightPerNode(); 
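// Edge-weight accessors: all edge weights, the weights of one node's edges,
// and the per-node lists of edge weights.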
DVector getEdgeWeights() {return edgeWeight;}; DVector getEdgeWeights(int node); std::vector getEdgeWeightsPerNode(); double getTotalEdgeWeightSelfLinks() { return totalEdgeWeightSelfLinks; }; // Added these to avoid making these values public int getFirstNeighborIndexValue(int i) const { return firstNeighborIndex.at(i); }; int getNeighborValue(int index) const { return neighbor.at(index); } std::vector createSubnetworks(Clustering clustering) const; Network createReducedNetwork(const Clustering& clustering) const; Clustering identifyComponents(); private: double generateRandomNumber(int node1, int node2, const IVector& nodePermutation); Network createSubnetwork(const Clustering& clustering, int cluster, IVector& node, IVector& subnetworkNode, IVector& subnetworkNeighbor, DVector& subnetworkEdgeWeight) const; }; class VOSClusteringTechnique { private: std::shared_ptr network; std::shared_ptr clustering; double resolution; public: VOSClusteringTechnique(std::shared_ptr network, double resolution); VOSClusteringTechnique(std::shared_ptr network, std::shared_ptr clustering, double resolution); std::shared_ptr getNetwork() { return network;} std::shared_ptr getClustering() { return clustering; } double getResolution() {return resolution; } void setNetwork(std::shared_ptr network) {this->network = network;} void setClustering(std::shared_ptr clustering) {this->clustering = clustering;} void setResolution(double resolution) {this->resolution = resolution;} double calcQualityFunction(); bool runLocalMovingAlgorithm(JavaRandom& random); bool runLouvainAlgorithm(JavaRandom& random); bool runIteratedLouvainAlgorithm(int maxNIterations, JavaRandom& random); bool runLouvainAlgorithmWithMultilevelRefinement(JavaRandom& random); bool runIteratedLouvainAlgorithmWithMultilevelRefinement(int maxNIterations, JavaRandom& random); bool runSmartLocalMovingAlgorithm(JavaRandom& random); bool runIteratedSmartLocalMovingAlgorithm(int nIterations, JavaRandom& random); int removeCluster(int cluster); void removeSmallClusters(int minNNodesPerCluster); }; std::shared_ptr matrixToNetwork(IVector& node1, IVector& node2, DVector& edgeWeight1, int modularityFunction, int nNodes); std::shared_ptr readInputFile(std::string fname, int modularityFunction); std::vector split(const std::string& s, char delimiter); }; Seurat/src/valid_pointer.c0000644000176200001440000000024713725463216015323 0ustar liggesusers#include // helper to determine if external c++ pointer is valid SEXP isnull(SEXP pointer) { return Rf_ScalarLogical(!R_ExternalPtrAddr(pointer)); } Seurat/src/integration.cpp0000644000176200001440000001432714005656653015354 0ustar liggesusers#include #include #include #include "data_manipulation.h" using namespace Rcpp; // [[Rcpp::depends(RcppEigen)]] // [[Rcpp::depends(RcppProgress)]] typedef Eigen::Triplet T; // [[Rcpp::export(rng = false)]] Eigen::SparseMatrix FindWeightsC( NumericVector cells2, Eigen::MatrixXd distances, std::vector anchor_cells2, std::vector integration_matrix_rownames, Eigen::MatrixXd cell_index, Eigen::VectorXd anchor_score, double min_dist, double sd, bool display_progress ) { std::vector tripletList; tripletList.reserve(anchor_cells2.size() * 10); std::unordered_map> cell_map; Progress p(anchor_cells2.size() + cells2.size() , display_progress); // build map from anchor_cells2 to integration_matrix rows for(int i=0; i matches; std::vector::iterator iter = integration_matrix_rownames.begin(); while ((iter = std::find(iter, integration_matrix_rownames.end(), anchor_cells2[i])) != 
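// An anchor cell can occur in several rows of the integration matrix, so every
// matching row index is collected for this anchor cell: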
integration_matrix_rownames.end()) { int idx = std::distance(integration_matrix_rownames.begin(), iter); matches.push_back(idx); iter++; } cell_map[i] = matches; p.increment(); } // Construct dist_weights matrix for(auto const &cell : cells2){ Eigen::VectorXd dist = distances.row(cell); Eigen::VectorXd indices = cell_index.row(cell); int k=0; //number of anchors used so far; a cell in the neighbor list may contribute to multiple anchors for(int i=0; i mnn_idx = cell_map[indices[i]-1]; for(int j=0; j return_mat; if(min_dist == 0){ Eigen::SparseMatrix dist_weights(integration_matrix_rownames.size(), cells2.size()); dist_weights.setFromTriplets(tripletList.begin(), tripletList.end(), [] (const double&, const double &b) { return b; }); Eigen::VectorXd colSums = dist_weights.transpose() * Eigen::VectorXd::Ones(dist_weights.rows()); for (int k=0; k < dist_weights.outerSize(); ++k){ for (Eigen::SparseMatrix::InnerIterator it(dist_weights, k); it; ++it){ it.valueRef() = it.value()/colSums[k]; } } return_mat = dist_weights; } else { Eigen::MatrixXd dist_weights = Eigen::MatrixXd::Constant(integration_matrix_rownames.size(), cells2.size(), min_dist); for(int i = 0; i < dist_weights.cols(); ++i){ for(int j = 0; j < dist_weights.rows(); ++j){ dist_weights(j, i) = 1 - exp(-1 * dist_weights(j, i) * anchor_score[j]/ pow(2/sd, 2) ); } } for(auto const &weight : tripletList){ dist_weights(weight.row(), weight.col()) = weight.value(); } Eigen::VectorXd colSums = dist_weights.colwise().sum(); for(int i = 0; i < dist_weights.cols(); ++i){ for(int j = 0; j < dist_weights.rows(); ++j){ dist_weights(j, i) = dist_weights(j, i) / colSums[i]; } } return_mat = dist_weights.sparseView(); } return(return_mat); } // [[Rcpp::export(rng = false)]] Eigen::SparseMatrix IntegrateDataC( Eigen::SparseMatrix integration_matrix, Eigen::SparseMatrix weights, Eigen::SparseMatrix expression_cells2 ) { Eigen::SparseMatrix corrected = expression_cells2 - weights.transpose() * integration_matrix; return(corrected); } // [[Rcpp::export]] std::vector ScoreHelper( Eigen::SparseMatrix snn, Eigen::MatrixXd query_pca, Eigen::MatrixXd query_dists, Eigen::MatrixXd corrected_nns, int k_snn, bool subtract_first_nn, bool display_progress ) { std::vector scores; // Loop over all query cells Progress p(snn.outerSize(), display_progress); for (int i=0; i < snn.outerSize(); ++i){ p.increment(); // create vectors to store the nonzero snn elements and their indices std::vector nonzero; std::vector nonzero_idx; for (Eigen::SparseMatrix::InnerIterator it(snn, i); it; ++it) { nonzero.push_back(it.value()); nonzero_idx.push_back(it.row()); } // find the k_snn cells with the smallest non-zero edge weights to use in // computing the transition probability bandwidth std::vector nonzero_order = sort_indexes(nonzero); std::vector bw_dists; int k_snn_i = k_snn; if (k_snn_i > nonzero_order.size()) k_snn_i = nonzero_order.size(); for (int j = 0; j < nonzero_order.size(); ++j) { // compute euclidean distances to cells with small edge weights size_t cell = nonzero_idx[nonzero_order[j]]; if(bw_dists.size() < k_snn_i || nonzero[nonzero_order[j]] == nonzero[nonzero_order[k_snn_i-1]]) { double res = (query_pca.col(cell) - query_pca.col(i)).norm(); bw_dists.push_back(res); } else { break; } } // compute bandwidth as the mean distance of the farthest k_snn cells double bw; if (bw_dists.size() > k_snn_i) { std::sort(bw_dists.rbegin(), bw_dists.rend()); bw = std::accumulate(bw_dists.begin(), bw_dists.begin() + k_snn_i, 0.0) / k_snn_i; } else { bw = 
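// No more than k_snn distances were collected, so the bandwidth falls back to
// the mean of all of them: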
std::accumulate(bw_dists.begin(), bw_dists.end(), 0.0) / bw_dists.size(); } // compute transition probabilites double first_neighbor_dist; // subtract off distance to first neighbor? if (subtract_first_nn) { first_neighbor_dist = query_dists(i, 1); } else { first_neighbor_dist = 0; } bw = bw - first_neighbor_dist; double q_tps = 0; for(int j = 0; j < query_dists.cols(); ++j) { q_tps += std::exp(-1 * (query_dists(i, j) - first_neighbor_dist) / bw); } q_tps = q_tps/(query_dists.cols()); double c_tps = 0; for(int j = 0; j < corrected_nns.cols(); ++j) { c_tps += exp(-1 * ((query_pca.col(i) - query_pca.col(corrected_nns(i, j)-1)).norm() - first_neighbor_dist) / bw); } c_tps = c_tps/(corrected_nns.cols()); scores.push_back(c_tps/q_tps); } return(scores); } Seurat/src/data_manipulation.cpp0000644000176200001440000003600314005656653016515 0ustar liggesusers#include #include #include #include #include #include #include using namespace Rcpp; // [[Rcpp::depends(RcppEigen)]] // [[Rcpp::depends(RcppProgress)]] // [[Rcpp::export]] Eigen::SparseMatrix RunUMISampling(Eigen::SparseMatrix data, int sample_val, bool upsample = false, bool display_progress=true){ Progress p(data.outerSize(), display_progress); Eigen::VectorXd colSums = data.transpose() * Eigen::VectorXd::Ones(data.rows()); for (int k=0; k < data.outerSize(); ++k){ p.increment(); for (Eigen::SparseMatrix::InnerIterator it(data, k); it; ++it){ double entry = it.value(); if( (upsample) || (colSums[k] > sample_val)){ entry = entry * double(sample_val) / colSums[k]; if (fmod(entry, 1) != 0){ double rn = R::runif(0,1); if(fmod(entry, 1) <= rn){ it.valueRef() = floor(entry); } else{ it.valueRef() = ceil(entry); } } else{ it.valueRef() = entry; } } } } return(data); } // [[Rcpp::export]] Eigen::SparseMatrix RunUMISamplingPerCell(Eigen::SparseMatrix data, NumericVector sample_val, bool upsample = false, bool display_progress=true){ Progress p(data.outerSize(), display_progress); Eigen::VectorXd colSums = data.transpose() * Eigen::VectorXd::Ones(data.rows()); for (int k=0; k < data.outerSize(); ++k){ p.increment(); for (Eigen::SparseMatrix::InnerIterator it(data, k); it; ++it){ double entry = it.value(); if( (upsample) || (colSums[k] > sample_val[k])){ entry = entry * double(sample_val[k]) / colSums[k]; if (fmod(entry, 1) != 0){ double rn = R::runif(0,1); if(fmod(entry, 1) <= rn){ it.valueRef() = floor(entry); } else{ it.valueRef() = ceil(entry); } } else{ it.valueRef() = entry; } } } } return(data); } typedef Eigen::Triplet T; // [[Rcpp::export(rng = false)]] Eigen::SparseMatrix RowMergeMatrices(Eigen::SparseMatrix mat1, Eigen::SparseMatrix mat2, std::vector< std::string > mat1_rownames, std::vector< std::string > mat2_rownames, std::vector< std::string > all_rownames){ // Set up hash maps for rowname based lookup std::unordered_map mat1_map; for(unsigned int i = 0; i < mat1_rownames.size(); i++){ mat1_map[mat1_rownames[i]] = i; } std::unordered_map mat2_map; for(unsigned int i = 0; i < mat2_rownames.size(); i++){ mat2_map[mat2_rownames[i]] = i; } // set up tripletList for new matrix creation std::vector tripletList; int num_rows = all_rownames.size(); int num_col1 = mat1.cols(); int num_col2 = mat2.cols(); tripletList.reserve(mat1.nonZeros() + mat2.nonZeros()); for(int i = 0; i < num_rows; i++){ std::string key = all_rownames[i]; if (mat1_map.count(key)){ for(Eigen::SparseMatrix::InnerIterator it1(mat1, mat1_map[key]); it1; ++it1){ tripletList.emplace_back(i, it1.col(), it1.value()); } } if (mat2_map.count(key)){ 
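// Columns of mat2 are appended after those of mat1 in the merged matrix, so its
// entries are shifted right by num_col1: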
for(Eigen::SparseMatrix::InnerIterator it2(mat2, mat2_map[key]); it2; ++it2){ tripletList.emplace_back(i, num_col1 + it2.col(), it2.value()); } } } Eigen::SparseMatrix combined_mat(num_rows, num_col1 + num_col2); combined_mat.setFromTriplets(tripletList.begin(), tripletList.end()); return combined_mat; } // [[Rcpp::export(rng = false)]] Eigen::SparseMatrix LogNorm(Eigen::SparseMatrix data, int scale_factor, bool display_progress = true){ Progress p(data.outerSize(), display_progress); Eigen::VectorXd colSums = data.transpose() * Eigen::VectorXd::Ones(data.rows()); for (int k=0; k < data.outerSize(); ++k){ p.increment(); for (Eigen::SparseMatrix::InnerIterator it(data, k); it; ++it){ it.valueRef() = log1p(double(it.value()) / colSums[k] * scale_factor); } } return data; } /* Performs column scaling and/or centering. Equivalent to using scale(mat, TRUE, apply(x,2,sd)) in R. Note: Doesn't handle NA/NaNs in the same way the R implementation does, */ // [[Rcpp::export(rng = false)]] NumericMatrix Standardize(Eigen::Map mat, bool display_progress = true){ Progress p(mat.cols(), display_progress); NumericMatrix std_mat(mat.rows(), mat.cols()); for(int i=0; i < mat.cols(); ++i){ p.increment(); Eigen::ArrayXd r = mat.col(i).array(); double colMean = r.mean(); double colSdev = sqrt((r - colMean).square().sum() / (mat.rows() - 1)); NumericMatrix::Column new_col = std_mat(_, i); for(int j=0; j < new_col.size(); j++) { new_col[j] = (r[j] - colMean) / colSdev; } } return std_mat; } // [[Rcpp::export(rng = false)]] Eigen::MatrixXd FastSparseRowScale(Eigen::SparseMatrix mat, bool scale = true, bool center = true, double scale_max = 10, bool display_progress = true){ mat = mat.transpose(); Progress p(mat.outerSize(), display_progress); Eigen::MatrixXd scaled_mat(mat.rows(), mat.cols()); for (int k=0; k::InnerIterator it(mat,k); it; ++it) { colMean += it.value(); } colMean = colMean / mat.rows(); if (scale == true){ int nnZero = 0; if(center == true){ for (Eigen::SparseMatrix::InnerIterator it(mat,k); it; ++it) { nnZero += 1; colSdev += pow((it.value() - colMean), 2); } colSdev += pow(colMean, 2) * (mat.rows() - nnZero); } else{ for (Eigen::SparseMatrix::InnerIterator it(mat,k); it; ++it) { colSdev += pow(it.value(), 2); } } colSdev = sqrt(colSdev / (mat.rows() - 1)); } else{ colSdev = 1; } if(center == false){ colMean = 0; } Eigen::VectorXd col = Eigen::VectorXd(mat.col(k)); scaled_mat.col(k) = (col.array() - colMean) / colSdev; for(int s=0; s scale_max){ scaled_mat(s,k) = scale_max; } } } return scaled_mat.transpose(); } // [[Rcpp::export(rng = false)]] Eigen::MatrixXd FastSparseRowScaleWithKnownStats(Eigen::SparseMatrix mat, NumericVector mu, NumericVector sigma, bool scale = true, bool center = true, double scale_max = 10, bool display_progress = true){ mat = mat.transpose(); Progress p(mat.outerSize(), display_progress); Eigen::MatrixXd scaled_mat(mat.rows(), mat.cols()); for (int k=0; k scale_max){ scaled_mat(s,k) = scale_max; } } } return scaled_mat.transpose(); } /* Note: May not handle NA/NaNs in the same way the R implementation does, */ // [[Rcpp::export(rng = false)]] Eigen::MatrixXd FastCov(Eigen::MatrixXd mat, bool center = true){ if (center) { mat = mat.rowwise() - mat.colwise().mean(); } Eigen::MatrixXd cov = (mat.adjoint() * mat) / double(mat.rows() - 1); return(cov); } // [[Rcpp::export(rng = false)]] Eigen::MatrixXd FastCovMats(Eigen::MatrixXd mat1, Eigen::MatrixXd mat2, bool center = true){ if(center){ mat1 = mat1.rowwise() - mat1.colwise().mean(); mat2 = mat2.rowwise() - 
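// Both matrices are column-centred before the cross-covariance
// (mat1' * mat2) / (rows - 1) is formed: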
mat2.colwise().mean(); } Eigen::MatrixXd cov = (mat1.adjoint() * mat2) / double(mat1.rows() - 1); return(cov); } /* Note: Faster than the R implementation but is not in-place */ // [[Rcpp::export(rng = false)]] Eigen::MatrixXd FastRBind(Eigen::MatrixXd mat1, Eigen::MatrixXd mat2){ Eigen::MatrixXd mat3(mat1.rows() + mat2.rows(), mat1.cols()); mat3 << mat1, mat2; return(mat3); } /* Calculates the row means of the logged values in non-log space */ // [[Rcpp::export(rng = false)]] Eigen::VectorXd FastExpMean(Eigen::SparseMatrix mat, bool display_progress){ int ncols = mat.cols(); Eigen::VectorXd rowmeans(mat.rows()); mat = mat.transpose(); if(display_progress == true){ Rcpp::Rcerr << "Calculating gene means" << std::endl; } Progress p(mat.outerSize(), display_progress); for (int k=0; k::InnerIterator it(mat,k); it; ++it){ rm += expm1(it.value()); } rm = rm / ncols; rowmeans[k] = log1p(rm); } return(rowmeans); } /* use this if you know the row means */ // [[Rcpp::export(rng = false)]] NumericVector SparseRowVar2(Eigen::SparseMatrix mat, NumericVector mu, bool display_progress){ mat = mat.transpose(); if(display_progress == true){ Rcpp::Rcerr << "Calculating gene variances" << std::endl; } Progress p(mat.outerSize(), display_progress); NumericVector allVars = no_init(mat.cols()); for (int k=0; k::InnerIterator it(mat,k); it; ++it) { nZero -= 1; colSum += pow(it.value() - mu[k], 2); } colSum += pow(mu[k], 2) * nZero; allVars[k] = colSum / (mat.rows() - 1); } return(allVars); } /* standardize matrix rows using given mean and standard deviation, clip values larger than vmax to vmax, then return variance for each row */ // [[Rcpp::export(rng = false)]] NumericVector SparseRowVarStd(Eigen::SparseMatrix mat, NumericVector mu, NumericVector sd, double vmax, bool display_progress){ if(display_progress == true){ Rcpp::Rcerr << "Calculating feature variances of standardized and clipped values" << std::endl; } mat = mat.transpose(); NumericVector allVars(mat.cols()); Progress p(mat.outerSize(), display_progress); for (int k=0; k::InnerIterator it(mat,k); it; ++it) { nZero -= 1; colSum += pow(std::min(vmax, (it.value() - mu[k]) / sd[k]), 2); } colSum += pow((0 - mu[k]) / sd[k], 2) * nZero; allVars[k] = colSum / (mat.rows() - 1); } return(allVars); } /* Calculate the variance to mean ratio (VMR) in non-logspace (return answer in log-space) */ // [[Rcpp::export(rng = false)]] Eigen::VectorXd FastLogVMR(Eigen::SparseMatrix mat, bool display_progress){ int ncols = mat.cols(); Eigen::VectorXd rowdisp(mat.rows()); mat = mat.transpose(); if(display_progress == true){ Rcpp::Rcerr << "Calculating gene variance to mean ratios" << std::endl; } Progress p(mat.outerSize(), display_progress); for (int k=0; k::InnerIterator it(mat,k); it; ++it){ rm += expm1(it.value()); } rm = rm / ncols; for (Eigen::SparseMatrix::InnerIterator it(mat,k); it; ++it){ v += pow(expm1(it.value()) - rm, 2); nnZero += 1; } v = (v + (ncols - nnZero) * pow(rm, 2)) / (ncols - 1); rowdisp[k] = log(v/rm); } return(rowdisp); } /* Calculates the variance of rows of a matrix */ // [[Rcpp::export(rng = false)]] NumericVector RowVar(Eigen::Map x){ NumericVector out(x.rows()); for(int i=0; i < x.rows(); ++i){ Eigen::ArrayXd r = x.row(i).array(); double rowMean = r.mean(); out[i] = (r - rowMean).square().sum() / (x.cols() - 1); } return out; } /* Calculate the variance in non-logspace (return answer in non-logspace) */ // [[Rcpp::export(rng = false)]] Eigen::VectorXd SparseRowVar(Eigen::SparseMatrix mat, bool display_progress){ int ncols = mat.cols(); 
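// As in the other sparse helpers, the matrix is transposed so that each original
// row is traversed as one compressed (inner) column while its variance accumulates.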
Eigen::VectorXd rowdisp(mat.rows()); mat = mat.transpose(); if(display_progress == true){ Rcpp::Rcerr << "Calculating gene variances" << std::endl; } Progress p(mat.outerSize(), display_progress); for (int k=0; k::InnerIterator it(mat,k); it; ++it){ rm += (it.value()); } rm = rm / ncols; for (Eigen::SparseMatrix::InnerIterator it(mat,k); it; ++it){ v += pow((it.value()) - rm, 2); nnZero += 1; } v = (v + (ncols - nnZero) * pow(rm, 2)) / (ncols - 1); rowdisp[k] = v; } return(rowdisp); } //cols_idx should be 0-indexed // [[Rcpp::export(rng = false)]] Eigen::SparseMatrix ReplaceColsC(Eigen::SparseMatrix mat, NumericVector col_idx, Eigen::SparseMatrix replacement){ int rep_idx = 0; for(auto const &ci : col_idx){ mat.col(ci) = replacement.col(rep_idx); rep_idx += 1; } return(mat); } template std::vector sort_indexes(const std::vector &v) { // initialize original index locations std::vector idx(v.size()); std::iota(idx.begin(), idx.end(), 0); std::stable_sort(idx.begin(), idx.end(), [&v](size_t i1, size_t i2) {return v[i1] < v[i2];}); return idx; } // [[Rcpp::export(rng = false)]] List GraphToNeighborHelper(Eigen::SparseMatrix mat) { mat = mat.transpose(); //determine the number of neighbors int n = 0; for(Eigen::SparseMatrix::InnerIterator it(mat, 0); it; ++it) { n += 1; } Eigen::MatrixXd nn_idx(mat.rows(), n); Eigen::MatrixXd nn_dist(mat.rows(), n); for (int k=0; k row_idx; std::vector row_dist; row_idx.reserve(n); row_dist.reserve(n); for (Eigen::SparseMatrix::InnerIterator it(mat,k); it; ++it) { if (n_k > (n-1)) { Rcpp::stop("Not all cells have an equal number of neighbors."); } row_idx.push_back(it.row() + 1); row_dist.push_back(it.value()); n_k += 1; } if (n_k != n) { Rcpp::Rcout << n << ":::" << n_k << std::endl; Rcpp::stop("Not all cells have an equal number of neighbors."); } //order the idx based on dist std::vector idx_order = sort_indexes(row_dist); for(int i = 0; i < n; ++i) { nn_idx(k, i) = row_idx[idx_order[i]]; nn_dist(k, i) = row_dist[idx_order[i]]; } } List neighbors = List::create(nn_idx, nn_dist); return(neighbors); } Seurat/src/snn.cpp0000644000176200001440000001032614005656653013622 0ustar liggesusers#include #include "data_manipulation.h" #include #include #include #include #include #include using namespace Rcpp; // [[Rcpp::depends(RcppEigen)]] // [[Rcpp::depends(RcppProgress)]] typedef Eigen::Triplet T; // [[Rcpp::export(rng = false)]] Eigen::SparseMatrix ComputeSNN(Eigen::MatrixXd nn_ranked, double prune) { std::vector tripletList; int k = nn_ranked.cols(); tripletList.reserve(nn_ranked.rows() * nn_ranked.cols()); for(int j=0; j SNN(nn_ranked.rows(), nn_ranked.rows()); SNN.setFromTriplets(tripletList.begin(), tripletList.end()); SNN = SNN * (SNN.transpose()); for (int i=0; i < SNN.outerSize(); ++i){ for (Eigen::SparseMatrix::InnerIterator it(SNN, i); it; ++it){ it.valueRef() = it.value()/(k + (k - it.value())); if(it.value() < prune){ it.valueRef() = 0; } } } SNN.prune(0.0); // actually remove pruned values return SNN; } // [[Rcpp::export(rng = false)]] void WriteEdgeFile(Eigen::SparseMatrix snn, String filename, bool display_progress){ if (display_progress == true) { Rcpp::Rcerr << "Writing SNN as edge file" << std::endl; } // Write out lower triangle std::ofstream output; output.open(filename); Progress p(snn.outerSize(), display_progress); for (int k=0; k < snn.outerSize(); ++k){ p.increment(); for (Eigen::SparseMatrix::InnerIterator it(snn, k); it; ++it){ if(it.col() >= it.row()){ continue; } output << std::setprecision(15) << it.col() << "\t" << it.row() << "\t" << 
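// Each line of the edge file is: column index <TAB> row index <TAB> SNN weight
// (0-based indices; only the lower triangle of the symmetric SNN matrix is written)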
it.value() << "\n"; } } output.close(); } // Wrapper function so that we don't have to go back into R before writing to file // [[Rcpp::export(rng = false)]] Eigen::SparseMatrix DirectSNNToFile(Eigen::MatrixXd nn_ranked, double prune, bool display_progress, String filename) { Eigen::SparseMatrix SNN = ComputeSNN(nn_ranked, prune); WriteEdgeFile(SNN, filename, display_progress); return SNN; } template std::vector sort_indexes(const std::vector &v) { // initialize original index locations std::vector idx(v.size()); std::iota(idx.begin(), idx.end(), 0); std::stable_sort(idx.begin(), idx.end(), [&v](size_t i1, size_t i2) {return v[i1] < v[i2];}); return idx; } // [[Rcpp::export]] std::vector SNN_SmallestNonzero_Dist( Eigen::SparseMatrix snn, Eigen::MatrixXd mat, int n, std::vector nearest_dist ) { std::vector results; for (int i=0; i < snn.outerSize(); ++i){ // create vectors to store the nonzero snn elements and their indices std::vector nonzero; std::vector nonzero_idx; for (Eigen::SparseMatrix::InnerIterator it(snn, i); it; ++it) { nonzero.push_back(it.value()); nonzero_idx.push_back(it.row()); } std::vector nonzero_order = sort_indexes(nonzero); int n_i = n; if (n_i > nonzero_order.size()) n_i = nonzero_order.size(); std::vector dists; for (int j = 0; j < nonzero_order.size(); ++j) { // compute euclidean distances to cells with small edge weights // if multiple entries have same value as nth element, calc dist to all size_t cell = nonzero_idx[nonzero_order[j]]; if(dists.size() < n_i || nonzero[nonzero_order[j]] == nonzero[nonzero_order[n_i-1]]) { double res = (mat.row(cell) - mat.row(i)).norm(); if (nearest_dist[i] > 0) { res = res - nearest_dist[i]; if (res < 0) res = 0; } dists.push_back(res); } else { break; } } double avg_dist; if (dists.size() > n_i) { std::sort(dists.rbegin(), dists.rend()); avg_dist = std::accumulate(dists.begin(), dists.begin() + n_i, 0.0) / n_i; } else { avg_dist = std::accumulate(dists.begin(), dists.end(), 0.0) / dists.size(); } results.push_back(avg_dist); } return results; } Seurat/src/RcppExports.cpp0000644000176200001440000006036314165435365015325 0ustar liggesusers// Generated by using Rcpp::compileAttributes() -> do not edit by hand // Generator token: 10BE3573-1514-4C36-9D1C-5A225CD40393 #include #include using namespace Rcpp; #ifdef RCPP_USE_GLOBAL_ROSTREAM Rcpp::Rostream& Rcpp::Rcout = Rcpp::Rcpp_cout_get(); Rcpp::Rostream& Rcpp::Rcerr = Rcpp::Rcpp_cerr_get(); #endif // RunModularityClusteringCpp IntegerVector RunModularityClusteringCpp(Eigen::SparseMatrix SNN, int modularityFunction, double resolution, int algorithm, int nRandomStarts, int nIterations, int randomSeed, bool printOutput, std::string edgefilename); RcppExport SEXP _Seurat_RunModularityClusteringCpp(SEXP SNNSEXP, SEXP modularityFunctionSEXP, SEXP resolutionSEXP, SEXP algorithmSEXP, SEXP nRandomStartsSEXP, SEXP nIterationsSEXP, SEXP randomSeedSEXP, SEXP printOutputSEXP, SEXP edgefilenameSEXP) { BEGIN_RCPP Rcpp::RObject rcpp_result_gen; Rcpp::RNGScope rcpp_rngScope_gen; Rcpp::traits::input_parameter< Eigen::SparseMatrix >::type SNN(SNNSEXP); Rcpp::traits::input_parameter< int >::type modularityFunction(modularityFunctionSEXP); Rcpp::traits::input_parameter< double >::type resolution(resolutionSEXP); Rcpp::traits::input_parameter< int >::type algorithm(algorithmSEXP); Rcpp::traits::input_parameter< int >::type nRandomStarts(nRandomStartsSEXP); Rcpp::traits::input_parameter< int >::type nIterations(nIterationsSEXP); Rcpp::traits::input_parameter< int >::type randomSeed(randomSeedSEXP); 
Rcpp::traits::input_parameter< bool >::type printOutput(printOutputSEXP); Rcpp::traits::input_parameter< std::string >::type edgefilename(edgefilenameSEXP); rcpp_result_gen = Rcpp::wrap(RunModularityClusteringCpp(SNN, modularityFunction, resolution, algorithm, nRandomStarts, nIterations, randomSeed, printOutput, edgefilename)); return rcpp_result_gen; END_RCPP } // RunUMISampling Eigen::SparseMatrix RunUMISampling(Eigen::SparseMatrix data, int sample_val, bool upsample, bool display_progress); RcppExport SEXP _Seurat_RunUMISampling(SEXP dataSEXP, SEXP sample_valSEXP, SEXP upsampleSEXP, SEXP display_progressSEXP) { BEGIN_RCPP Rcpp::RObject rcpp_result_gen; Rcpp::RNGScope rcpp_rngScope_gen; Rcpp::traits::input_parameter< Eigen::SparseMatrix >::type data(dataSEXP); Rcpp::traits::input_parameter< int >::type sample_val(sample_valSEXP); Rcpp::traits::input_parameter< bool >::type upsample(upsampleSEXP); Rcpp::traits::input_parameter< bool >::type display_progress(display_progressSEXP); rcpp_result_gen = Rcpp::wrap(RunUMISampling(data, sample_val, upsample, display_progress)); return rcpp_result_gen; END_RCPP } // RunUMISamplingPerCell Eigen::SparseMatrix RunUMISamplingPerCell(Eigen::SparseMatrix data, NumericVector sample_val, bool upsample, bool display_progress); RcppExport SEXP _Seurat_RunUMISamplingPerCell(SEXP dataSEXP, SEXP sample_valSEXP, SEXP upsampleSEXP, SEXP display_progressSEXP) { BEGIN_RCPP Rcpp::RObject rcpp_result_gen; Rcpp::RNGScope rcpp_rngScope_gen; Rcpp::traits::input_parameter< Eigen::SparseMatrix >::type data(dataSEXP); Rcpp::traits::input_parameter< NumericVector >::type sample_val(sample_valSEXP); Rcpp::traits::input_parameter< bool >::type upsample(upsampleSEXP); Rcpp::traits::input_parameter< bool >::type display_progress(display_progressSEXP); rcpp_result_gen = Rcpp::wrap(RunUMISamplingPerCell(data, sample_val, upsample, display_progress)); return rcpp_result_gen; END_RCPP } // RowMergeMatrices Eigen::SparseMatrix RowMergeMatrices(Eigen::SparseMatrix mat1, Eigen::SparseMatrix mat2, std::vector< std::string > mat1_rownames, std::vector< std::string > mat2_rownames, std::vector< std::string > all_rownames); RcppExport SEXP _Seurat_RowMergeMatrices(SEXP mat1SEXP, SEXP mat2SEXP, SEXP mat1_rownamesSEXP, SEXP mat2_rownamesSEXP, SEXP all_rownamesSEXP) { BEGIN_RCPP Rcpp::RObject rcpp_result_gen; Rcpp::traits::input_parameter< Eigen::SparseMatrix >::type mat1(mat1SEXP); Rcpp::traits::input_parameter< Eigen::SparseMatrix >::type mat2(mat2SEXP); Rcpp::traits::input_parameter< std::vector< std::string > >::type mat1_rownames(mat1_rownamesSEXP); Rcpp::traits::input_parameter< std::vector< std::string > >::type mat2_rownames(mat2_rownamesSEXP); Rcpp::traits::input_parameter< std::vector< std::string > >::type all_rownames(all_rownamesSEXP); rcpp_result_gen = Rcpp::wrap(RowMergeMatrices(mat1, mat2, mat1_rownames, mat2_rownames, all_rownames)); return rcpp_result_gen; END_RCPP } // LogNorm Eigen::SparseMatrix LogNorm(Eigen::SparseMatrix data, int scale_factor, bool display_progress); RcppExport SEXP _Seurat_LogNorm(SEXP dataSEXP, SEXP scale_factorSEXP, SEXP display_progressSEXP) { BEGIN_RCPP Rcpp::RObject rcpp_result_gen; Rcpp::traits::input_parameter< Eigen::SparseMatrix >::type data(dataSEXP); Rcpp::traits::input_parameter< int >::type scale_factor(scale_factorSEXP); Rcpp::traits::input_parameter< bool >::type display_progress(display_progressSEXP); rcpp_result_gen = Rcpp::wrap(LogNorm(data, scale_factor, display_progress)); return rcpp_result_gen; END_RCPP } // Standardize 
NumericMatrix Standardize(Eigen::Map mat, bool display_progress); RcppExport SEXP _Seurat_Standardize(SEXP matSEXP, SEXP display_progressSEXP) { BEGIN_RCPP Rcpp::RObject rcpp_result_gen; Rcpp::traits::input_parameter< Eigen::Map >::type mat(matSEXP); Rcpp::traits::input_parameter< bool >::type display_progress(display_progressSEXP); rcpp_result_gen = Rcpp::wrap(Standardize(mat, display_progress)); return rcpp_result_gen; END_RCPP } // FastSparseRowScale Eigen::MatrixXd FastSparseRowScale(Eigen::SparseMatrix mat, bool scale, bool center, double scale_max, bool display_progress); RcppExport SEXP _Seurat_FastSparseRowScale(SEXP matSEXP, SEXP scaleSEXP, SEXP centerSEXP, SEXP scale_maxSEXP, SEXP display_progressSEXP) { BEGIN_RCPP Rcpp::RObject rcpp_result_gen; Rcpp::traits::input_parameter< Eigen::SparseMatrix >::type mat(matSEXP); Rcpp::traits::input_parameter< bool >::type scale(scaleSEXP); Rcpp::traits::input_parameter< bool >::type center(centerSEXP); Rcpp::traits::input_parameter< double >::type scale_max(scale_maxSEXP); Rcpp::traits::input_parameter< bool >::type display_progress(display_progressSEXP); rcpp_result_gen = Rcpp::wrap(FastSparseRowScale(mat, scale, center, scale_max, display_progress)); return rcpp_result_gen; END_RCPP } // FastSparseRowScaleWithKnownStats Eigen::MatrixXd FastSparseRowScaleWithKnownStats(Eigen::SparseMatrix mat, NumericVector mu, NumericVector sigma, bool scale, bool center, double scale_max, bool display_progress); RcppExport SEXP _Seurat_FastSparseRowScaleWithKnownStats(SEXP matSEXP, SEXP muSEXP, SEXP sigmaSEXP, SEXP scaleSEXP, SEXP centerSEXP, SEXP scale_maxSEXP, SEXP display_progressSEXP) { BEGIN_RCPP Rcpp::RObject rcpp_result_gen; Rcpp::traits::input_parameter< Eigen::SparseMatrix >::type mat(matSEXP); Rcpp::traits::input_parameter< NumericVector >::type mu(muSEXP); Rcpp::traits::input_parameter< NumericVector >::type sigma(sigmaSEXP); Rcpp::traits::input_parameter< bool >::type scale(scaleSEXP); Rcpp::traits::input_parameter< bool >::type center(centerSEXP); Rcpp::traits::input_parameter< double >::type scale_max(scale_maxSEXP); Rcpp::traits::input_parameter< bool >::type display_progress(display_progressSEXP); rcpp_result_gen = Rcpp::wrap(FastSparseRowScaleWithKnownStats(mat, mu, sigma, scale, center, scale_max, display_progress)); return rcpp_result_gen; END_RCPP } // FastCov Eigen::MatrixXd FastCov(Eigen::MatrixXd mat, bool center); RcppExport SEXP _Seurat_FastCov(SEXP matSEXP, SEXP centerSEXP) { BEGIN_RCPP Rcpp::RObject rcpp_result_gen; Rcpp::traits::input_parameter< Eigen::MatrixXd >::type mat(matSEXP); Rcpp::traits::input_parameter< bool >::type center(centerSEXP); rcpp_result_gen = Rcpp::wrap(FastCov(mat, center)); return rcpp_result_gen; END_RCPP } // FastCovMats Eigen::MatrixXd FastCovMats(Eigen::MatrixXd mat1, Eigen::MatrixXd mat2, bool center); RcppExport SEXP _Seurat_FastCovMats(SEXP mat1SEXP, SEXP mat2SEXP, SEXP centerSEXP) { BEGIN_RCPP Rcpp::RObject rcpp_result_gen; Rcpp::traits::input_parameter< Eigen::MatrixXd >::type mat1(mat1SEXP); Rcpp::traits::input_parameter< Eigen::MatrixXd >::type mat2(mat2SEXP); Rcpp::traits::input_parameter< bool >::type center(centerSEXP); rcpp_result_gen = Rcpp::wrap(FastCovMats(mat1, mat2, center)); return rcpp_result_gen; END_RCPP } // FastRBind Eigen::MatrixXd FastRBind(Eigen::MatrixXd mat1, Eigen::MatrixXd mat2); RcppExport SEXP _Seurat_FastRBind(SEXP mat1SEXP, SEXP mat2SEXP) { BEGIN_RCPP Rcpp::RObject rcpp_result_gen; Rcpp::traits::input_parameter< Eigen::MatrixXd >::type mat1(mat1SEXP); 
Rcpp::traits::input_parameter< Eigen::MatrixXd >::type mat2(mat2SEXP); rcpp_result_gen = Rcpp::wrap(FastRBind(mat1, mat2)); return rcpp_result_gen; END_RCPP } // FastExpMean Eigen::VectorXd FastExpMean(Eigen::SparseMatrix mat, bool display_progress); RcppExport SEXP _Seurat_FastExpMean(SEXP matSEXP, SEXP display_progressSEXP) { BEGIN_RCPP Rcpp::RObject rcpp_result_gen; Rcpp::traits::input_parameter< Eigen::SparseMatrix >::type mat(matSEXP); Rcpp::traits::input_parameter< bool >::type display_progress(display_progressSEXP); rcpp_result_gen = Rcpp::wrap(FastExpMean(mat, display_progress)); return rcpp_result_gen; END_RCPP } // SparseRowVar2 NumericVector SparseRowVar2(Eigen::SparseMatrix mat, NumericVector mu, bool display_progress); RcppExport SEXP _Seurat_SparseRowVar2(SEXP matSEXP, SEXP muSEXP, SEXP display_progressSEXP) { BEGIN_RCPP Rcpp::RObject rcpp_result_gen; Rcpp::traits::input_parameter< Eigen::SparseMatrix >::type mat(matSEXP); Rcpp::traits::input_parameter< NumericVector >::type mu(muSEXP); Rcpp::traits::input_parameter< bool >::type display_progress(display_progressSEXP); rcpp_result_gen = Rcpp::wrap(SparseRowVar2(mat, mu, display_progress)); return rcpp_result_gen; END_RCPP } // SparseRowVarStd NumericVector SparseRowVarStd(Eigen::SparseMatrix mat, NumericVector mu, NumericVector sd, double vmax, bool display_progress); RcppExport SEXP _Seurat_SparseRowVarStd(SEXP matSEXP, SEXP muSEXP, SEXP sdSEXP, SEXP vmaxSEXP, SEXP display_progressSEXP) { BEGIN_RCPP Rcpp::RObject rcpp_result_gen; Rcpp::traits::input_parameter< Eigen::SparseMatrix >::type mat(matSEXP); Rcpp::traits::input_parameter< NumericVector >::type mu(muSEXP); Rcpp::traits::input_parameter< NumericVector >::type sd(sdSEXP); Rcpp::traits::input_parameter< double >::type vmax(vmaxSEXP); Rcpp::traits::input_parameter< bool >::type display_progress(display_progressSEXP); rcpp_result_gen = Rcpp::wrap(SparseRowVarStd(mat, mu, sd, vmax, display_progress)); return rcpp_result_gen; END_RCPP } // FastLogVMR Eigen::VectorXd FastLogVMR(Eigen::SparseMatrix mat, bool display_progress); RcppExport SEXP _Seurat_FastLogVMR(SEXP matSEXP, SEXP display_progressSEXP) { BEGIN_RCPP Rcpp::RObject rcpp_result_gen; Rcpp::traits::input_parameter< Eigen::SparseMatrix >::type mat(matSEXP); Rcpp::traits::input_parameter< bool >::type display_progress(display_progressSEXP); rcpp_result_gen = Rcpp::wrap(FastLogVMR(mat, display_progress)); return rcpp_result_gen; END_RCPP } // RowVar NumericVector RowVar(Eigen::Map x); RcppExport SEXP _Seurat_RowVar(SEXP xSEXP) { BEGIN_RCPP Rcpp::RObject rcpp_result_gen; Rcpp::traits::input_parameter< Eigen::Map >::type x(xSEXP); rcpp_result_gen = Rcpp::wrap(RowVar(x)); return rcpp_result_gen; END_RCPP } // SparseRowVar Eigen::VectorXd SparseRowVar(Eigen::SparseMatrix mat, bool display_progress); RcppExport SEXP _Seurat_SparseRowVar(SEXP matSEXP, SEXP display_progressSEXP) { BEGIN_RCPP Rcpp::RObject rcpp_result_gen; Rcpp::traits::input_parameter< Eigen::SparseMatrix >::type mat(matSEXP); Rcpp::traits::input_parameter< bool >::type display_progress(display_progressSEXP); rcpp_result_gen = Rcpp::wrap(SparseRowVar(mat, display_progress)); return rcpp_result_gen; END_RCPP } // ReplaceColsC Eigen::SparseMatrix ReplaceColsC(Eigen::SparseMatrix mat, NumericVector col_idx, Eigen::SparseMatrix replacement); RcppExport SEXP _Seurat_ReplaceColsC(SEXP matSEXP, SEXP col_idxSEXP, SEXP replacementSEXP) { BEGIN_RCPP Rcpp::RObject rcpp_result_gen; Rcpp::traits::input_parameter< Eigen::SparseMatrix >::type mat(matSEXP); 
Rcpp::traits::input_parameter< NumericVector >::type col_idx(col_idxSEXP); Rcpp::traits::input_parameter< Eigen::SparseMatrix >::type replacement(replacementSEXP); rcpp_result_gen = Rcpp::wrap(ReplaceColsC(mat, col_idx, replacement)); return rcpp_result_gen; END_RCPP } // GraphToNeighborHelper List GraphToNeighborHelper(Eigen::SparseMatrix mat); RcppExport SEXP _Seurat_GraphToNeighborHelper(SEXP matSEXP) { BEGIN_RCPP Rcpp::RObject rcpp_result_gen; Rcpp::traits::input_parameter< Eigen::SparseMatrix >::type mat(matSEXP); rcpp_result_gen = Rcpp::wrap(GraphToNeighborHelper(mat)); return rcpp_result_gen; END_RCPP } // fast_dist List fast_dist(NumericMatrix x, NumericMatrix y, List n); RcppExport SEXP _Seurat_fast_dist(SEXP xSEXP, SEXP ySEXP, SEXP nSEXP) { BEGIN_RCPP Rcpp::RObject rcpp_result_gen; Rcpp::RNGScope rcpp_rngScope_gen; Rcpp::traits::input_parameter< NumericMatrix >::type x(xSEXP); Rcpp::traits::input_parameter< NumericMatrix >::type y(ySEXP); Rcpp::traits::input_parameter< List >::type n(nSEXP); rcpp_result_gen = Rcpp::wrap(fast_dist(x, y, n)); return rcpp_result_gen; END_RCPP } // FindWeightsC Eigen::SparseMatrix FindWeightsC(NumericVector cells2, Eigen::MatrixXd distances, std::vector anchor_cells2, std::vector integration_matrix_rownames, Eigen::MatrixXd cell_index, Eigen::VectorXd anchor_score, double min_dist, double sd, bool display_progress); RcppExport SEXP _Seurat_FindWeightsC(SEXP cells2SEXP, SEXP distancesSEXP, SEXP anchor_cells2SEXP, SEXP integration_matrix_rownamesSEXP, SEXP cell_indexSEXP, SEXP anchor_scoreSEXP, SEXP min_distSEXP, SEXP sdSEXP, SEXP display_progressSEXP) { BEGIN_RCPP Rcpp::RObject rcpp_result_gen; Rcpp::traits::input_parameter< NumericVector >::type cells2(cells2SEXP); Rcpp::traits::input_parameter< Eigen::MatrixXd >::type distances(distancesSEXP); Rcpp::traits::input_parameter< std::vector >::type anchor_cells2(anchor_cells2SEXP); Rcpp::traits::input_parameter< std::vector >::type integration_matrix_rownames(integration_matrix_rownamesSEXP); Rcpp::traits::input_parameter< Eigen::MatrixXd >::type cell_index(cell_indexSEXP); Rcpp::traits::input_parameter< Eigen::VectorXd >::type anchor_score(anchor_scoreSEXP); Rcpp::traits::input_parameter< double >::type min_dist(min_distSEXP); Rcpp::traits::input_parameter< double >::type sd(sdSEXP); Rcpp::traits::input_parameter< bool >::type display_progress(display_progressSEXP); rcpp_result_gen = Rcpp::wrap(FindWeightsC(cells2, distances, anchor_cells2, integration_matrix_rownames, cell_index, anchor_score, min_dist, sd, display_progress)); return rcpp_result_gen; END_RCPP } // IntegrateDataC Eigen::SparseMatrix IntegrateDataC(Eigen::SparseMatrix integration_matrix, Eigen::SparseMatrix weights, Eigen::SparseMatrix expression_cells2); RcppExport SEXP _Seurat_IntegrateDataC(SEXP integration_matrixSEXP, SEXP weightsSEXP, SEXP expression_cells2SEXP) { BEGIN_RCPP Rcpp::RObject rcpp_result_gen; Rcpp::traits::input_parameter< Eigen::SparseMatrix >::type integration_matrix(integration_matrixSEXP); Rcpp::traits::input_parameter< Eigen::SparseMatrix >::type weights(weightsSEXP); Rcpp::traits::input_parameter< Eigen::SparseMatrix >::type expression_cells2(expression_cells2SEXP); rcpp_result_gen = Rcpp::wrap(IntegrateDataC(integration_matrix, weights, expression_cells2)); return rcpp_result_gen; END_RCPP } // ScoreHelper std::vector ScoreHelper(Eigen::SparseMatrix snn, Eigen::MatrixXd query_pca, Eigen::MatrixXd query_dists, Eigen::MatrixXd corrected_nns, int k_snn, bool subtract_first_nn, bool display_progress); RcppExport SEXP 
_Seurat_ScoreHelper(SEXP snnSEXP, SEXP query_pcaSEXP, SEXP query_distsSEXP, SEXP corrected_nnsSEXP, SEXP k_snnSEXP, SEXP subtract_first_nnSEXP, SEXP display_progressSEXP) { BEGIN_RCPP Rcpp::RObject rcpp_result_gen; Rcpp::RNGScope rcpp_rngScope_gen; Rcpp::traits::input_parameter< Eigen::SparseMatrix >::type snn(snnSEXP); Rcpp::traits::input_parameter< Eigen::MatrixXd >::type query_pca(query_pcaSEXP); Rcpp::traits::input_parameter< Eigen::MatrixXd >::type query_dists(query_distsSEXP); Rcpp::traits::input_parameter< Eigen::MatrixXd >::type corrected_nns(corrected_nnsSEXP); Rcpp::traits::input_parameter< int >::type k_snn(k_snnSEXP); Rcpp::traits::input_parameter< bool >::type subtract_first_nn(subtract_first_nnSEXP); Rcpp::traits::input_parameter< bool >::type display_progress(display_progressSEXP); rcpp_result_gen = Rcpp::wrap(ScoreHelper(snn, query_pca, query_dists, corrected_nns, k_snn, subtract_first_nn, display_progress)); return rcpp_result_gen; END_RCPP } // ComputeSNN Eigen::SparseMatrix ComputeSNN(Eigen::MatrixXd nn_ranked, double prune); RcppExport SEXP _Seurat_ComputeSNN(SEXP nn_rankedSEXP, SEXP pruneSEXP) { BEGIN_RCPP Rcpp::RObject rcpp_result_gen; Rcpp::traits::input_parameter< Eigen::MatrixXd >::type nn_ranked(nn_rankedSEXP); Rcpp::traits::input_parameter< double >::type prune(pruneSEXP); rcpp_result_gen = Rcpp::wrap(ComputeSNN(nn_ranked, prune)); return rcpp_result_gen; END_RCPP } // WriteEdgeFile void WriteEdgeFile(Eigen::SparseMatrix snn, String filename, bool display_progress); RcppExport SEXP _Seurat_WriteEdgeFile(SEXP snnSEXP, SEXP filenameSEXP, SEXP display_progressSEXP) { BEGIN_RCPP Rcpp::traits::input_parameter< Eigen::SparseMatrix >::type snn(snnSEXP); Rcpp::traits::input_parameter< String >::type filename(filenameSEXP); Rcpp::traits::input_parameter< bool >::type display_progress(display_progressSEXP); WriteEdgeFile(snn, filename, display_progress); return R_NilValue; END_RCPP } // DirectSNNToFile Eigen::SparseMatrix DirectSNNToFile(Eigen::MatrixXd nn_ranked, double prune, bool display_progress, String filename); RcppExport SEXP _Seurat_DirectSNNToFile(SEXP nn_rankedSEXP, SEXP pruneSEXP, SEXP display_progressSEXP, SEXP filenameSEXP) { BEGIN_RCPP Rcpp::RObject rcpp_result_gen; Rcpp::traits::input_parameter< Eigen::MatrixXd >::type nn_ranked(nn_rankedSEXP); Rcpp::traits::input_parameter< double >::type prune(pruneSEXP); Rcpp::traits::input_parameter< bool >::type display_progress(display_progressSEXP); Rcpp::traits::input_parameter< String >::type filename(filenameSEXP); rcpp_result_gen = Rcpp::wrap(DirectSNNToFile(nn_ranked, prune, display_progress, filename)); return rcpp_result_gen; END_RCPP } // SNN_SmallestNonzero_Dist std::vector SNN_SmallestNonzero_Dist(Eigen::SparseMatrix snn, Eigen::MatrixXd mat, int n, std::vector nearest_dist); RcppExport SEXP _Seurat_SNN_SmallestNonzero_Dist(SEXP snnSEXP, SEXP matSEXP, SEXP nSEXP, SEXP nearest_distSEXP) { BEGIN_RCPP Rcpp::RObject rcpp_result_gen; Rcpp::RNGScope rcpp_rngScope_gen; Rcpp::traits::input_parameter< Eigen::SparseMatrix >::type snn(snnSEXP); Rcpp::traits::input_parameter< Eigen::MatrixXd >::type mat(matSEXP); Rcpp::traits::input_parameter< int >::type n(nSEXP); Rcpp::traits::input_parameter< std::vector >::type nearest_dist(nearest_distSEXP); rcpp_result_gen = Rcpp::wrap(SNN_SmallestNonzero_Dist(snn, mat, n, nearest_dist)); return rcpp_result_gen; END_RCPP } // row_sum_dgcmatrix NumericVector row_sum_dgcmatrix(NumericVector& x, IntegerVector& i, int rows, int cols); RcppExport SEXP _Seurat_row_sum_dgcmatrix(SEXP 
xSEXP, SEXP iSEXP, SEXP rowsSEXP, SEXP colsSEXP) { BEGIN_RCPP Rcpp::RObject rcpp_result_gen; Rcpp::RNGScope rcpp_rngScope_gen; Rcpp::traits::input_parameter< NumericVector& >::type x(xSEXP); Rcpp::traits::input_parameter< IntegerVector& >::type i(iSEXP); Rcpp::traits::input_parameter< int >::type rows(rowsSEXP); Rcpp::traits::input_parameter< int >::type cols(colsSEXP); rcpp_result_gen = Rcpp::wrap(row_sum_dgcmatrix(x, i, rows, cols)); return rcpp_result_gen; END_RCPP } // row_mean_dgcmatrix NumericVector row_mean_dgcmatrix(NumericVector& x, IntegerVector& i, int rows, int cols); RcppExport SEXP _Seurat_row_mean_dgcmatrix(SEXP xSEXP, SEXP iSEXP, SEXP rowsSEXP, SEXP colsSEXP) { BEGIN_RCPP Rcpp::RObject rcpp_result_gen; Rcpp::RNGScope rcpp_rngScope_gen; Rcpp::traits::input_parameter< NumericVector& >::type x(xSEXP); Rcpp::traits::input_parameter< IntegerVector& >::type i(iSEXP); Rcpp::traits::input_parameter< int >::type rows(rowsSEXP); Rcpp::traits::input_parameter< int >::type cols(colsSEXP); rcpp_result_gen = Rcpp::wrap(row_mean_dgcmatrix(x, i, rows, cols)); return rcpp_result_gen; END_RCPP } // row_var_dgcmatrix NumericVector row_var_dgcmatrix(NumericVector& x, IntegerVector& i, int rows, int cols); RcppExport SEXP _Seurat_row_var_dgcmatrix(SEXP xSEXP, SEXP iSEXP, SEXP rowsSEXP, SEXP colsSEXP) { BEGIN_RCPP Rcpp::RObject rcpp_result_gen; Rcpp::RNGScope rcpp_rngScope_gen; Rcpp::traits::input_parameter< NumericVector& >::type x(xSEXP); Rcpp::traits::input_parameter< IntegerVector& >::type i(iSEXP); Rcpp::traits::input_parameter< int >::type rows(rowsSEXP); Rcpp::traits::input_parameter< int >::type cols(colsSEXP); rcpp_result_gen = Rcpp::wrap(row_var_dgcmatrix(x, i, rows, cols)); return rcpp_result_gen; END_RCPP } RcppExport SEXP isnull(SEXP); static const R_CallMethodDef CallEntries[] = { {"_Seurat_RunModularityClusteringCpp", (DL_FUNC) &_Seurat_RunModularityClusteringCpp, 9}, {"_Seurat_RunUMISampling", (DL_FUNC) &_Seurat_RunUMISampling, 4}, {"_Seurat_RunUMISamplingPerCell", (DL_FUNC) &_Seurat_RunUMISamplingPerCell, 4}, {"_Seurat_RowMergeMatrices", (DL_FUNC) &_Seurat_RowMergeMatrices, 5}, {"_Seurat_LogNorm", (DL_FUNC) &_Seurat_LogNorm, 3}, {"_Seurat_Standardize", (DL_FUNC) &_Seurat_Standardize, 2}, {"_Seurat_FastSparseRowScale", (DL_FUNC) &_Seurat_FastSparseRowScale, 5}, {"_Seurat_FastSparseRowScaleWithKnownStats", (DL_FUNC) &_Seurat_FastSparseRowScaleWithKnownStats, 7}, {"_Seurat_FastCov", (DL_FUNC) &_Seurat_FastCov, 2}, {"_Seurat_FastCovMats", (DL_FUNC) &_Seurat_FastCovMats, 3}, {"_Seurat_FastRBind", (DL_FUNC) &_Seurat_FastRBind, 2}, {"_Seurat_FastExpMean", (DL_FUNC) &_Seurat_FastExpMean, 2}, {"_Seurat_SparseRowVar2", (DL_FUNC) &_Seurat_SparseRowVar2, 3}, {"_Seurat_SparseRowVarStd", (DL_FUNC) &_Seurat_SparseRowVarStd, 5}, {"_Seurat_FastLogVMR", (DL_FUNC) &_Seurat_FastLogVMR, 2}, {"_Seurat_RowVar", (DL_FUNC) &_Seurat_RowVar, 1}, {"_Seurat_SparseRowVar", (DL_FUNC) &_Seurat_SparseRowVar, 2}, {"_Seurat_ReplaceColsC", (DL_FUNC) &_Seurat_ReplaceColsC, 3}, {"_Seurat_GraphToNeighborHelper", (DL_FUNC) &_Seurat_GraphToNeighborHelper, 1}, {"_Seurat_fast_dist", (DL_FUNC) &_Seurat_fast_dist, 3}, {"_Seurat_FindWeightsC", (DL_FUNC) &_Seurat_FindWeightsC, 9}, {"_Seurat_IntegrateDataC", (DL_FUNC) &_Seurat_IntegrateDataC, 3}, {"_Seurat_ScoreHelper", (DL_FUNC) &_Seurat_ScoreHelper, 7}, {"_Seurat_ComputeSNN", (DL_FUNC) &_Seurat_ComputeSNN, 2}, {"_Seurat_WriteEdgeFile", (DL_FUNC) &_Seurat_WriteEdgeFile, 3}, {"_Seurat_DirectSNNToFile", (DL_FUNC) &_Seurat_DirectSNNToFile, 4}, 
{"_Seurat_SNN_SmallestNonzero_Dist", (DL_FUNC) &_Seurat_SNN_SmallestNonzero_Dist, 4}, {"_Seurat_row_sum_dgcmatrix", (DL_FUNC) &_Seurat_row_sum_dgcmatrix, 4}, {"_Seurat_row_mean_dgcmatrix", (DL_FUNC) &_Seurat_row_mean_dgcmatrix, 4}, {"_Seurat_row_var_dgcmatrix", (DL_FUNC) &_Seurat_row_var_dgcmatrix, 4}, {"isnull", (DL_FUNC) &isnull, 1}, {NULL, NULL, 0} }; RcppExport void R_init_Seurat(DllInfo *dll) { R_registerRoutines(dll, NULL, CallEntries, NULL, NULL); R_useDynamicSymbols(dll, FALSE); } Seurat/src/RModularityOptimizer.cpp0000644000176200001440000001272113712563445017203 0ustar liggesusers#include #include #include #include #include #include #include #include #include #include #include "ModularityOptimizer.h" using namespace ModularityOptimizer; using namespace std::chrono; using namespace Rcpp; // [[Rcpp::depends(RcppEigen)]] // [[Rcpp::depends(RcppProgress)]] // [[Rcpp::export]] IntegerVector RunModularityClusteringCpp(Eigen::SparseMatrix SNN, int modularityFunction, double resolution, int algorithm, int nRandomStarts, int nIterations, int randomSeed, bool printOutput, std::string edgefilename) { // validate arguments if(modularityFunction != 1 && modularityFunction != 2) stop("Modularity parameter must be equal to 1 or 2."); if(algorithm != 1 && algorithm !=2 && algorithm !=3 && algorithm !=4) stop("Algorithm for modularity optimization must be 1, 2, 3, or 4"); if(nRandomStarts < 1) stop("Have to have at least one start"); if(nIterations < 1) stop("Need at least one interation"); if (modularityFunction == 2 && resolution > 1.0) stop("error: resolution<1 for alternative modularity"); try { bool update; double modularity, maxModularity, resolution2; int i, j; std::string msg = "Modularity Optimizer version 1.3.0 by Ludo Waltman and Nees Jan van Eck"; if (printOutput) Rcout << msg << std::endl << std::endl; // Load netwrok std::shared_ptr network; if(edgefilename != "") { if (printOutput) Rcout << "Reading input file..." << std::endl << std::endl; try{ network = readInputFile(edgefilename, modularityFunction); } catch(...) { stop("Could not parse edge file."); } } else { // Load lower triangle int network_size = (SNN.nonZeros() / 2) + 3; IVector node1; IVector node2; DVector edgeweights; node1.reserve(network_size); node2.reserve(network_size); edgeweights.reserve(network_size); for (int k=0; k < SNN.outerSize(); ++k){ for (Eigen::SparseMatrix::InnerIterator it(SNN, k); it; ++it){ if(it.col() >= it.row()){ continue; } node1.emplace_back(it.col()); node2.emplace_back(it.row()); edgeweights.emplace_back(it.value()); } } if(node1.size() == 0) { stop("Matrix contained no network data. Check format."); } int nNodes = std::max(SNN.cols(), SNN.rows()); network = matrixToNetwork(node1, node2, edgeweights, modularityFunction, nNodes); Rcpp::checkUserInterrupt(); } if (printOutput) { Rprintf("Number of nodes: %d\n", network->getNNodes()); Rprintf("Number of edges: %d\n", network->getNEdges()); Rcout << std::endl; Rcout << "Running " << ((algorithm == 1) ? "Louvain algorithm" : ((algorithm == 2) ? "Louvain algorithm with multilevel refinement" : "smart local moving algorithm")) << "..."; Rcout << std::endl; } resolution2 = ((modularityFunction == 1) ? 
(resolution / (2 * network->getTotalEdgeWeight() + network->getTotalEdgeWeightSelfLinks())) : resolution); auto beginTime = duration_cast(system_clock::now().time_since_epoch()); std::shared_ptr clustering; maxModularity = -std::numeric_limits::infinity(); JavaRandom random(randomSeed); Progress p(nRandomStarts, printOutput); for (i = 0; i < nRandomStarts; i++) { //if (printOutput && (nRandomStarts > 1)) //Rprintf("Random start: %d\n", i + 1); VOSClusteringTechnique vosClusteringTechnique(network, resolution2); j = 0; update = true; do { /*if (printOutput && (nIterations > 1)) Rprintf("Iteration: %d\n", j + 1); */ if (algorithm == 1) update = vosClusteringTechnique.runLouvainAlgorithm(random); else if (algorithm == 2) update = vosClusteringTechnique.runLouvainAlgorithmWithMultilevelRefinement(random); else if (algorithm == 3) vosClusteringTechnique.runSmartLocalMovingAlgorithm(random); j++; modularity = vosClusteringTechnique.calcQualityFunction(); //if (printOutput && (nIterations > 1)) // Rprintf("Modularity: %.4f\n", modularity); Rcpp::checkUserInterrupt(); } while ((j < nIterations) && update); if (modularity > maxModularity) { clustering = vosClusteringTechnique.getClustering(); maxModularity = modularity; } /*if (printOutput && (nRandomStarts > 1)) { if (nIterations == 1) Rprintf("Modularity: %.4f\n", modularity); Rcout << std::endl; }*/ p.increment(); } auto endTime = duration_cast(system_clock::now().time_since_epoch()); if(clustering == nullptr) { stop("Clustering step failed."); } if (printOutput) { if (nRandomStarts == 1) { if (nIterations > 1) Rcout << std::endl; Rprintf("Modularity: %.4f\n", maxModularity); } else Rprintf("Maximum modularity in %d random starts: %.4f\n", nRandomStarts, maxModularity); Rprintf("Number of communities: %d\n", clustering->getNClusters()); Rprintf("Elapsed time: %d seconds\n", static_cast((endTime - beginTime).count() / 1000.0)); } // Return results clustering->orderClustersByNNodes(); IntegerVector iv(clustering->cluster.cbegin(), clustering->cluster.cend()); return iv; } catch(std::exception &ex) { forward_exception_to_r(ex); } catch(...) 
{ ::Rf_error("c++ exception (unknown reason)"); } return IntegerVector(1); } Seurat/src/snn.h0000644000176200001440000000117213712563445013266 0ustar liggesusers#ifndef SNN #define SNN #include #include "data_manipulation.h" #include #include #include #include #include #include using namespace Rcpp; //---------------------------------------------------- Eigen::SparseMatrix ComputeSNN(Eigen::MatrixXd nn_ranked); void WriteEdgeFile(Eigen::SparseMatrix snn, String filename, bool display_progress); Eigen::SparseMatrix DirectSNNToFile(Eigen::MatrixXd nn_ranked, double prune, bool display_progress, String filename); //---------------------------------------------------- #endif//SNN Seurat/R/0000755000176200001440000000000014170333640011717 5ustar liggesusersSeurat/R/objects.R0000644000176200001440000026071614170106500013500 0ustar liggesusers#' @include reexports.R #' @include generics.R #' @importFrom Rcpp evalCpp #' @importFrom Matrix colSums rowSums colMeans rowMeans #' @importFrom methods setClass setOldClass setClassUnion slot #' slot<- setMethod new signature slotNames is setAs setValidity .hasSlot #' @importClassesFrom Matrix dgCMatrix #' @useDynLib Seurat #' NULL #%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% # Class definitions #%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% setOldClass(Classes = 'package_version') #' The AnchorSet Class #' #' The AnchorSet class is an intermediate data storage class that stores the anchors and other #' related information needed for performing downstream analyses - namely data integration #' (\code{\link{IntegrateData}}) and data transfer (\code{\link{TransferData}}). #' #' @slot object.list List of objects used to create anchors #' @slot reference.cells List of cell names in the reference dataset - needed when performing data #' transfer. #' @slot reference.objects Position of reference object/s in object.list #' @slot query.cells List of cell names in the query dataset - needed when performing data transfer #' @slot anchors The anchor matrix. This contains the cell indices of both anchor pair cells, the #' anchor score, and the index of the original dataset in the object.list for cell1 and cell2 of #' the anchor. #' @slot offsets The offsets used to enable cell look up in downstream functions #' @slot anchor.features The features used when performing anchor finding. #' @slot neighbors List containing Neighbor objects for reuse later (e.g. mapping) #' @slot command Store log of parameters that were used #' #' @name AnchorSet-class #' @rdname AnchorSet-class #' @concept objects #' @exportClass AnchorSet #' AnchorSet <- setClass( Class = "AnchorSet", contains = 'VIRTUAL', slots = list( object.list = "list", reference.cells = "vector", reference.objects = "vector", query.cells = "vector", anchors = "ANY", offsets = "ANY", anchor.features = "ANY", neighbors = "list", command = "ANY" ) ) #' The TransferAnchorSet Class #' #' Inherits from the Anchorset class. Implemented mainly for method dispatch #' purposes. See \code{\link{AnchorSet}} for slot details. #' #' @name TransferAnchorSet-class #' @rdname TransferAnchorSet-class #' @concept objects #' @exportClass TransferAnchorSet #' TransferAnchorSet <- setClass( Class = "TransferAnchorSet", contains = "AnchorSet" ) #' The IntegrationAnchorSet Class #' #' Inherits from the Anchorset class. Implemented mainly for method dispatch #' purposes. See \code{\link{AnchorSet}} for slot details. 
#' #' @name IntegrationAnchorSet-class #' @rdname IntegrationAnchorSet-class #' @concept objects #' @exportClass IntegrationAnchorSet #' IntegrationAnchorSet <- setClass( Class = "IntegrationAnchorSet", contains = "AnchorSet" ) #' The ModalityWeights Class #' #' The ModalityWeights class is an intermediate data storage class that stores the modality weight and other #' related information needed for performing downstream analyses - namely data integration #' (\code{FindModalityWeights}) and data transfer (\code{\link{FindMultiModalNeighbors}}). #' #' @slot modality.weight.list A list of modality weights value from all modalities #' @slot modality.assay Names of assays for the list of dimensional reductions #' @slot params A list of parameters used in the FindModalityWeights #' @slot score.matrix a list of score matrices representing cross and within-modality prediction #' score, and kernel value #' @slot command Store log of parameters that were used #' #' @name ModalityWeights-class #' @rdname ModalityWeights-class #' @concept objects #' @exportClass ModalityWeights #' ModalityWeights <- setClass( Class = "ModalityWeights", slots = list( modality.weight.list = "list", modality.assay = "vector", params = "list", score.matrix = "list", command = "ANY" ) ) #' The IntegrationData Class #' #' The IntegrationData object is an intermediate storage container used internally throughout the #' integration procedure to hold bits of data that are useful downstream. #' #' @slot neighbors List of neighborhood information for cells (outputs of \code{RANN::nn2}) #' @slot weights Anchor weight matrix #' @slot integration.matrix Integration matrix #' @slot anchors Anchor matrix #' @slot offsets The offsets used to enable cell look up in downstream functions #' @slot objects.ncell Number of cells in each object in the object.list #' @slot sample.tree Sample tree used for ordering multi-dataset integration #' #' @name IntegrationData-class #' @rdname IntegrationData-class #' @concept objects #' @exportClass IntegrationData #' IntegrationData <- setClass( Class = "IntegrationData", slots = list( neighbors = "ANY", weights = "ANY", integration.matrix = "ANY", anchors = "ANY", offsets = "ANY", objects.ncell = "ANY", sample.tree = "ANY" ) ) #' The SCTModel Class #' #' The SCTModel object is a model and parameters storage from SCTransform. #' It can be used to calculate Pearson residuals for new genes. #' #' @slot feature.attributes A data.frame with feature attributes in SCTransform #' @slot cell.attributes A data.frame with cell attributes in SCTransform #' @slot clips A list of two numeric of length two specifying the min and max #' values the Pearson residual will be clipped to. 
One for vst and one for #' SCTransform #' @slot umi.assay Name of the assay of the seurat object containing UMI matrix #' and the default is RNA #' @slot model A formula used in SCTransform #' @slot arguments other information used in SCTransform #' @slot median_umi Median UMI (or scale factor) used to calculate corrected counts #' #' @seealso \code{\link{Assay}} #' #' @name SCTAssay-class #' @rdname SCTAssay-class #' @concept objects #' #' @examples #' \dontrun{ #' # SCTAssay objects are generated from SCTransform #' pbmc_small <- SCTransform(pbmc_small) #' } #' SCTModel <- setClass( Class = 'SCTModel', slots = c( feature.attributes = 'data.frame', cell.attributes = 'data.frame', clips = 'list', umi.assay = 'character', model = 'character', arguments = 'list', median_umi = 'numeric' ) ) #' The SCTAssay Class #' #' The SCTAssay object contains all the information found in an \code{\link{Assay}} #' object, with extra information from the results of \code{\link{SCTransform}} #' #' @slot SCTModel.list A list containing SCT models #' #' @seealso \code{\link{Assay}} #' #' @name SCTAssay-class #' @rdname SCTAssay-class #' @concept objects #' #' @examples #' # SCTAssay objects are generated from SCTransform #' pbmc_small <- SCTransform(pbmc_small) #' pbmc_small[["SCT"]] #' SCTAssay <- setClass( Class = 'SCTAssay', contains = 'Assay', slots = c( SCTModel.list = 'list' ) ) #' @note \code{scalefactors} objects can be created with \code{scalefactors()} #' #' @param spot Spot full resolution scale factor #' @param fiducial Fiducial full resolution scale factor #' @param hires High resolutoin scale factor #' @param lowres Low resolution scale factor #' #' @rdname ScaleFactors #' @concept objects #' @concept spatial #' @export #' scalefactors <- function(spot, fiducial, hires, lowres) { object <- list( spot = spot, fiducial = fiducial, hires = hires, lowres = lowres ) object <- sapply(X = object, FUN = as.numeric, simplify = FALSE, USE.NAMES = TRUE) return(structure(.Data = object, class = 'scalefactors')) } setOldClass(Classes = c('scalefactors')) #' The SlideSeq class #' #' The SlideSeq class represents spatial information from the Slide-seq platform #' #' @inheritSection SeuratObject::SpatialImage Slots #' @slot coordinates ... 
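#'
#' SlideSeq objects are typically constructed from a table of bead coordinates
#' rather than an image, e.g. via \code{ReadSlideSeq} (a hedged sketch; the
#' \code{coord.file} argument name is an assumption, see \code{?ReadSlideSeq}):
#' \preformatted{
#' slide.seq <- ReadSlideSeq(coord.file = "BeadLocationsForR.csv")
#' }
#'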
#' @concept spatial #' SlideSeq <- setClass( Class = 'SlideSeq', contains = 'SpatialImage', slots = list( 'coordinates' = 'data.frame' ) ) #' The STARmap class #' #' #' @inheritSection SeuratObject::SpatialImage Slots #' @concept objects #' @concept spatial #' STARmap <- setClass( Class = 'STARmap', contains = 'SpatialImage', slots = list( 'coordinates' = 'data.frame', 'qhulls' = 'data.frame' ) ) #' The VisiumV1 class #' #' The VisiumV1 class represents spatial information from the 10X Genomics Visium #' platform #' #' @slot image A three-dimensional array with PNG image data, see #' \code{\link[png]{readPNG}} for more details #' @slot scale.factors An object of class \code{\link{scalefactors}}; see #' \code{\link{scalefactors}} for more information #' @slot coordinates A data frame with tissue coordinate information #' @slot spot.radius Single numeric value giving the radius of the spots #' #' @name VisiumV1-class #' @rdname VisiumV1-class #' @concept objects #' @concept spatial #' @exportClass VisiumV1 #' VisiumV1 <- setClass( Class = 'VisiumV1', contains = 'SpatialImage', slots = list( 'image' = 'array', 'scale.factors' = 'scalefactors', 'coordinates' = 'data.frame', 'spot.radius' = 'numeric' ) ) setClass(Class = 'SliceImage', contains = 'VisiumV1') #%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% # Functions #%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% #' Get a vector of cell names associated with an image (or set of images) #' #' @param object Seurat object #' @param images Vector of image names #' @param unlist Return as a single vector of cell names as opposed to a list, #' named by image name. #' #' @return A vector of cell names #' #' @examples #' \dontrun{ #' CellsByImage(object = object, images = "slice1") #' } #' CellsByImage <- function(object, images = NULL, unlist = FALSE) { images <- images %||% Images(object = object) cells <- sapply( X = images, FUN = function(x) { Cells(x = object[[x]]) }, simplify = FALSE, USE.NAMES = TRUE ) if (unlist) { cells <- unname(obj = unlist(x = cells)) } return(cells) } #' Create a SCT Assay object #' #' Create a SCT object from a feature (e.g. gene) expression matrix and a list of SCTModels. #' The expected format of the input matrix is features x cells. #' #' Non-unique cell or feature names are not allowed. Please make unique before #' calling this function. #' @param scale.data a residual matrix #' @param SCTModel.list list of SCTModels #' @param umi.assay The UMI assay name. 
Default is RNA #' @inheritParams SeuratObject::CreateAssayObject #' #' @importFrom methods as #' @importFrom Matrix colSums rowSums #' #' @export #' @concept objects #' CreateSCTAssayObject <- function( counts, data, scale.data = NULL, umi.assay = "RNA", min.cells = 0, min.features = 0, SCTModel.list = NULL ) { assay <- CreateAssayObject( counts = counts, data = data, min.cells = min.cells, min.features = min.features ) if (!is.null(scale.data)) { assay <- SetAssayData(object = assay, slot = "scale.data", new.data = scale.data) } slot(object = assay, name = "assay.orig") <- umi.assay #checking SCTModel.list format if (is.null(x = SCTModel.list)) { SCTModel.type <- "none" warning("An empty SCTModel will be generated due to no SCTModel input") } else { if (inherits(x = SCTModel.list, what = "SCTModel")) { SCTModel.list <- list(model1 = SCTModel.list) SCTModel.type <- "SCTModel.list" } else if (inherits(x = SCTModel.list, what = "list")) { if (inherits(x = SCTModel.list[[1]], what = "SCTModel")){ SCTModel.type <- "SCTModel.list" } else if (IsVSTout(vst.out = SCTModel.list)){ SCTModel.type <- "vst.out" } else if (IsVSTout(SCTModel.list[[1]])) { SCTModel.type <- "vst.set" } else { stop("SCTModel input is not a correct format") } } } model.list <- switch( EXPR = SCTModel.type, "none" = { list() }, "SCTModel.list" = { SCTModel.list <- lapply(X = SCTModel.list, FUN = function(model) { select.cell <- intersect(x = Cells(x = model), Cells(x = assay)) if (length(x = select.cell) == 0) { stop("Cells in SCTModel.list don't match Cells in assay") } else { model@cell.attributes <- model@cell.attributes[select.cell, , drop = FALSE] } return(model) }) SCTModel.list }, "vst.out" = { SCTModel.list$umi.assay <- umi.assay SCTModel.list <- PrepVSTResults( vst.res = SCTModel.list, cell.names = Cells(x = assay) ) list(model1 = SCTModel.list) }, "vst.set" = { new.model <- lapply( X = SCTModel.list, FUN = function(vst.res) { vst.res$umi.assay <- umi.assay return(PrepVSTResults(vst.res = vst.res, cell.names = colnames(x = assay))) } ) names(x = new.model) <- paste0("model", 1:length(x = new.model)) new.model } ) assay <- new( Class = "SCTAssay", assay, SCTModel.list = model.list ) return(assay) } #' Slim down a Seurat object #' #' Keep only certain aspects of the Seurat object. Can be useful in functions that utilize merge as #' it reduces the amount of data in the merge. 
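#'
#' A hedged sketch of a typical call (the parameter values are illustrative,
#' not a recommendation):
#' \preformatted{
#' # keep normalized data for the RNA assay, drop scaled data, keep only PCA
#' pbmc.diet <- DietSeurat(pbmc_small, counts = TRUE, data = TRUE,
#'                         scale.data = FALSE, assays = "RNA", dimreducs = "pca")
#' }
#'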
#' #' @param object Seurat object #' @param counts Preserve the count matrices for the assays specified #' @param data Preserve the data slot for the assays specified #' @param scale.data Preserve the scale.data slot for the assays specified #' @param features Only keep a subset of features, defaults to all features #' @param assays Only keep a subset of assays specified here #' @param dimreducs Only keep a subset of DimReducs specified here (if NULL, #' remove all DimReducs) #' @param graphs Only keep a subset of Graphs specified here (if NULL, remove #' all Graphs) #' #' @export #' @concept objects #' DietSeurat <- function( object, counts = TRUE, data = TRUE, scale.data = FALSE, features = NULL, assays = NULL, dimreducs = NULL, graphs = NULL ) { object <- UpdateSlots(object = object) assays <- assays %||% FilterObjects(object = object, classes.keep = "Assay") assays <- assays[assays %in% FilterObjects(object = object, classes.keep = 'Assay')] if (length(x = assays) == 0) { stop("No assays provided were found in the Seurat object") } if (!DefaultAssay(object = object) %in% assays) { stop("The default assay is slated to be removed, please change the default assay") } if (!counts && !data) { stop("Either one or both of 'counts' and 'data' must be kept") } for (assay in FilterObjects(object = object, classes.keep = 'Assay')) { if (!(assay %in% assays)) { object[[assay]] <- NULL } else { if (!is.null(x = features)) { features.assay <- intersect(x = features, y = rownames(x = object[[assay]])) if (length(x = features.assay) == 0) { if (assay == DefaultAssay(object = object)) { stop("The default assay is slated to be removed, please change the default assay") } else { warning("No features found in assay '", assay, "', removing...") object[[assay]] <- NULL } } else { object[[assay]] <- subset(x = object[[assay]], features = features.assay) } } if (!counts) { slot(object = object[[assay]], name = 'counts') <- new(Class = 'matrix') } if (!data) { stop('data = FALSE currently not supported') } if (!scale.data) { slot(object = object[[assay]], name = 'scale.data') <- new(Class = 'matrix') } } } # remove unspecified DimReducs and Graphs all.objects <- FilterObjects(object = object, classes.keep = c('DimReduc', 'Graph')) objects.to.remove <- all.objects[!all.objects %in% c(dimreducs, graphs)] for (ob in objects.to.remove) { object[[ob]] <- NULL } return(object) } #' Filter stray beads from Slide-seq puck #' #' This function is useful for removing stray beads that fall outside the main #' Slide-seq puck area. Essentially, it's a circular filter where you set a #' center and radius defining a circle of beads to keep. If the center is not #' set, it will be estimated from the bead coordinates (removing the 1st and #' 99th quantile to avoid skewing the center by the stray beads). By default, #' this function will display a \code{\link{SpatialDimPlot}} showing which cells #' were removed for easy adjustment of the center and/or radius. #' #' @param object Seurat object with slide-seq data #' @param image Name of the image where the coordinates are stored #' @param center Vector specifying the x and y coordinates for the center of the #' inclusion circle #' @param radius Radius of the circle of inclusion #' @param do.plot Display a \code{\link{SpatialDimPlot}} with the cells being #' removed labeled. 
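#'
#' The center heuristic is essentially a trimmed mean of each coordinate,
#' roughly \code{mean(x[x > quantile(x, 0.01) & x < quantile(x, 0.99)])}, so
#' stray beads far outside the puck do not pull the estimated center towards
#' themselves.
#'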
#' #' @return Returns a Seurat object with only the subset of cells that pass the #' circular filter #' #' @concept objects #' @concept spatial #' @examples #' \dontrun{ #' # This example uses the ssHippo dataset which you can download #' # using the SeuratData package. #' library(SeuratData) #' data('ssHippo') #' # perform filtering of beads #' ssHippo.filtered <- FilterSlideSeq(ssHippo, radius = 2300) #' # This radius looks to small so increase and repeat until satisfied #' } #' @export #' FilterSlideSeq <- function( object, image = "image", center = NULL, radius = NULL, do.plot = TRUE ) { if (!inherits(x = object[[image]], what = "SlideSeq")) { warning( "This fxn is intended for filtering SlideSeq data and is untested ", "outside of that context." ) } dat <- GetTissueCoordinates(object[[image]]) if (is.null(x = center)) { # heuristic for determining center of puck center <- c() x.vals <- dat[, 1] center[1] <- mean( x = x.vals[x.vals < quantile(x = x.vals, probs = 0.99) & x.vals > quantile(x = x.vals, probs = 0.01)] ) y.vals <- dat[, 2] center[2] <- mean( x = y.vals[y.vals < quantile(x = y.vals, probs = 0.99) & y.vals > quantile(x = y.vals, probs = 0.01)] ) } if (is.null(x = radius)) { stop("Please provide a radius.") } dists <- apply(X = dat, MARGIN = 1, FUN = function(x) { as.numeric(dist(rbind(x[c(1, 2)], center))) }) cells.to.remove <- names(x = which(x = (dists > radius))) if (do.plot) { Idents(object) <- "keep" object <- SetIdent(object = object, cells = cells.to.remove, value = "remove") print(SpatialDimPlot(object = object)) } return(subset(x = object, cells = cells.to.remove, invert = TRUE)) } #' Get integration data #' #' @param object Seurat object #' @param integration.name Name of integration object #' @param slot Which slot in integration object to get #' #' @return Returns data from the requested slot within the integrated object #' #' @export #' @concept objects #' GetIntegrationData <- function(object, integration.name, slot) { tools <- slot(object = object, name = 'tools') if (!(integration.name %in% names(tools))) { stop('Requested integration key does not exist') } int.data <- tools[[integration.name]] return(slot(object = int.data, name = slot)) } #' Set integration data #' #' @param object Seurat object #' @param integration.name Name of integration object #' @param slot Which slot in integration object to set #' @param new.data New data to insert #' #' @return Returns a \code{\link{Seurat}} object #' #' @export #' @concept objects #' SetIntegrationData <- function(object, integration.name, slot, new.data) { tools <- slot(object = object, name = 'tools') if (!(integration.name %in% names(tools))) { new.integrated <- new(Class = 'IntegrationData') slot(object = new.integrated, name = slot) <- new.data tools[[integration.name]] <- new.integrated slot(object = object, name = 'tools') <- tools return(object) } int.data <- tools[[integration.name]] slot(object = int.data, name = slot) <- new.data tools[[integration.name]] <- int.data slot(object = object, name = 'tools') <- tools return(object) } #' Splits object into a list of subsetted objects. #' #' Splits object based on a single attribute into a list of subsetted objects, #' one for each level of the attribute. For example, useful for taking an object #' that contains cells from many patients, and subdividing it into #' patient-specific objects. #' #' @param object Seurat object #' @param split.by Attribute for splitting. Default is "ident". Currently #' only supported for class-level (i.e. 
non-quantitative) attributes. #' #' @return A named list of Seurat objects, each containing a subset of cells #' from the original object. #' #' @export #' @concept objects #' #' @examples #' data("pbmc_small") #' # Assign the test object a three level attribute #' groups <- sample(c("group1", "group2", "group3"), size = 80, replace = TRUE) #' names(groups) <- colnames(pbmc_small) #' pbmc_small <- AddMetaData(object = pbmc_small, metadata = groups, col.name = "group") #' obj.list <- SplitObject(pbmc_small, split.by = "group") #' SplitObject <- function(object, split.by = "ident") { if (split.by == 'ident') { groupings <- Idents(object = object) } else { groupings <- FetchData(object = object, vars = split.by)[, 1] } groupings <- unique(x = as.character(x = groupings)) obj.list <- list() for (i in groupings) { if (split.by == "ident") { obj.list[[i]] <- subset(x = object, idents = i) } else { cells <- which(x = object[[split.by, drop = TRUE]] == i) cells <- colnames(x = object)[cells] obj.list[[i]] <- subset(x = object, cells = cells) } } return(obj.list) } #' Find features with highest scores for a given dimensional reduction technique #' #' Return a list of features with the strongest contribution to a set of components #' #' @param object DimReduc object #' @param dim Dimension to use #' @param nfeatures Number of features to return #' @param projected Use the projected feature loadings #' @param balanced Return an equal number of features with both + and - scores. #' @param ... Extra parameters passed to \code{\link{Loadings}} #' #' @return Returns a vector of features #' #' @export #' @concept objects #' #' @examples #' data("pbmc_small") #' pbmc_small #' TopFeatures(object = pbmc_small[["pca"]], dim = 1) #' # After projection: #' TopFeatures(object = pbmc_small[["pca"]], dim = 1, projected = TRUE) #' TopFeatures <- function( object, dim = 1, nfeatures = 20, projected = FALSE, balanced = FALSE, ... ) { loadings <- Loadings(object = object, projected = projected, ...)[, dim, drop = FALSE] return(Top( data = loadings, num = nfeatures, balanced = balanced )) } #' Find cells with highest scores for a given dimensional reduction technique #' #' Return a list of genes with the strongest contribution to a set of components #' #' @param object DimReduc object #' @param dim Dimension to use #' @param ncells Number of cells to return #' @param balanced Return an equal number of cells with both + and - scores. #' @param ... Extra parameters passed to \code{\link{Embeddings}} #' #' @return Returns a vector of cells #' #' @export #' @concept objects #' #' @examples #' data("pbmc_small") #' pbmc_small #' head(TopCells(object = pbmc_small[["pca"]])) #' # Can specify which dimension and how many cells to return #' TopCells(object = pbmc_small[["pca"]], dim = 2, ncells = 5) #' TopCells <- function(object, dim = 1, ncells = 20, balanced = FALSE, ...) { embeddings <- Embeddings(object = object, ...)[, dim, drop = FALSE] return(Top( data = embeddings, num = ncells, balanced = balanced )) } #' Get nearest neighbors for given cell #' #' Return a vector of cell names of the nearest n cells. 
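#'
#' A minimal sketch (this assumes a \code{Neighbor} object has been stored,
#' e.g. by running \code{FindNeighbors} with \code{return.neighbor = TRUE};
#' the \code{"RNA.nn"} name is an assumption about the stored graph name):
#' \preformatted{
#' pbmc_small <- FindNeighbors(pbmc_small, return.neighbor = TRUE)
#' TopNeighbors(pbmc_small[["RNA.nn"]], cell = Cells(pbmc_small)[1], n = 5)
#' }
#'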
#' #' @param object \code{\link{Neighbor}} object #' @param cell Cell of interest #' @param n Number of neighbors to return #' #' @return Returns a vector of cell names #' #' @export #' @concept objects #' TopNeighbors <- function(object, cell, n = 5) { indices <- Indices(object = object)[cell, 1:n] return(Cells(x = object)[indices]) } #%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% # Methods for Seurat-defined generics #%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% #' @param assay Assay to convert #' @param reduction Name of DimReduc to set to main reducedDim in cds #' #' @rdname as.CellDataSet #' @concept objects #' @export #' @method as.CellDataSet Seurat #' as.CellDataSet.Seurat <- function(x, assay = NULL, reduction = NULL, ...) { CheckDots(...) if (!PackageCheck('monocle', error = FALSE)) { stop("Please install monocle from Bioconductor before converting to a CellDataSet object") } else if (packageVersion(pkg = 'monocle') >= package_version(x = '2.99.0')) { stop("Seurat can only convert to/from Monocle v2.X objects") } assay <- assay %||% DefaultAssay(object = x) # make variables, then run `newCellDataSet` # create cellData counts counts <- GetAssayData(object = x, assay = assay, slot = "counts") # metadata cell.metadata <- x[[]] feature.metadata <- x[[assay]][[]] if (!"gene_short_name" %in% colnames(x = feature.metadata)) { feature.metadata$gene_short_name <- rownames(x = feature.metadata) } pd <- new(Class = "AnnotatedDataFrame", data = cell.metadata) fd <- new(Class = "AnnotatedDataFrame", data = feature.metadata) # Now, determine the expressionFamily if ("monocle" %in% names(x = Misc(object = x))) { expressionFamily <- Misc(object = x, slot = "monocle")[["expressionFamily"]] } else { if (all(counts == floor(x = counts))) { expressionFamily <- VGAM::negbinomial.size() } else if (any(counts < 0)) { expressionFamily <- VGAM::uninormal() } else { expressionFamily <- VGAM::tobit() } } cds <- monocle::newCellDataSet( cellData = counts, phenoData = pd, featureData = fd, expressionFamily = expressionFamily ) if ("monocle" %in% names(x = Misc(object = x))) { monocle::cellPairwiseDistances(cds = cds) <- Misc(object = x, slot = "monocle")[["cellPairwiseDistances"]] monocle::minSpanningTree(cds = cds) <- Misc(object = x, slot = "monocle")[["minSpanningTree"]] Biobase::experimentData(cds = cds) <- Misc(object = x, slot = "monocle")[["experimentData"]] Biobase::protocolData(cds = cds) <- Misc(object = x, slot = "monocle")[["protocolData"]] Biobase::classVersion(cds = cds) <- Misc(object = x, slot = "monocle")[["classVersion"]] # no setter methods found for following slots slot(object = cds, name = "lowerDetectionLimit") <- Misc(object = x, slot = "monocle")[["lowerDetectionLimit"]] slot(object = cds, name = "dispFitInfo") <- Misc(object = x, slot = "monocle")[["dispFitInfo"]] slot(object = cds, name = "auxOrderingData") <- Misc(object = x, slot = "monocle")[["auxOrderingData"]] slot(object = cds, name = "auxClusteringData") <- Misc(object = x, slot = "monocle")[["auxClusteringData"]] } # adding dimensionality reduction data to the CDS dr.slots <- c("reducedDimS", "reducedDimK", "reducedDimW", "reducedDimA") reduction <- reduction %||% DefaultDimReduc(object = x, assay = assay) if (!is.null(x = reduction)) { if (grepl(pattern = 'tsne', x = tolower(x = reduction))) { slot(object = cds, name = "dim_reduce_type") <- "tSNE" monocle::reducedDimA(cds = cds) <- t(x = Embeddings(object = x[[reduction]])) } else { slot(object = cds, name 
= "dim_reduce_type") <- reduction monocle::reducedDimA(cds = cds) <- Loadings(object = x[[reduction]]) slot(object = cds, name = "reducedDimS") <- Embeddings(object = x[[reduction]]) } for (ii in dr.slots) { if (ii %in% names(x = slot(object = x[[reduction]], name = "misc"))) { slot(object = cds, name = ii) <- slot(object = x[[reduction]], name = "misc")[[ii]] } } } return(cds) } #' Convert objects to \code{Seurat} objects #' #' @inheritParams SeuratObject::as.Seurat #' @param slot Slot to store expression data as #' @param verbose Show progress updates #' #' @return A \code{Seurat} object generated from \code{x} #' #' @importFrom utils packageVersion #' #' @rdname as.Seurat #' @concept objects #' @export #' @method as.Seurat CellDataSet #' #' @seealso \code{\link[SeuratObject:as.Seurat]{SeuratObject::as.Seurat}} #' as.Seurat.CellDataSet <- function( x, slot = 'counts', assay = 'RNA', verbose = TRUE, ... ) { CheckDots(...) if (!PackageCheck('monocle', error = FALSE)) { stop("Please install monocle from Bioconductor before converting to a CellDataSet object") } else if (packageVersion(pkg = 'monocle') >= package_version(x = '2.99.0')) { stop("Seurat can only convert to/from Monocle v2.X objects") } slot <- match.arg(arg = slot, choices = c('counts', 'data')) if (verbose) { message("Pulling expression data") } expr <- Biobase::exprs(object = x) if (IsMatrixEmpty(x = expr)) { stop("No data provided in this CellDataSet object", call. = FALSE) } meta.data <- as.data.frame(x = Biobase::pData(object = x)) # if cell names are NULL, fill with cell_X if (is.null(x = colnames(x = expr))) { warning( "The column names of the 'counts' and 'data' matrices are NULL. Setting cell names to cell_columnidx (e.g 'cell_1').", call. = FALSE, immediate. = TRUE ) rownames(x = meta.data) <- colnames(x = expr) <- paste0("cell_", 1:ncol(x = expr)) } # Creating the object if (verbose) { message("Building Seurat object") } if (slot == 'data') { assays <- list(CreateAssayObject(data = expr)) names(x = assays) <- assay Key(object = assays[[assay]]) <- suppressWarnings(expr = UpdateKey(key = assay)) object <- new( Class = 'Seurat', assays = assays, meta.data = meta.data, version = packageVersion(pkg = 'Seurat'), project.name = 'SeuratProject' ) DefaultAssay(object = object) <- assay } else { object <- CreateSeuratObject( counts = expr, meta.data = meta.data, assay = assay ) } # feature metadata if (verbose) { message("Adding feature-level metadata") } feature.metadata <- Biobase::fData(object = x) object[[assay]][[names(x = feature.metadata)]] <- feature.metadata # mean/dispersion values disp.table <- tryCatch( expr = suppressWarnings(expr = monocle::dispersionTable(cds = x)), error = function(...) 
{ return(NULL) } ) if (!is.null(x = disp.table)) { if (verbose) { message("Adding dispersion information") } rownames(x = disp.table) <- disp.table[, 1] disp.table[, 1] <- NULL colnames(x = disp.table) <- paste0('monocle_', colnames(x = disp.table)) object[[assay]][[names(x = disp.table)]] <- disp.table } else if (verbose) { message("No dispersion information in CellDataSet object") } # variable features if ("use_for_ordering" %in% colnames(x = feature.metadata)) { if (verbose) { message("Setting variable features") } VariableFeatures(object = object, assay = assay) <- rownames(x = feature.metadata)[which(x = feature.metadata[, "use_for_ordering"])] } else if (verbose) { message("No variable features present") } # add dim reduction dr.name <- slot(object = x, name = "dim_reduce_type") if (length(x = dr.name) > 0) { if (verbose) { message("Adding ", dr.name, " dimensional reduction") } reduced.A <- t(x = slot(object = x, name = 'reducedDimA')) reduced.S <- t(x = slot(object = x, name = 'reducedDimS')) if (IsMatrixEmpty(x = reduced.S)) { embeddings <- reduced.A loadings <- new(Class = 'matrix') } else { embeddings <- reduced.S loadings <- t(x = reduced.A) } rownames(x = embeddings) <- colnames(x = object) misc.dr <- list( reducedDimS = slot(object = x, name = "reducedDimS"), reducedDimK = slot(object = x, name = "reducedDimK"), reducedDimW = slot(object = x, name = "reducedDimW"), reducedDimA = slot(object = x, name = "reducedDimA") ) dr <- suppressWarnings(expr = CreateDimReducObject( embeddings = embeddings, loadings = loadings, assay = assay, key = UpdateKey(key = tolower(x = dr.name)), misc = misc.dr )) object[[dr.name]] <- dr } else if (verbose) { message("No dimensional reduction information found") } monocle.specific.info <- list( expressionFamily = slot(object = x, name = "expressionFamily"), lowerDetectionLimit = slot(object = x, name = "lowerDetectionLimit"), dispFitInfo = slot(object = x, name = "dispFitInfo"), cellPairwiseDistances = slot(object = x, name = "cellPairwiseDistances"), minSpanningTree = slot(object = x, name = "minSpanningTree"), auxOrderingData = slot(object = x, name = "auxOrderingData"), auxClusteringData = slot(object = x, name = "auxClusteringData"), experimentData = slot(object = x, name = "experimentData"), protocolData = slot(object = x, name = "protocolData"), classVersion = slot(object = x, name = ".__classVersion__") ) Misc(object = object, slot = "monocle") <- monocle.specific.info return(object) } #' @param counts name of the SingleCellExperiment assay to store as \code{counts}; #' set to \code{NULL} if only normalized data are present #' @param data name of the SingleCellExperiment assay to slot as \code{data}. #' Set to NULL if only counts are present #' @param assay Name of assays to convert; set to \code{NULL} for all assays to be converted #' @param project Project name for new Seurat object #' #' @rdname as.Seurat #' @concept objects #' @export #' @method as.Seurat SingleCellExperiment #' as.Seurat.SingleCellExperiment <- function( x, counts = 'counts', data = 'logcounts', assay = NULL, project = 'SingleCellExperiment', ... ) { CheckDots(...) if (!PackageCheck('SingleCellExperiment', error = FALSE)) { stop( "Please install SingleCellExperiment from Bioconductor before converting to a SingeCellExperiment object.", "\nhttps://bioconductor.org/packages/SingleCellExperiment/", call. 
= FALSE ) } meta.data <- as.data.frame(x = SummarizedExperiment::colData(x = x)) if (packageVersion(pkg = "SingleCellExperiment") >= "1.14.0") { orig.exp <- SingleCellExperiment::mainExpName(x = x) %||% "originalexp" } else { orig.exp <- "originalexp" } if (!is.null(SingleCellExperiment::altExpNames(x = x))) { assayn <- assay %||% SingleCellExperiment::altExpNames(x = x) if (!all(assay %in% SingleCellExperiment::altExpNames(x = x))) { stop("One or more of the assays you are trying to convert is not in the SingleCellExperiment object") } assayn <- c(orig.exp, assayn) } else { assayn <- orig.exp } for (assay in assayn) { if (assay != orig.exp) { x <- SingleCellExperiment::swapAltExp(x = x, name = assay, saved = NULL) } # Pull matrices mats <- list(counts = counts, data = data) mats <- Filter(f = Negate(f = is.null), x = mats) if (length(x = mats) == 0) { stop("Cannot pass 'NULL' to both 'counts' and 'data'") } for (m in 1:length(x = mats)) { mats[[m]] <- tryCatch( expr = SummarizedExperiment::assay(x = x, i = mats[[m]]), error = function(e) { stop("No data in provided assay - ", mats[[m]], call. = FALSE) } ) # if cell names are NULL, fill with cell_X if (is.null(x = colnames(x = mats[[m]]))) { warning( "The column names of the ", names(x = mats)[m], " matrix is NULL. Setting cell names to cell_columnidx (e.g 'cell_1').", call. = FALSE, immediate. = TRUE ) cell.names <- paste0("cell_", 1:ncol(x = mats[[m]])) colnames(x = mats[[m]]) <- cell.names rownames(x = meta.data) <- cell.names } } assays <- if (is.null(x = mats$counts)) { list(CreateAssayObject(data = mats$data)) } else if (is.null(x = mats$data)) { list(CreateAssayObject(counts = mats$counts)) } else { a <- CreateAssayObject(counts = mats$counts) a <- SetAssayData(object = a, slot = 'data', new.data = mats$data) list(a) } names(x = assays) <- assay Key(object = assays[[assay]]) <- paste0(tolower(x = assay), '_') # Create the Seurat object if (!exists(x = "object")) { object <- CreateSeuratObject( counts = assays[[assay]], Class = 'Seurat', assay = assay, meta.data = meta.data, version = packageVersion(pkg = 'Seurat'), project.name = project ) } else { object[[assay]] <- assays[[assay]] } DefaultAssay(object = object) <- assay # add feature level meta data md <- SingleCellExperiment::rowData(x = x) if (ncol(x = md) > 0) { # replace underscores rownames(x = md) <- gsub(pattern = "_", replacement = "-", x = rownames(x = md)) md <- as.data.frame(x = md) # ensure order same as data md <- md[rownames(x = object[[assay]]), , drop = FALSE] object[[assay]] <- AddMetaData( object = object[[assay]], metadata = md ) } Idents(object = object) <- project # Get DimReduc information, add underscores if needed and pull from different alt EXP if (length(x = SingleCellExperiment::reducedDimNames(x = x)) > 0) { for (dr in SingleCellExperiment::reducedDimNames(x = x)) { embeddings <- as.matrix(x = SingleCellExperiment::reducedDim(x = x, type = dr)) if (is.null(x = rownames(x = embeddings))) { rownames(x = embeddings) <- cell.names } if (isTRUE(x = !grepl('_$', gsub(pattern = "[[:digit:]]", replacement = "_", x = colnames(x = SingleCellExperiment::reducedDim(x = x, type = dr))[1] )))) { key <- gsub( pattern = "[[:digit:]]", replacement = "_", x = colnames(x = SingleCellExperiment::reducedDim(x = x, type = dr))[1] ) } else { key <- gsub( pattern = "[[:digit:]]", replacement = "", x = colnames(x = SingleCellExperiment::reducedDim(x = x, type = dr))[1] ) } if (length(x = key) == 0) { key <- paste0(dr, "_") } colnames(x = embeddings) <- paste0(key, 1:ncol(x = 
embeddings)) object[[dr]] <- CreateDimReducObject( embeddings = embeddings, key = key, assay = DefaultAssay(object = object) ) } } } return(object) } #' @param assay Assays to convert #' #' @rdname as.SingleCellExperiment #' @concept objects #' @export #' @method as.SingleCellExperiment Seurat #' as.SingleCellExperiment.Seurat <- function(x, assay = NULL, ...) { CheckDots(...) if (!PackageCheck('SingleCellExperiment', error = FALSE)) { stop("Please install SingleCellExperiment from Bioconductor before converting to a SingeCellExperiment object") } assay <- assay %||% Assays(object = x) if (!all(assay %in% Assays(object = x))) { stop("One or more of the assays you are trying to convert is not in the Seurat object") } if (DefaultAssay(object = x) %in% assay) { assay <- union(DefaultAssay(object = x), assay) } experiments <- list() for (assayn in assay) { assays <- list( counts = GetAssayData(object = x, assay = assayn, slot = "counts"), logcounts = GetAssayData(object = x, assay = assayn, slot = "data") ) scaledata_a <- GetAssayData(object = x, assay = assayn, slot = "scale.data") if (isTRUE(x = all.equal( target = dim(x = assays[["counts"]]), current = dim(x = scaledata_a)) )) { assays[["scaledata"]] <- scaledata_a } assays <- assays[sapply(X = assays, FUN = nrow) != 0] sume <- SummarizedExperiment::SummarizedExperiment(assays = assays) experiments[[assayn]] <- sume } # create one single cell experiment sce <- as(object = experiments[[1]], Class = "SingleCellExperiment") orig.exp.name <- names(x = experiments[1]) if (packageVersion(pkg = "SingleCellExperiment") >= "1.14.0") { SingleCellExperiment::mainExpName(sce) <- names(x = experiments[1]) } if (length(x = experiments) > 1) { sce <- SingleCellExperiment::SingleCellExperiment(sce, altExps = experiments) sce <- SingleCellExperiment::swapAltExp( x = sce, name = orig.exp.name, saved = NULL ) } metadata <- x[[]] metadata$ident <- Idents(object = x) SummarizedExperiment::colData(x = sce) <- S4Vectors::DataFrame(metadata) for (assayn in assay) { if (assayn != orig.exp.name) { sce <- SingleCellExperiment::swapAltExp( x = sce, name = assayn, saved = orig.exp.name ) SummarizedExperiment::rowData(x = sce) <- S4Vectors::DataFrame(x[[assayn]][[]]) sce <- SingleCellExperiment::swapAltExp( x = sce, name = orig.exp.name, saved = assayn ) } } for (dr in FilterObjects(object = x, classes.keep = "DimReduc")) { assay.used <- DefaultAssay(object = x[[dr]]) swap.exp <- assay.used %in% SingleCellExperiment::altExpNames(x = sce) & assay.used != orig.exp.name if (swap.exp) { sce <- SingleCellExperiment::swapAltExp( x = sce, name = assay.used, saved = orig.exp.name ) } SingleCellExperiment::reducedDim(x = sce, type = toupper(x = dr)) <- Embeddings(object = x[[dr]]) if (swap.exp) { sce <- SingleCellExperiment::swapAltExp( x = sce, name = orig.exp.name, saved = assay.used ) } } return(sce) } #' Cast to Sparse #' #' @inheritParams SeuratObject::as.sparse #' #' @importFrom methods is #' @importFrom Matrix sparseMatrix #' #' @rdname as.sparse #' @concept objects #' @export #' @method as.sparse H5Group #' #' #' @seealso \code{\link[SeuratObject:as.sparse]{SeuratObject::as.sparse}} #' as.sparse.H5Group <- function(x, ...) { CheckDots(...) 
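  # Expected layout: the H5 group stores a compressed sparse column matrix in
  # the common 'h5sparse' convention, with datasets 'data' (non-zero values),
  # 'indices' (0-based row indices, hence the +1 below), and 'indptr'
  # (column pointers); these are passed to Matrix::sparseMatrix() as i, p and
  # x below.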
for (i in c('data', 'indices', 'indptr')) { if (!x$exists(name = i) || !is(object = x[[i]], class2 = 'H5D')) { stop("Invalid H5Group specification for a sparse matrix, missing dataset ", i) } } if ('h5sparse_shape' %in% hdf5r::h5attr_names(x = x)) { return(sparseMatrix( i = x[['indices']][] + 1, p = x[['indptr']][], x = x[['data']][], dims = rev(x = hdf5r::h5attr(x = x, which = 'h5sparse_shape')) )) } return(sparseMatrix( i = x[['indices']][] + 1, p = x[['indptr']][], x = x[['data']][] )) } #' Get Cell Names #' #' @inheritParams SeuratObject::Cells #' #' @rdname Cells #' @concept objects #' @method Cells SCTModel #' @export #' Cells.SCTModel <- function(x) { return(rownames(x = slot(object = x, name = "cell.attributes"))) } #' @rdname Cells #' @concept objects #' @concept spatial #' @method Cells SlideSeq #' @export #' #' @seealso \code{\link[SeuratObject:Cells]{SeuratObject::Cells}} #' Cells.SlideSeq <- function(x) { return(rownames(x = GetTissueCoordinates(object = x))) } #' @rdname Cells #' @concept objects #' @concept spatial #' @method Cells STARmap #' @export #' Cells.STARmap <- function(x) { return(rownames(x = GetTissueCoordinates(object = x))) } #' @rdname Cells #' @concept objects #' @method Cells VisiumV1 #' @export #' Cells.VisiumV1 <- function(x) { return(rownames(x = GetTissueCoordinates(object = x, scale = NULL))) } #' @param assay Assay to get #' #' @rdname GetAssay #' @concept objects #' @export #' @method GetAssay Seurat #' #' @examples #' data("pbmc_small") #' GetAssay(object = pbmc_small, assay = "RNA") #' GetAssay.Seurat <- function(object, assay = NULL, ...) { CheckDots(...) assay <- assay %||% DefaultAssay(object = object) object.assays <- FilterObjects(object = object, classes.keep = 'Assay') if (!assay %in% object.assays) { stop(paste0( assay, " is not an assay present in the given object. Available assays are: ", paste(object.assays, collapse = ", ") )) } return(slot(object = object, name = 'assays')[[assay]]) } #' Get Image Data #' #' @inheritParams SeuratObject::GetImage #' #' @rdname GetImage #' @method GetImage SlideSeq #' @concept objects #' @concept spatial #' @export #' #' @seealso \code{\link[SeuratObject:GetImage]{SeuratObject::GetImage}} #' GetImage.SlideSeq <- function( object, mode = c('grob', 'raster', 'plotly', 'raw'), ... ) { mode <- match.arg(arg = mode) return(NullImage(mode = mode)) } #' @rdname GetImage #' @method GetImage STARmap #' @concept objects #' @concept spatial #' @export #' GetImage.STARmap <- function( object, mode = c('grob', 'raster', 'plotly', 'raw'), ... ) { mode <- match.arg(arg = mode) return(NullImage(mode = mode)) } #' @importFrom plotly raster2uri #' @importFrom grDevices as.raster #' @importFrom grid rasterGrob unit #' #' @rdname GetImage #' @concept objects #' @concept spatial #' @method GetImage VisiumV1 #' @export #' GetImage.VisiumV1 <- function( object, mode = c('grob', 'raster', 'plotly', 'raw'), ... ) { mode <- match.arg(arg = mode) image <- slot(object = object, name = 'image') image <- switch( EXPR = mode, 'grob' = rasterGrob( image = image, width = unit(x = 1, units = 'npc'), height = unit(x = 1, units = 'npc') ), 'raster' = as.raster(x = image), 'plotly' = list( source = raster2uri(r = GetImage(object = object, mode = 'raster')), xref = 'x', yref = 'y', # x = -7, # y = -7, sizex = ncol(x = object), sizey = nrow(x = object), sizing = 'stretch', opacity = 1, layer = 'below' ), 'raw' = image, stop("Unknown image mode: ", mode, call. 
= FALSE) ) return(image) } #' Get Tissue Coordinates #' #' @inheritParams SeuratObject::GetTissueCoordinates #' #' @rdname GetTissueCoordinates #' @method GetTissueCoordinates SlideSeq #' @concept objects #' @concept spatial #' @export #' #' @seealso \code{\link[SeuratObject:GetTissueCoordinates]{SeuratObject::GetTissueCoordinates}} #' GetTissueCoordinates.SlideSeq <- function(object, ...) { coords <- slot(object = object, name = 'coordinates') colnames(x = coords) <- c('x', 'y') # coords$y <- -rev(x = coords$y) + 1 # coords$y <- FlipCoords(x = coords$y) coords$cells <- rownames(x = coords) return(coords) } #' @param qhulls return qhulls instead of centroids #' #' @rdname GetTissueCoordinates #' @method GetTissueCoordinates STARmap #' @concept objects #' @concept spatial #' @export #' GetTissueCoordinates.STARmap <- function(object, qhulls = FALSE, ...) { if (qhulls) { return(slot(object = object, name = 'qhulls')) } return(slot(object = object, name = 'coordinates')) } #' @param scale A factor to scale the coordinates by; choose from: 'tissue', #' 'fiducial', 'hires', 'lowres', or \code{NULL} for no scaling #' @param cols Columns of tissue coordinates data.frame to pull #' #' @rdname GetTissueCoordinates #' @method GetTissueCoordinates VisiumV1 #' @concept objects #' @concept spatial #' @export #' GetTissueCoordinates.VisiumV1 <- function( object, scale = 'lowres', cols = c('imagerow', 'imagecol'), ... ) { cols <- cols %||% colnames(x = slot(object = object, name = 'coordinates')) if (!is.null(x = scale)) { coordinates <- slot(object = object, name = 'coordinates')[, c('imagerow', 'imagecol')] scale <- match.arg(arg = scale, choices = c('spot', 'fiducial', 'hires', 'lowres')) scale.use <- ScaleFactors(object = object)[[scale]] coordinates <- coordinates * scale.use } else { coordinates <- slot(object = object, name = 'coordinates')[, cols] } return(coordinates) } #' Get Variable Feature Information #' #' Get variable feature information from \code{\link{SCTAssay}} objects #' #' @inheritParams SeuratObject::HVFInfo #' #' @export #' @method HVFInfo SCTAssay #' #' @seealso \code{\link[SeuratObject]{HVFInfo}} #' #' @examples #' # Get the HVF info directly from an SCTAssay object #' pbmc_small <- SCTransform(pbmc_small) #' HVFInfo(pbmc_small[["SCT"]], selection.method = 'sct')[1:5, ] #' HVFInfo.SCTAssay <- function(object, selection.method, status = FALSE, ...) { CheckDots(...) 
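  # Accept common aliases for selection.method: dispersion-style names
  # ('mean.var.plot', 'dispersion', 'disp') collapse to 'mvp' and
  # 'sctransform' collapses to 'sct' before the SCT feature attributes are
  # pulled from the model results.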
disp.methods <- c('mean.var.plot', 'dispersion', 'disp') if (tolower(x = selection.method) %in% disp.methods) { selection.method <- 'mvp' } selection.method <- switch( EXPR = tolower(x = selection.method), 'sctransform' = 'sct', selection.method ) vars <- c('gmean', 'variance', 'residual_variance') hvf.info <- SCTResults(object = object, slot = "feature.attributes")[,vars] if (status) { hvf.info$variable <- FALSE hvf.info[VariableFeatures(object = object), "variable"] <- TRUE } return(hvf.info) } #' Get Spot Radius #' #' @inheritParams SeuratObject::Radius #' #' @rdname Radius #' @concept objects #' @concept spatial #' @method Radius SlideSeq #' @export #' #' @seealso \code{\link[SeuratObject:Radius]{SeuratObject::Radius}} #' Radius.SlideSeq <- function(object) { return(0.005) } #' @rdname Radius #' @concept objects #' @concept spatial #' @method Radius STARmap #' @export #' Radius.STARmap <- function(object) { return(NULL) } #' @rdname Radius #' @concept objects #' @concept spatial #' @method Radius VisiumV1 #' @export #' Radius.VisiumV1 <- function(object) { return(slot(object = object, name = 'spot.radius')) } #' @rdname RenameCells #' @export #' @concept objects #' @method RenameCells SCTAssay #' RenameCells.SCTAssay <- function(object, new.names = NULL, ...) { CheckDots(...) old.names <- Cells(x = object) names(x = new.names) <- old.names cell.attributes <- SCTResults(object = object, slot = "cell.attributes") if (length(x = cell.attributes) > 0) { if (is.data.frame(x = cell.attributes)) { old.names <- rownames(x = cell.attributes) rownames(x = cell.attributes) <- unname(obj = new.names[old.names]) } else { cell.attributes <- lapply( X = cell.attributes, FUN = function(x) { old.names <- rownames(x = x) rownames(x = x) <- unname(obj = new.names[old.names]) return(x) } ) } SCTResults(object = object, slot = "cell.attributes") <- cell.attributes } new.names <- unname(obj = new.names) object <- NextMethod() return(object) } #' Rename Cells in an Object #' #' @inheritParams SeuratObject::RenameCells #' #' @rdname RenameCells #' @concept objects #' @method RenameCells SlideSeq #' @export #' #' @seealso \code{\link[SeuratObject:RenameCells]{SeuratObject::RenameCells}} #' RenameCells.SlideSeq <- function(object, new.names = NULL, ...) { return(RenameCells.VisiumV1(object = object, new.names = new.names)) } #' @rdname RenameCells #' @concept objects #' @method RenameCells STARmap #' @export #' RenameCells.STARmap <- function(object, new.names = NULL, ...) { names(x = new.names) <- Cells(x = object) object <- RenameCells.VisiumV1(object = object, new.names = new.names) qhulls <- GetTissueCoordinates(object = object, qhull = TRUE) qhulls$cell <- new.names[qhulls$cell] slot(object = object, name = "qhulls") <- qhulls return(object) } #' @rdname RenameCells #' @concept objects #' @method RenameCells VisiumV1 #' @export #' RenameCells.VisiumV1 <- function(object, new.names = NULL, ...) { if (is.null(x = new.names)) { return(object) } else if (length(x = new.names) != length(x = Cells(x = object))) { stop("Wrong number of cell/spot names", call. = FALSE) } names(x = new.names) <- Cells(x = object) coordinates <- GetTissueCoordinates(object = object, scale = NULL, cols = NULL) rownames(x = coordinates) <- new.names[rownames(x = coordinates)] slot(object = object, name = 'coordinates') <- coordinates return(object) } #' @rdname SCTResults #' @export #' @method SCTResults SCTModel #' SCTResults.SCTModel <- function(object, slot, ...) { CheckDots(...) 
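  # For a single SCTModel the requested slot is returned directly; the SCTAssay
  # method defined further down wraps this per model and may return a named
  # list with one entry per SCT model.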
slots.use <- c('feature.attributes', 'cell.attributes', 'clips','umi.assay', 'model', 'arguments', 'median_umi') if (!slot %in% slots.use) { stop( "'slot' must be one of ", paste(slots.use, collapse = ', '), call. = FALSE ) } return(slot(object = object, name = slot)) } #' @rdname SCTResults #' @concept objects #' @export #' @method SCTResults<- SCTModel #' "SCTResults<-.SCTModel" <- function(object, slot, ..., value) { slots.use <- c('feature.attributes', 'cell.attributes', 'clips','umi.assay', 'model', 'arguments', 'median_umi') if (!slot %in% slots.use) { stop( "'slot' must be one of ", paste(slots.use, collapse = ', '), call. = FALSE ) } slot(object = object, name = slot) <- value return(object) } #' @param slot Which slot to pull the SCT results from #' @param model Name of SCModel to pull result from. Available names can be #' retrieved with \code{levels}. #' #' @return Returns the value present in the requested slot for the requested #' group. If group is not specified, returns a list of slot results for each #' group unless there is only one group present (in which case it just returns #' the slot directly). #' #' @rdname SCTResults #' @concept objects #' @export #' @method SCTResults SCTAssay #' SCTResults.SCTAssay <- function(object, slot, model = NULL, ...) { CheckDots(...) slots.use <- c('feature.attributes', 'cell.attributes', 'clips', 'umi.assay', 'model', 'arguments', 'median_umi') if (!slot %in% slots.use) { stop( "'slot' must be one of ", paste(slots.use, collapse = ', '), call. = FALSE ) } model <- model %||% levels(x = object) model.list <- slot(object = object, name = "SCTModel.list")[model] results.list <- lapply(X = model.list, FUN = function(x) SCTResults(object = x, slot = slot)) if (length(x = results.list) == 1) { results.list <- results.list[[1]] } return(results.list) } #' @rdname SCTResults #' @concept objects #' @export #' @method SCTResults<- SCTAssay #' "SCTResults<-.SCTAssay" <- function(object, slot, model = NULL, ..., value) { slots.use <- c('feature.attributes', 'cell.attributes', 'clips','umi.assay', 'model', 'arguments', 'median_umi') if (!slot %in% slots.use) { stop( "'slot' must be one of ", paste(slots.use, collapse = ', '), call. = FALSE ) } model <- model %||% levels(x = object) model.list <- slot(object = object, name = "SCTModel.list")[model] if (!is.list(x = value) | is.data.frame(x = value)) { value <- list(value) } model.names <- names(x = model.list) model.list <- lapply( X = 1:length(x = model.list), FUN = function(x) { SCTResults(object = model.list[[x]], slot = slot) <- value[[x]] return(model.list[[x]]) } ) names(x = model.list) <- model.names slot(object = object, name = "SCTModel.list")[model.names] <- model.list return(object) } #' @param assay Assay in the Seurat object to pull from #' #' @rdname SCTResults #' @export #' @concept objects #' @method SCTResults Seurat #' SCTResults.Seurat <- function(object, assay = "SCT", slot, model = NULL, ...) { CheckDots(...) return(SCTResults(object = object[[assay]], slot = slot, model = model, ...)) } #' @rdname ScaleFactors #' @method ScaleFactors VisiumV1 #' @export #' @concept spatial #' ScaleFactors.VisiumV1 <- function(object, ...) { return(slot(object = object, name = 'scale.factors')) } #' @rdname ScaleFactors #' @method ScaleFactors VisiumV1 #' @export #' @concept spatial #' ScaleFactors.VisiumV1 <- function(object, ...) 
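# Returns the list of scale factors stored on the image (typically named
# 'spot', 'fiducial', 'hires', and 'lowres'); GetTissueCoordinates.VisiumV1()
# above multiplies the raw spot coordinates by one of these values to match a
# given image resolution.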
{ return(slot(object = object, name = 'scale.factors')) } #%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% # Methods for R-defined generics #%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% #' @method [ SlideSeq #' @concept objects #' @export #' "[.SlideSeq" <- function(x, i, ...) { return(subset(x = x, cells = i, ...)) } #' @method [ VisiumV1 #' @export #' "[.VisiumV1" <- function(x, i, ...) { return(subset(x = x, cells = i)) } #' @method dim SlideSeq #' @concept objects #' @export #' dim.SlideSeq <- function(x) { # return(dim(x = GetImage(object = x, mode = 'raw'))) return(c(599, 600)) } #' @method dim STARmap #' @concept objects #' @export #' dim.STARmap <- function(x) { coords <- GetTissueCoordinates(object = x) return(c( max(coords[, 1]) - min(coords[, 1]), max(coords[, 2]) - min(coords[, 2]) )) } #' @method dim VisiumV1 #' @concept objects #' @export #' dim.VisiumV1 <- function(x) { return(dim(x = GetImage(object = x)$raster)) } #' @rdname SCTAssay-class #' @name SCTAssay-class #' #' @section Get and set SCT model names: #' SCT results are named by initial run of \code{\link{SCTransform}} in order #' to keep SCT parameters straight between runs. When working with merged #' \code{SCTAssay} objects, these model names are important. \code{levels} #' allows querying the models present. \code{levels<-} allows the changing of #' the names of the models present, useful when merging \code{SCTAssay} objects. #' Note: unlike normal \code{\link[base]{levels<-}}, \code{levels<-.SCTAssay} #' allows complete changing of model names, not reordering. #' #' @param x An \code{SCTAssay} object #' #' @return \code{levels}: SCT model names #' #' @export #' @concept objects #' @method levels SCTAssay #' #' @examples #' \dontrun{ #' # Query and change SCT model names #' levels(pbmc_small[['SCT']]) #' levels(pbmc_small[['SCT']]) <- '3' #' levels(pbmc_small[['SCT']]) #' } #' levels.SCTAssay <- function(x) { return(names(x = slot(object = x, name = "SCTModel.list"))) } #' @rdname SCTAssay-class #' @name SCTAssay-class #' #' @param value New levels, must be in the same order as the levels present #' #' @return \code{levels<-}: \code{x} with updated SCT model names #' #' @export #' @concept objects #' @method levels<- SCTAssay #' "levels<-.SCTAssay" <- function(x, value) { value <- sapply(X = value, FUN = function(v) { if (suppressWarnings(expr = !is.na(x = as.numeric(x = v)))) { warning("SCTModel groups cannot be number, group is added in front of ", v) v <- paste0("group", v) } return (v) }) # Get current levels levels <- levels(x = x) if (length(x = value) != length(x = levels)) { stop("Must provide a vector of length ", length(x = levels), " as new levels.", call. = FALSE) } names(x = slot(object = x, name = "SCTModel.list")) <- value return(x) } #' Merge SCTAssay objects #' #' @inheritParams SeuratObject::merge #' @param na.rm If na.rm = TRUE, this will only preserve residuals that are #' present in all SCTAssays being merged. Otherwise, missing residuals will be #' populated with NAs. #' @export #' @method merge SCTAssay #' @concept objects #' merge.SCTAssay <- function( x = NULL, y = NULL, add.cell.ids = NULL, merge.data = TRUE, na.rm = TRUE, ... 
) { assays <- c(x, y) parent.call <- grep(pattern = "merge.Seurat", x = sys.calls()) if (length(x = parent.call) > 0) { # Try and fill in missing residuals if called in the context of merge.Seurat all.features <- unique(x = unlist(x = lapply(X = assays, FUN = function(assay) { if (inherits(x = x, what = "SCTAssay")) { return(rownames(x = GetAssayData(object = assay, slot = "scale.data"))) } }))) if (!is.null(all.features)) { assays <- lapply(X = 1:length(x = assays), FUN = function(assay) { if (inherits(x = assays[[assay]], what = "SCTAssay")) { parent.environ <- sys.frame(which = parent.call[1]) seurat.object <- parent.environ$objects[[assay]] seurat.object <- suppressWarnings(expr = GetResidual(object = seurat.object, features = all.features, assay = parent.environ$assay, verbose = FALSE)) return(seurat.object[[parent.environ$assay]]) } return(assays[[assay]]) }) } } sct.check <- sapply(X = assays, FUN = function(x) inherits(x = x, what = "SCTAssay")) if (any(!sct.check)) { warning("Attempting to merge an SCTAssay with another Assay type \n", "Converting all to standard Assay objects.", call. = FALSE) assays <- lapply(1:length(x = assays), FUN = function(x) { if (sct.check[x]) { assays[[x]] <- as(object = assays[[x]], Class = "Assay") } return(assays[[x]]) }) combined.assay <- merge( x = assays[[1]], y = assays[2:length(x = assays)], add.cell.ids = add.cell.ids, merge.data = merge.data ) return(combined.assay) } combined.assay <- NextMethod() all.levels <- unlist(x = lapply(X = assays, FUN = levels)) while (anyDuplicated(x = all.levels)) { levels.duplicate <- which(x = duplicated(x = all.levels)) all.levels <- sapply(X = 1:length(x = all.levels), FUN = function(l) { if (l %in% levels.duplicate) { return(tryCatch( expr = as.numeric(x = all.levels[l]) + 1, warning = function(...) { make.unique(names = all.levels)[l] }, error = function(...){ make.unique(names = all.levels)[l] } )) } else { return(all.levels[l]) } }) } scale.data <- lapply(X = assays, FUN = function(x) { dat <- GetAssayData(object = x, slot = "scale.data") if (ncol(x = dat) == 0) { dat <- matrix(ncol = ncol(x = x)) } return(dat) }) all.features <- lapply(X = scale.data, FUN = rownames) if (na.rm) { # merge intersection of possible residuals scaled.features <- names(x = which(x = table(x = unlist(x = all.features)) == length(x = assays))) if (length(x = scaled.features) == 0) { scale.data <- list(new(Class = "matrix")) } else { scale.data <- lapply(X = scale.data, FUN = function(x) x[scaled.features, ]) } } else { scaled.features <- unique(x = unlist(x = all.features)) scale.data <- lapply(X = 1:length(x = scale.data), FUN = function(x) { na.features <- setdiff(x = scaled.features, y = rownames(x = scale.data[[x]])) na.mat <- matrix( data = NA, nrow = length(x = na.features), ncol = ncol(x = assays[[x]]), dimnames = list(na.features, colnames(x = assays[[x]])) ) return(rbind(scale.data[[x]], na.mat)[scaled.features, ]) }) } scale.data <- do.call(what = cbind, args = scale.data) combined.assay <- SetAssayData(object = combined.assay, slot = "scale.data", new.data = scale.data) model.list <- unlist(x = lapply( X = assays, FUN = slot, name = "SCTModel.list" )) names(x = model.list) <- all.levels model.list <- model.list %||% list() combined.assay <- new( Class = "SCTAssay", combined.assay, SCTModel.list = model.list ) return(combined.assay) } #' Subset an AnchorSet object #' #' @inheritParams base::subset #' @param score.threshold Only anchor pairs with scores greater than this value #' are retained. 
#' @param disallowed.dataset.pairs Remove any anchors formed between the #' provided pairs. E.g. \code{list(c(1, 5), c(1, 2))} filters out any anchors between #' datasets 1 and 5 and datasets 1 and 2. #' @param dataset.matrix Provide a binary matrix specifying whether a dataset #' pair is allowable (1) or not (0). Should be a dataset x dataset matrix. #' @param group.by Grouping variable to determine allowable ident pairs #' @param disallowed.ident.pairs Remove any anchors formed between provided #' ident pairs. E.g. \code{list(c("CD4", "CD8"), c("B-cell", "T-cell"))} #' @param ident.matrix Provide a binary matrix specifying whether an ident pair #' is allowable (1) or not (0). Should be an ident x ident symmetric matrix #' #' @return Returns an \code{\link{AnchorSet}} object with specified anchors #' filtered out #' #' @export #' @method subset AnchorSet #' @concept objects #' subset.AnchorSet <- function( x, score.threshold = NULL, disallowed.dataset.pairs = NULL, dataset.matrix = NULL, group.by = NULL, disallowed.ident.pairs = NULL, ident.matrix = NULL, ... ) { if (!is.null(x = disallowed.dataset.pairs) && !is.null(x = dataset.matrix)) { stop("Please use either disallowed.dataset.pairs OR dataset.matrix, not both.") } # Filter based on scores if (!is.null(x = score.threshold)) { if (score.threshold > 1 | score.threshold < 0) { stop( "Anchors are scored on a scale between 0 and 1. Please provide a value", " in that range to score.threshold." ) } anchors <- slot(object = x, name = "anchors") anchors <- anchors[anchors[, 'score'] > score.threshold, , drop = FALSE] slot(object = x, name = "anchors") <- anchors } object.names <- names(x = slot(object = x, name = "object.list")) num.obs <- length(x = object.names) # Filter based on dataset pairings if (!is.null(x = disallowed.dataset.pairs)) { dataset.matrix <- matrix(data = 1, nrow = num.obs, ncol = num.obs) for(i in 1:length(x = disallowed.dataset.pairs)) { pair <- disallowed.dataset.pairs[[i]] if (length(x = pair) != 2) { stop("Please ensure all list items in disallowed.dataset.pairs are of length 2.") } if (any(pair %in% object.names)) { pair[which(pair %in% object.names)] <- sapply( X = pair[which(pair %in% object.names)], FUN = function(x) { which(object.names == x) }) } pair <- as.numeric(x = pair) dataset.matrix[pair[1], pair[2]] <- 0 } } if (!is.null(x = dataset.matrix)) { if (any(dim(x = dataset.matrix) != c(num.obs, num.obs))){ stop("Please provide a dataset.matrix that is ", num.obs, " x ", num.obs, ".") } anchors <- slot(object = x, name = "anchors") pairs <- which(dataset.matrix == 0, arr.ind = TRUE) for (i in 1:nrow(x = pairs)) { anchors <- anchors[-which(x = anchors$dataset1 == pairs[i, 1] & anchors$dataset2 == pairs[i, 2]), ] anchors <- anchors[-which(x = anchors$dataset1 == pairs[i, 2] & anchors$dataset2 == pairs[i, 1]), ] } slot(object = x, name = "anchors") <- anchors } # Filter based on ident pairings if (!is.null(x = group.by)) { anchors <- AnnotateAnchors(anchors = x, vars = group.by) if (!is.null(x = disallowed.ident.pairs) && !is.null(x = ident.matrix)) { stop("Please use either disallowed.ident.pairs OR ident.matrix, not both.") } unique.ids <- unique(x = c( as.character(x = anchors[, paste0("cell1.", group.by)]), as.character(x = anchors[, paste0("cell2.", group.by)])) ) unique.ids <- unique.ids[!is.na(x = unique.ids)] num.ids <- length(x = unique.ids) if (!is.null(x = disallowed.ident.pairs)) { ident.matrix <- matrix(data = 1, nrow = num.ids, ncol = num.ids) rownames(x = ident.matrix) <- unique.ids colnames(x = 
ident.matrix) <- unique.ids for(i in 1:length(x = disallowed.ident.pairs)) { pair <- disallowed.ident.pairs[[i]] if (length(x = pair) != 2) { stop("Please ensure all list items in disallowed.dataset.pairs are of length 2.") } ident.matrix[pair[1], pair[2]] <- 0 } } if (!is.null(x = ident.matrix)) { if (any(dim(x = ident.matrix) != c(num.ids, num.ids))){ stop("Please provide a dataset.matrix that is ", num.ids, " x ", num.ids, ".") } to.remove <- c() pairs <- which(ident.matrix == 0, arr.ind = TRUE) for (i in 1:nrow(x = pairs)) { id1 <- rownames(x = ident.matrix)[pairs[i, 1]] id2 <- colnames(x = ident.matrix)[pairs[i, 2]] to.remove <- c(to.remove, which(x = anchors[, paste0("cell1.", group.by)] == id1 & anchors[, paste0("cell2.", group.by)] == id2)) to.remove <- c(to.remove, which(x = anchors[, paste0("cell1.", group.by)] == id2 & anchors[, paste0("cell2.", group.by)] == id1)) } anchors <- slot(object = x, name = "anchors") anchors <- anchors[-to.remove, ] slot(object = x, name = "anchors") <- anchors } } return(x) } #' @export #' @method subset SCTAssay #' @concept objects #' subset.SCTAssay <- function(x, cells = NULL, features = NULL, ...) { x <- NextMethod() models <- levels(x = x) for (m in models) { attr <- SCTResults(object = x, slot = "cell.attributes", model = m) attr <- attr[intersect(x = rownames(x = attr), y = Cells(x = x)), , drop = FALSE] SCTResults(object = x, slot = "cell.attributes", model = m) <- attr } return(x) } #' @method subset SlideSeq #' @concept objects #' @export #' subset.SlideSeq <- function(x, cells, ...) { x <- subset.VisiumV1(x = x, cells = cells, ...) return(x) } #' @method subset STARmap #' @concept objects #' @export #' subset.STARmap <- function(x, cells, ...) { x <- subset.VisiumV1(x = x, cells = cells, ...) qhulls <- GetTissueCoordinates(object = x, qhulls = TRUE) qhulls <- qhulls[qhulls$cell %in% cells, ] slot(object = x, name = 'qhulls') <- qhulls return(x) } #' @method subset VisiumV1 #' @concept objects #' @export #' subset.VisiumV1 <- function(x, cells, ...) { coordinates <- GetTissueCoordinates(object = x, scale = NULL, cols = NULL) cells <- cells[cells %in% rownames(x = coordinates)] coordinates <- coordinates[cells, ] slot(object = x, name = 'coordinates') <- coordinates return(x) } #' Update pre-V4 Assays generated with SCTransform in the Seurat to the new #' SCTAssay class # #' @param object A Seurat object #' @export #' @concept objects #' @return A Seurat object with updated SCTAssays #' UpdateSCTAssays <- function(object) { assays <- Assays(object = object) for (assay in assays) { if (IsSCT(assay = object[[assay]]) && !inherits(x = object[[assay]], what = "SCTAssay")) { object[[assay]] <- as(object = object[[assay]], Class = "SCTAssay") } } return(object) } #%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% # S4 methods #%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% #' @rdname SCTAssay-class #' @name SCTAssay-class #' #' @section Creating an \code{SCTAssay} from an \code{Assay}: #' Conversion from an \code{Assay} object to an \code{SCTAssay} object by #' is done by adding the additional slots to the object. 
If \code{from} has #' results generated by \code{\link{SCTransform}} from Seurat v3.0.0 to v3.1.1, #' the conversion will automagically fill the new slots with the data #' setAs( from = 'Assay', to = 'SCTAssay', def = function(from) { object.list <- sapply( X = slotNames(x = from), FUN = slot, object = from, simplify = FALSE, USE.NAMES = TRUE ) object.list <- c( list('Class' = 'SCTAssay'), object.list ) if (IsSCT(assay = from)) { vst.slots <- c('vst.set', 'vst.out') vst.use <- vst.slots[vst.slots %in% names(x = Misc(object = from))][1] vst.res <- Misc(object = from, slot = vst.use) umi.assay <- Misc(object = from, slot = "umi.assay") if (vst.use == 'vst.out') { vst.res <- list(vst.res) umi.assay <- list(umi.assay) } if (length(x = vst.res) == 0) { vst.res <- list() } else if (length(x = vst.res) > 0) { vst.res <- lapply( X = 1:length(x = vst.res), FUN = function(i) { vst.res[[i]]$umi.assay <- umi.assay[[i]] return(PrepVSTResults( vst.res = vst.res[[i]], cell.names = colnames(x = from) )) } ) names(x = vst.res) <- paste0("model", 1:length(x = vst.res)) } object.list$misc[[vst.use]] <- NULL object.list$SCTModel.list <- vst.res } return(do.call(what = 'new', args = object.list)) } ) setMethod( f = 'show', signature = 'TransferAnchorSet', definition = function(object) { cat('An AnchorSet object containing', nrow(x = slot(object = object, name = "anchors")), "anchors between the reference and query Seurat objects. \n", "This can be used as input to TransferData.") } ) setMethod( f = 'show', signature = 'IntegrationAnchorSet', definition = function(object) { cat('An AnchorSet object containing', nrow(x = slot(object = object, name = "anchors")), "anchors between", length(x = slot(object = object, name = "object.list")), "Seurat objects \n", "This can be used as input to IntegrateData.") } ) setMethod( f = 'show', signature = 'ModalityWeights', definition = function(object) { cat( 'A ModalityWeights object containing modality weights between', paste(slot(object = object, name = "modality.assay"), collapse = " and "), "assays \n", "This can be used as input to FindMultiModelNeighbors.") } ) setMethod( f = 'show', signature = 'SCTModel', definition = function(object) { cat( "An sctransform model.\n", " Model formula: ", slot(object = object, name = "model"), "\n Parameters stored for", nrow(x = SCTResults(object = object, slot = "feature.attributes")), "features,", nrow(x = SCTResults(object = object, slot = "cell.attributes")), "cells") } ) #' @importFrom utils head # setMethod( f = 'show', signature = 'SCTAssay', definition = function(object) { cat('SCTAssay data with', nrow(x = object), 'features for', ncol(x = object), 'cells, and', length(x = levels(x = object)) , 'SCTModel(s) \n') if (length(x = VariableFeatures(object = object)) > 0) { top.ten <- head(x = VariableFeatures(object = object), n = 10L) top <- 'Top' variable <- 'variable' } else { top.ten <- head(x = rownames(x = object), n = 10L) top <- 'First' variable <- '' } features <- paste0( variable, ' feature', if (length(x = top.ten) != 1) {'s'}, ":\n" ) features <- gsub(pattern = '^\\s+', replacement = '', x = features) cat( top, length(x = top.ten), features, paste(strwrap(x = paste(top.ten, collapse = ', ')), collapse = '\n'), '\n' ) } ) #%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% # Internal #%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% # Internal AddMetaData defintion # # @param object An object # @param metadata A vector, list, or data.frame with metadata to 
add # @param col.name A name for meta data if not a named list or data.frame # # @return object with metadata added # .AddMetaData <- function(object, metadata, col.name = NULL) { object <- UpdateSlots(object = object) if (is.null(x = col.name) && is.atomic(x = metadata)) { stop("'col.name' must be provided for atomic metadata types (eg. vectors)") } if (inherits(x = metadata, what = c('matrix', 'Matrix'))) { metadata <- as.data.frame(x = metadata) } col.name <- col.name %||% names(x = metadata) %||% colnames(x = metadata) if (is.null(x = col.name)) { stop("No metadata name provided and could not infer it from metadata object") } object[[col.name]] <- metadata # if (class(x = metadata) == "data.frame") { # for (ii in 1:ncol(x = metadata)) { # object[[colnames(x = metadata)[ii]]] <- metadata[, ii, drop = FALSE] # } # } else { # object[[col.name]] <- metadata # } return(object) } # Find the names of collections in an object # # @return A vector with the names of slots that are a list # Collections <- function(object) { collections <- vapply( X = slotNames(x = object), FUN = function(x) { return(any(grepl(pattern = 'list', x = class(x = slot(object = object, name = x))))) }, FUN.VALUE = logical(length = 1L) ) collections <- Filter(f = isTRUE, x = collections) return(names(x = collections)) } # Calculate nCount and nFeature # # @param object An Assay object # # @return A named list with nCount and nFeature # #' @importFrom Matrix colSums # CalcN <- function(object) { if (IsMatrixEmpty(x = GetAssayData(object = object, slot = "counts"))) { return(NULL) } return(list( nCount = colSums(x = object, slot = 'counts'), nFeature = colSums(x = GetAssayData(object = object, slot = 'counts') > 0) )) } # Get the default image of an object # # Attempts to find all images associated with the default assay of the object. # If none present, finds all images present in the object. 
Returns the name of # the first image # # @param object A Seurat object # # @return The name of the default image # DefaultImage <- function(object) { object <- UpdateSlots(object = object) images <- Images(object = object, assay = DefaultAssay(object = object)) if (length(x = images) < 1) { images <- Images(object = object) } return(images[[1]]) } # Get the names of objects within a Seurat object that are of a certain class # # @param object A Seurat object # @param classes.keep A vector of names of classes to get # # @return A vector with the names of objects within the Seurat object that are of class \code{classes.keep} # #' @importFrom stats na.omit # FilterObjects <- function(object, classes.keep = c('Assay', 'DimReduc')) { object <- UpdateSlots(object = object) slots <- na.omit(object = Filter( f = function(x) { sobj <- slot(object = object, name = x) return(is.list(x = sobj) && !is.data.frame(x = sobj) && !is.package_version(x = sobj)) }, x = slotNames(x = object) )) slots <- grep(pattern = 'tools', x = slots, value = TRUE, invert = TRUE) slots <- grep(pattern = 'misc', x = slots, value = TRUE, invert = TRUE) slots.objects <- unlist( x = lapply( X = slots, FUN = function(x) { return(names(x = slot(object = object, name = x))) } ), use.names = FALSE ) object.classes <- sapply( X = slots.objects, FUN = function(i) { return(inherits(x = object[[i]], what = classes.keep)) } ) object.classes <- which(x = object.classes, useNames = TRUE) return(names(x = object.classes)) } # Find the collection of an object within a Seurat object # # @param object A Seurat object # @param name Name of object to find # # @return The collection (slot) of the object # FindObject <- function(object, name) { collections <- c( 'assays', 'graphs', 'neighbors', 'reductions', 'commands', 'images' ) object.names <- lapply( X = collections, FUN = function(x) { return(names(x = slot(object = object, name = x))) } ) names(x = object.names) <- collections object.names <- Filter(f = Negate(f = is.null), x = object.names) for (i in names(x = object.names)) { if (name %in% names(x = slot(object = object, name = i))) { return(i) } } return(NULL) } # Prepare VST results for use with SCTAssay objects # # @param vst.res Results from sctransform::vst # @param cell.names Vector of valid cell names still in object # # @return An SCTModel object. 
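#
# Illustrative use (a sketch, not run here): when converting an older SCT
# assay, as in the Assay-to-SCTAssay coercion defined with setAs() above,
# the raw sctransform::vst() output stashed in Misc() is passed through this
# helper; 'old.assay' below is a hypothetical Assay object.
#   vst.out <- Misc(object = old.assay, slot = 'vst.out')
#   model <- PrepVSTResults(vst.res = vst.out, cell.names = colnames(x = old.assay))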
# # PrepVSTResults <- function(vst.res, cell.names) { # Prepare cell attribute information cell.attrs <- vst.res$cell_attr cell.names <- intersect(x = cell.names, y = rownames(x = cell.attrs)) cell.cols <- c( 'umi', 'gene', 'log_umi', 'log_gene', 'umi_per_gene', 'log_umi_per_gene' ) cell.cols <- intersect(x = cell.cols, y = colnames(x = cell.attrs)) cell.attrs <- cell.attrs[cell.names, cell.cols, drop = FALSE] colnames(x = cell.attrs) <- gsub( pattern = 'gene', replacement = 'feature', x = colnames(x = cell.attrs) ) if (!is.null(x = vst.res$cells_step1)) { cell.attrs[, "cells_step1"] <- FALSE cells_step1 <- intersect(x = vst.res$cells_step1, y = rownames(x = cell.attrs)) cell.attrs[cells_step1, "cells_step1"] <- TRUE } # Prepare feature attribute information feature.attrs <- vst.res$gene_attr feature.cols <- c( 'detection_rate', 'gmean', 'variance', 'residual_mean', 'residual_variance' ) feature.cols <- intersect(x = feature.cols, y = colnames(x = feature.attrs)) feature.attrs <- feature.attrs[, feature.cols, drop = FALSE] feature.attrs <- cbind(feature.attrs, vst.res$model_pars_fit[rownames(feature.attrs), , drop = FALSE]) if (!is.null(x = vst.res$genes_log_gmean_step1)) { feature.attrs[,"genes_log_gmean_step1"] <- FALSE genes_step1 <- intersect( x = names(vst.res$genes_log_gmean_step1), y = rownames(feature.attrs) ) feature.attrs[genes_step1,"genes_log_gmean_step1"] <- TRUE # add parameters from step1 feature.attrs[, paste0("step1_", colnames(vst.res$model_pars))] <- NA feature.attrs[genes_step1, paste0("step1_", colnames(vst.res$model_pars))] <- vst.res$model_pars[genes_step1,] } # Prepare clipping information clips <- list( 'vst' = vst.res$arguments$res_clip_range, 'sct' = vst.res$arguments$sct.clip.range ) median_umi <- NA # check if a custom scale_factor was provided to vst() if ("scale_factor" %in% names(vst.res$arguments)){ median_umi <- vst.res$arguments$scale_factor } if (is.na(median_umi)) { if ("umi" %in% colnames(x = cell.attrs)) { median_umi <- median(cell.attrs$umi) } else if ("log_umi" %in% colnames(x = cell.attrs)) { median_umi <- median(10 ^ cell.attrs$log_umi) } } vst.res.SCTModel <- SCTModel( feature.attributes = feature.attrs, cell.attributes = cell.attrs, clips = clips, umi.assay = vst.res$umi.assay %||% "RNA", model = vst.res$model_str, arguments = vst.res$arguments, median_umi = median_umi ) return(vst.res.SCTModel) } # Return a null image # # @param mode Image representation to return # see \code{\link{GetImage}} for more details # #' @importFrom grid nullGrob #' @importFrom grDevices as.raster # NullImage <- function(mode) { image <- switch( EXPR = mode, 'grob' = nullGrob(), 'raster' = as.raster(x = new(Class = 'matrix')), 'plotly' = list('visible' = FALSE), 'raw' = NULL, stop("Unknown image mode: ", mode, call. 
= FALSE) ) return(image) } # Check to see if projected loadings have been set # # @param object a DimReduc object # # @return TRUE if proejcted loadings have been set, else FALSE # Projected <- function(object) { projected.dims <- dim(x = slot(object = object, name = 'feature.loadings.projected')) if (all(projected.dims == 1)) { return(!all(is.na(x = slot(object = object, name = 'feature.loadings.projected')))) } return(!all(projected.dims == 0)) } # Subset cells in vst data # @param sct.info A vst.out list # @param cells vector of cells to retain # @param features vector of features to retain SubsetVST <- function(sct.info, cells, features) { cells.keep <- intersect(x = cells, y = rownames(x = sct.info$cell_attr)) sct.info$cell_attr <- sct.info$cell_attr[cells.keep, ] # find which subset of features are in the SCT assay feat.keep <- intersect(x = features, y = rownames(x = sct.info$gene_attr)) sct.info$gene_attr <- sct.info$gene_attr[feat.keep, ] return(sct.info) } # Get the top # # @param data Data to pull the top from # @param num Pull top \code{num} # @param balanced Pull even amounts of from positive and negative values # # @return The top \code{num} # @seealso \{code{\link{TopCells}}} \{code{\link{TopFeatures}}} # #' @importFrom utils head tail # Top <- function(data, num, balanced) { nr <- nrow(x = data) if (num > nr) { warning("Requested number is larger than the number of available items (", nr, "). Setting to ", nr , ".", call. = FALSE) num <- nr } if (num == 1) { balanced <- FALSE } top <- if (balanced) { num <- round(x = num / 2) data <- data[order(data, decreasing = TRUE), , drop = FALSE] positive <- head(x = rownames(x = data), n = num) negative <- rev(x = tail(x = rownames(x = data), n = num)) # remove duplicates if (positive[num] == negative[num]) { negative <- negative[-num] } list(positive = positive, negative = negative) } else { data <- data[rev(x = order(abs(x = data))), , drop = FALSE] top <- head(x = rownames(x = data), n = num) top[order(data[top, ])] } return(top) } # Update Seurat assay # # @param old.assay Seurat2 assay # @param assay Name to store for assay in new object # UpdateAssay <- function(old.assay, assay){ cells <- colnames(x = old.assay@data) counts <- old.assay@raw.data data <- old.assay@data if (!inherits(x = counts, what = 'dgCMatrix')) { counts <- as(object = as.matrix(x = counts), Class = 'dgCMatrix') } if (!inherits(x = data, what = 'dgCMatrix')) { data <- as(object = as.matrix(x = data), Class = 'dgCMatrix') } new.assay <- new( Class = 'Assay', counts = counts[, cells], data = data, scale.data = old.assay@scale.data %||% new(Class = 'matrix'), meta.features = data.frame(row.names = rownames(x = counts)), var.features = old.assay@var.genes, key = paste0(assay, "_") ) return(new.assay) } # Update dimension reduction # # @param old.dr Seurat2 dimension reduction slot # @param assay.used Name of assay used to compute dimension reduction # UpdateDimReduction <- function(old.dr, assay) { new.dr <- list() for (i in names(x = old.dr)) { cell.embeddings <- old.dr[[i]]@cell.embeddings %||% new(Class = 'matrix') feature.loadings <- old.dr[[i]]@gene.loadings %||% new(Class = 'matrix') stdev <- old.dr[[i]]@sdev %||% numeric() misc <- old.dr[[i]]@misc %||% list() new.jackstraw <- UpdateJackstraw(old.jackstraw = old.dr[[i]]@jackstraw) old.key <- old.dr[[i]]@key if (length(x = old.key) == 0) { old.key <- gsub(pattern = "(.+?)(([0-9]+).*)", replacement = "\\1", x = colnames(cell.embeddings)[[1]]) if (length(x = old.key) == 0) { old.key <- i } } new.key <- 
suppressWarnings(expr = UpdateKey(key = old.key)) colnames(x = cell.embeddings) <- gsub( pattern = old.key, replacement = new.key, x = colnames(x = cell.embeddings) ) colnames(x = feature.loadings) <- gsub( pattern = old.key, replacement = new.key, x = colnames(x = feature.loadings) ) new.dr[[i]] <- new( Class = 'DimReduc', cell.embeddings = as(object = cell.embeddings, Class = 'matrix'), feature.loadings = as(object = feature.loadings, Class = 'matrix'), assay.used = assay, stdev = as(object = stdev, Class = 'numeric'), key = as(object = new.key, Class = 'character'), jackstraw = new.jackstraw, misc = as(object = misc, Class = 'list') ) } return(new.dr) } # Update jackstraw # # @param old.jackstraw # UpdateJackstraw <- function(old.jackstraw) { if (is.null(x = old.jackstraw)) { new.jackstraw <- new( Class = 'JackStrawData', empirical.p.values = new(Class = 'matrix'), fake.reduction.scores = new(Class = 'matrix'), empirical.p.values.full = new(Class = 'matrix'), overall.p.values = new(Class = 'matrix') ) } else { if (.hasSlot(object = old.jackstraw, name = 'overall.p.values')) { overall.p <- old.jackstraw@overall.p.values %||% new(Class = 'matrix') } else { overall.p <- new(Class = 'matrix') } new.jackstraw <- new( Class = 'JackStrawData', empirical.p.values = old.jackstraw@emperical.p.value %||% new(Class = 'matrix'), fake.reduction.scores = old.jackstraw@fake.pc.scores %||% new(Class = 'matrix'), empirical.p.values.full = old.jackstraw@emperical.p.value.full %||% new(Class = 'matrix'), overall.p.values = overall.p ) } return(new.jackstraw) } # Update a Key # # @param key A character to become a Seurat Key # # @return An updated Key that's valid for Seurat # UpdateKey <- function(key) { if (grepl(pattern = '^[[:alnum:]]+_$', x = key)) { return(key) } else { new.key <- regmatches( x = key, m = gregexpr(pattern = '[[:alnum:]]+', text = key) ) new.key <- paste0(paste(unlist(x = new.key), collapse = ''), '_') if (new.key == '_') { new.key <- paste0(RandomName(length = 3), '_') } warning( "Keys should be one or more alphanumeric characters followed by an underscore, setting key from ", key, " to ", new.key, call. = FALSE, immediate. = TRUE ) return(new.key) } } # Update slots in an object # # @param object An object to update # # @return \code{object} with the latest slot definitions # UpdateSlots <- function(object) { object.list <- sapply( X = slotNames(x = object), FUN = function(x) { return(tryCatch( expr = slot(object = object, name = x), error = function(...) { return(NULL) } )) }, simplify = FALSE, USE.NAMES = TRUE ) object.list <- Filter(f = Negate(f = is.null), x = object.list) object.list <- c('Class' = class(x = object)[1], object.list) object <- do.call(what = 'new', args = object.list) for (x in setdiff(x = slotNames(x = object), y = names(x = object.list))) { xobj <- slot(object = object, name = x) if (is.vector(x = xobj) && !is.list(x = xobj) && length(x = xobj) == 0) { slot(object = object, name = x) <- vector(mode = class(x = xobj), length = 1L) } } return(object) } # Pulls the proper data matrix for merging assay data. If the slot is empty, will return an empty # matrix with the proper dimensions from one of the remaining data slots. # # @param assay Assay to pull data from # @param slot Slot to pull from # # @return Returns the data matrix if present (i.e.) not 0x0. 
Otherwise, returns an # appropriately sized empty sparse matrix # #' @importFrom Matrix Matrix # ValidateDataForMerge <- function(assay, slot) { mat <- GetAssayData(object = assay, slot = slot) if (any(dim(x = mat) == c(0, 0))) { slots.to.check <- setdiff(x = c("counts", "data", "scale.data"), y = slot) for (ss in slots.to.check) { data.dims <- dim(x = GetAssayData(object = assay, slot = ss)) data.slot <- ss if (!any(data.dims == c(0, 0))) { break } } if (any(data.dims == c(0, 0))) { stop("The counts, data, and scale.data slots are all empty for the provided assay.") } mat <- Matrix( data = 0, nrow = data.dims[1], ncol = data.dims[2], dimnames = dimnames(x = GetAssayData(object = assay, slot = data.slot)) ) mat <- as(object = mat, Class = "dgCMatrix") } return(mat) } Seurat/R/mixscape.R0000644000176200001440000014016714152476164013675 0ustar liggesusers#' @include generics.R #' NULL #%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% # Functions #%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% #' Calculate a perturbation Signature #' #' Function to calculate perturbation signature for pooled CRISPR screen datasets. #' For each target cell (expressing one target gRNA), we identified 20 cells #' from the control pool (non-targeting cells) with the most similar mRNA #' expression profiles. The perturbation signature is calculated by subtracting the #' averaged mRNA expression profile of the non-targeting neighbors from the mRNA #' expression profile of the target cell. #' #' @param object An object of class Seurat. #' @param assay Name of Assay PRTB signature is being calculated on. #' @param features Features to compute PRTB signature for. Defaults to the #' variable features set in the assay specified. #' @param slot Data slot to use for PRTB signature calculation. #' @param gd.class Metadata column containing target gene classification. #' @param nt.cell.class Non-targeting gRNA cell classification identity. #' @param split.by Provide metadata column if multiple biological replicates #' exist to calculate PRTB signature for every replicate separately. #' @param num.neighbors Number of nearest neighbors to consider. #' @param ndims Number of dimensions to use from dimensionality reduction method. #' @param reduction Reduction method used to calculate nearest neighbors. #' @param new.assay.name Name for the new assay. #' @param verbose Display progress + messages #' @return Returns a Seurat object with a new assay added containing the #' perturbation signature for all cells in the data slot. #' #' @importFrom RANN nn2 #' @export #' @concept mixscape #' CalcPerturbSig <- function( object, assay = NULL, features = NULL, slot = "data", gd.class = "guide_ID", nt.cell.class = "NT", split.by = NULL, num.neighbors = NULL, reduction = "pca", ndims = 15, new.assay.name = "PRTB", verbose = TRUE ) { assay <- assay %||% DefaultAssay(object = object ) if (is.null(x = reduction)) { stop('Please provide dimensionality reduction name.') } if (is.null(x = num.neighbors)) { stop("Please specify number of nearest neighbors to consider") } if (is.null(x = ndims)) { stop("Please provide number of ", reduction, " dimensions to consider") } features <- features %||% VariableFeatures(object = object[[assay]]) if (length(x = features) == 0) { features <- rownames(x = GetAssayData(object = object[[assay]], slot = slot)) } if (! 
is.null(x = split.by)) { Idents(object = object) <- split.by } else { Idents(object = object) <- "rep1" } replicate <- unique(x = Idents(object = object)) all_diff <- list() all_nt_cells <- Cells(x = object)[which(x = object[[]][gd.class] == nt.cell.class)] all_neighbors <- list() for (r in replicate) { if (verbose) { message("Processing ", r) } all_cells <- WhichCells(object = object, idents = r) nt_cells <- intersect(x = all_nt_cells, all_cells) # get pca cell embeddings all_mtx <- Embeddings(object = object, reduction = reduction)[all_cells, ] nt_mtx <- Embeddings(object = object, reduction = reduction)[nt_cells, ] # run nn2 to find the 20 nearest NT neighbors for all cells. Use the same # number of PCs as the ones you used for umap neighbors <- NNHelper( data = nt_mtx[, 1:ndims], query = all_mtx[, 1:ndims], k = num.neighbors, method = "rann" ) diff <- PerturbDiff( object = object, assay = assay, slot = slot, all_cells = all_cells, nt_cells = nt_cells, features = features, neighbors = neighbors, verbose = verbose ) all_diff[[r]] <- diff all_neighbors[[make.names(names = paste0(new.assay.name, "_", r))]] <- neighbors } slot(object = object, name = "tools")[[paste("CalcPerturbSig", assay, reduction, sep = ".")]] <- all_neighbors all_diff <- do.call(what = cbind, args = all_diff) prtb.assay <- suppressWarnings( # TODO: restore once check.matrix is in SeuratObject # expr = CreateAssayObject( # data = all_diff[, colnames(x = object)], # min.cells = -Inf, # min.features = -Inf, # check.matrix = FALSE # ) expr = CreateAssayObject( data = all_diff[, colnames(x = object)], min.cells = -Inf, min.features = -Inf ) ) object[[new.assay.name]] <- prtb.assay object <- LogSeuratCommand(object = object) return(object) } #' DE and EnrichR pathway visualization barplot #' #' @inheritParams FindMarkers #' @param object Name of object class Seurat. #' @param ident.1 Cell class identity 1. #' @param ident.2 Cell class identity 2. #' @param balanced Option to display pathway enrichments for both negative and #' positive DE genes.If false, only positive DE gene will be displayed. #' @param max.genes Maximum number of genes to use as input to enrichR. #' @param p.val.cutoff Cutoff to select DE genes. #' @param cols A list of colors to use for barplots. #' @param enrich.database Database to use from enrichR. #' @param num.pathway Number of pathways to display in barplot. #' @param return.gene.list Return list of DE genes #' #' @return Returns one (only enriched) or two (both enriched and depleted) #' barplots with the top enriched/depleted GO terms from EnrichR. #' #' @importFrom ggplot2 ggplot geom_bar geom_density coord_flip scale_fill_manual #' ylab ggtitle theme_classic theme element_text #' @importFrom patchwork wrap_plots #' #' @export #' @concept mixscape DEenrichRPlot <- function( object, ident.1 = NULL, ident.2 = NULL, balanced = TRUE, logfc.threshold = 0.25, assay = NULL, max.genes, test.use = 'wilcox', p.val.cutoff = 0.05, cols = NULL, enrich.database = NULL, num.pathway = 10, return.gene.list = FALSE, ... ) { enrichr.installed <- PackageCheck("enrichR", error = FALSE) if (!enrichr.installed[1]) { stop( "Please install the enrichR package to use DEenrichRPlot", "\nThis can be accomplished with the following command: ", "\n----------------------------------------", "\ninstall.packages('enrichR')", "\n----------------------------------------", call. 
= FALSE ) } if (is.null(x = enrich.database)) { stop("Please specify the name of enrichR database to use") } if (!is.numeric(x = max.genes)) { stop("please set max.genes") } assay <- assay %||% DefaultAssay(object = object) DefaultAssay(object = object) <- assay all.markers <- FindMarkers( object = object, ident.1 = ident.1, ident.2 = ident.2, only.pos = FALSE, logfc.threshold = logfc.threshold, test.use = test.use, assay = assay ) pos.markers <- all.markers[all.markers[, 2] > logfc.threshold & all.markers[, 1] < p.val.cutoff, , drop = FALSE] if(nrow(pos.markers) == 0){ message("No positive markers pass the logfc.thershold") pos.er <- c() } else{ pos.markers.list <- rownames(x = pos.markers)[1:min(max.genes, nrow(x = pos.markers))] pos.er <- enrichR::enrichr(genes = pos.markers.list, databases = enrich.database) pos.er <- do.call(what = cbind, args = pos.er) pos.er$log10pval <- -log10(x = pos.er[, paste(enrich.database, sep = ".", "P.value")]) pos.er$term <- pos.er[, paste(enrich.database, sep = ".", "Term")] pos.er <- pos.er[1:num.pathway, ] pos.er$term <- factor(x = pos.er$term, levels = pos.er$term[order(pos.er$log10pval)]) gene.list <- list(pos = pos.er) } if (isTRUE(x = balanced)) { neg.markers <- all.markers[all.markers[, 2] < logfc.threshold & all.markers[, 1] < p.val.cutoff, , drop = FALSE] neg.markers.list <- rownames(x = neg.markers)[1:min(max.genes, nrow(x = neg.markers))] neg.er <- enrichR::enrichr(genes = neg.markers.list, databases = enrich.database) neg.er <- do.call(what = cbind, args = neg.er) neg.er$log10pval <- -log10(x = neg.er[, paste(enrich.database, sep = ".", "P.value")]) neg.er$term <- neg.er[, paste(enrich.database, sep = ".", "Term")] neg.er <- neg.er[1:num.pathway, ] neg.er$term <- factor(x = neg.er$term, levels = neg.er$term[order(neg.er$log10pval)]) if(isTRUE(length(neg.er$term) == 0) & isTRUE(length(pos.er == 0))){ stop("No positive or negative marker genes identified") } else{ if(isTRUE(length(neg.er$term) == 0)){ gene.list <- list(pos = pos.er) } else{ gene.list <- list(pos = pos.er, neg = neg.er) } } } if (return.gene.list) { return(gene.list) } if(nrow(pos.markers) == 0){ message("No positive markers to plot") if (isTRUE(x = balanced)) { p2 <- ggplot(data = neg.er, aes_string(x = "term", y = "log10pval")) + geom_bar(stat = "identity", fill = "indianred2") + coord_flip() + xlab("Pathway") + scale_fill_manual(values = cols, drop = FALSE) + ylab("-log10(pval)") + ggtitle(paste(enrich.database, ident.1, sep = "_", "negative markers")) + theme_classic() + geom_text(aes_string(label = "term", y = 0), size = 5, color = "black", position = position_dodge(1), hjust = 0)+ theme(axis.title.y= element_blank(), axis.text.y = element_blank(), axis.ticks.y = element_blank()) p <- p2 } else{ stop("Nothing to plot") } } else { p <- ggplot(data = pos.er, aes_string(x = "term", y = "log10pval")) + geom_bar(stat = "identity", fill = "dodgerblue") + coord_flip() + xlab("Pathway") + scale_fill_manual(values = cols, drop = FALSE) + ylab("-log10(pval)") + ggtitle(paste(enrich.database, ident.1, sep = "_", "positive markers")) + theme_classic() + geom_text(aes_string(label = "term", y = 0), size = 5, color = "black", position = position_dodge(1), hjust = 0)+ theme(axis.title.y= element_blank(), axis.text.y = element_blank(), axis.ticks.y = element_blank()) if (isTRUE(x = balanced)) { p2 <- ggplot(data = neg.er, aes_string(x = "term", y = "log10pval")) + geom_bar(stat = "identity", fill = "indianred2") + coord_flip() + xlab("Pathway") + scale_fill_manual(values = cols, drop = 
FALSE) + ylab("-log10(pval)") + ggtitle(paste(enrich.database, ident.1, sep = "_", "negative markers")) + theme_classic() + geom_text(aes_string(label = "term", y = 0), size = 5, color = "black", position = position_dodge(1), hjust = 0)+ theme(axis.title.y= element_blank(), axis.text.y = element_blank(), axis.ticks.y = element_blank()) p <- p+p2 } } return(p) } #' Linear discriminant analysis on pooled CRISPR screen data. #' #' This function performs unsupervised PCA on each mixscape class separately and projects each subspace onto all #' cells in the data. Finally, it uses the first 10 principle components from each projection as input to lda in MASS package together with mixscape class labels. #' #' @inheritParams PrepLDA #' @inheritParams RunLDA #' #' @return Returns a Seurat object with LDA added in the reduction slot. #' #' @export #' @concept mixscape #' MixscapeLDA <- function( object, assay = NULL, ndims.print = 1:5, nfeatures.print = 30, reduction.key = "LDA_", seed = 42, pc.assay = "PRTB", labels = "gene", nt.label = "NT", npcs = 10, verbose = TRUE, logfc.threshold = 0.25 ) { projected_pcs <- PrepLDA( object = object, de.assay = assay, pc.assay = pc.assay, labels = labels, nt.label = nt.label, npcs = npcs , verbose = verbose ) lda.lables <- object[[labels]][,] object_lda <- RunLDA( object = projected_pcs, labels = lda.lables, assay = assay, verbose = verbose ) object[["lda"]] <- object_lda return(object) } #' Function to prepare data for Linear Discriminant Analysis. #' #' This function performs unsupervised PCA on each mixscape class separately and projects each subspace onto all #' cells in the data. #' #' @param object An object of class Seurat. #' @param de.assay Assay to use for selection of DE genes. #' @param pc.assay Assay to use for running Principle components analysis. #' @param labels Meta data column with target gene class labels. #' @param nt.label Name of non-targeting cell class. #' @param npcs Number of principle components to use. #' @param verbose Print progress bar. #' @inheritParams FindMarkers #' @return Returns a list of the first 10 PCs from each projection. 
#' #' @export #' @concept mixscape #' PrepLDA <- function( object, de.assay = "RNA", pc.assay = "PRTB", labels = "gene", nt.label = "NT", npcs = 10, verbose = TRUE, logfc.threshold = 0.25 ) { projected_pcs <- list() gene_list <- setdiff(x = unique(x = object[[labels]][, 1]), y = nt.label) Idents(object = object) <- labels DefaultAssay(object = object) <- pc.assay all_genes <- list() nt.cells <- WhichCells(object = object, idents = nt.label) for (g in gene_list) { if (verbose) { message(g) } gd.cells <- WhichCells(object = object, idents = g) gene_set <- TopDEGenesMixscape( object = object, ident.1 = gd.cells, ident.2 = nt.cells, de.assay = de.assay, logfc.threshold = logfc.threshold, labels = labels, verbose = verbose ) if (length(x = gene_set) < (npcs + 1)) { all_genes[[g]] <- character() next } all_genes[[g]] <- gene_set } all_markers <- unique(x = unlist(x = all_genes)) missing_genes <- all_markers[!all_markers %in% rownames(x = object[[pc.assay]])] object <- GetMissingPerturb(object = object, assay = pc.assay, features = missing_genes, verbose = verbose) for (g in gene_list) { if (verbose) { message(g) } gene_subset <- subset(x = object, idents = c(g, nt.label)) gene_set <- all_genes[[g]] if (length(x = gene_set) == 0) { next } gene_subset <- ScaleData( object = gene_subset, features = gene_set, verbose = FALSE ) gene_subset <- RunPCA( object = gene_subset, features = gene_set, npcs = npcs, verbose = FALSE ) project_pca <- ProjectCellEmbeddings( reference = gene_subset, query = object, dims = 1:npcs, verbose = FALSE ) colnames(x = project_pca) <- paste(g, colnames(x = project_pca), sep = "_") projected_pcs[[g]] <- project_pca } return(projected_pcs) } #' @param object Input values for LDA (numeric), with observations as rows #' @param labels Observation labels for LDA #' @param assay Name of Assay LDA is being run on #' @param ndims.print PCs to print genes for #' @param nfeatures.print Number of genes to print for each PC #' @param reduction.key dimensional reduction key, specifies the string before #' the number for the dimension names. LDA by default #' @param seed Set a random seed. By default, sets the seed to 42. Setting #' NULL will not set a seed. #' #' @importFrom MASS lda #' @importFrom stats predict #' #' @rdname RunLDA #' @concept mixscape #' @export #' @method RunLDA default #' RunLDA.default <- function( object, labels, assay = NULL, verbose = TRUE, ndims.print = 1:5, nfeatures.print = 30, reduction.key = "LDA_", seed = 42, ... 
) { if (!is.null(x = seed)) { set.seed(seed = seed) } object <- data.frame(object) var_names <- colnames(x = object) object$lda_cluster_label <- labels lda_results <- lda(formula = lda_cluster_label ~ ., data = object) lda_predictions <- predict(object = lda_results, newdata = object) lda_cv <-lda( formula = lda_cluster_label ~ ., data = object, CV = TRUE )$posterior feature.loadings <- lda_results$scaling cell.embeddings <- lda_predictions$x lda.assignments <- lda_predictions$class lda.posterior <- lda_predictions$posterior colnames(x = lda.posterior) <- paste0("LDAP_", colnames(x = lda.posterior)) rownames(x = feature.loadings) <- var_names colnames(x = feature.loadings) <- paste0(reduction.key, 1:ncol(x = cell.embeddings)) rownames(x = cell.embeddings) <- rownames(x = object) colnames(x = cell.embeddings) <- colnames(x = feature.loadings) reduction.data <- CreateDimReducObject( embeddings = cell.embeddings, loadings = feature.loadings, assay = assay, key = reduction.key, misc = list( assignments = lda.assignments, posterior = lda.posterior, model = lda_results, cv = lda_cv ) ) if (verbose) { print(x = reduction.data, dims = ndims.print, nfeatures = nfeatures.print) } return(reduction.data) } #' Function to perform Linear Discriminant Analysis. #' #' @param ndims.print Number of LDA dimensions to print. #' @param nfeatures.print Number of features to print for each LDA component. #' @param reduction.key Reduction key name. #' #' @rdname RunLDA #' @concept mixscape #' @export #' @method RunLDA Assay #' RunLDA.Assay <- function( object, assay = NULL, labels, features = NULL, verbose = TRUE, ndims.print = 1:5, nfeatures.print = 30, reduction.key = "LDA_", seed = 42, ... ) { data.use <- PrepDR( object = object, features = features, verbose = verbose ) reduction.data <- RunLDA( object = t(x = data.use), assay = assay, labels = labels, verbose = verbose, ndims.print = ndims.print, nfeatures.print = nfeatures.print, reduction.key = reduction.key, seed = seed, ... ) return(reduction.data) } #' @param object An object of class Seurat. #' @param assay Assay to use for performing Linear Discriminant Analysis (LDA). #' @param labels Meta data column with target gene class labels. #' @param features Features to compute LDA on #' @param reduction.name dimensional reduction name, lda by default #' @param reduction.key Reduction key name. #' @param seed Value for random seed #' @param verbose Print the top genes associated with high/low loadings for #' the PCs #' @param ndims.print Number of LDA dimensions to print. #' @param nfeatures.print Number of features to print for each LDA component. #' #' @rdname RunLDA #' @concept mixscape #' @export #' @method RunLDA Seurat #' RunLDA.Seurat <- function( object, assay = NULL, labels, features = NULL, reduction.name = "lda", reduction.key = "LDA_", seed = 42, verbose = TRUE, ndims.print = 1:5, nfeatures.print = 30, ... ) { assay <- assay %||% DefaultAssay(object = object) assay.data <- GetAssay(object = object, assay = assay) reduction.data <- RunLDA( object = assay.data, assay = assay, labels = labels, features = features, verbose = verbose, ndims.print = ndims.print, nfeatures.print = nfeatures.print, reduction.key = reduction.key, seed = seed, ... 
) object[[reduction.name]] <- reduction.data object$lda.assignments <- slot(object = object[[reduction.name]], name = "misc")[["assignments"]] object <- AddMetaData( object = object, metadata = as.data.frame( x = slot(object = object[[reduction.name]], name = "misc")[["posterior"]] ) ) object <- LogSeuratCommand(object = object) object <- ProjectDim( object = object, reduction = reduction.name, assay = assay, verbose = verbose, dims.print = ndims.print, nfeatures.print = nfeatures.print ) Loadings(object = object[[reduction.name]]) <- Loadings( object = object[[reduction.name]], projected = TRUE ) return(object) } #' Run Mixscape #' #' Function to identify perturbed and non-perturbed gRNA expressing cells that #' accounts for multiple treatments/conditions/chemical perturbations. #' #' @inheritParams FindMarkers #' @importFrom ggplot2 geom_density position_dodge #' @param object An object of class Seurat. #' @param assay Assay to use for mixscape classification. #' @param slot Assay data slot to use. #' @param labels metadata column with target gene labels. #' @param nt.class.name Classification name of non-targeting gRNA cells. #' @param new.class.name Name of mixscape classification to be stored in #' metadata. #' @param min.de.genes Required number of genes that are differentially #' expressed for method to separate perturbed and non-perturbed cells. #' @param min.cells Minimum number of cells in target gene class. If fewer than #' this many cells are assigned to a target gene class during classification, #' all are assigned NP. #' @param de.assay Assay to use when performing differential expression analysis. #' Usually RNA. #' @param iter.num Number of normalmixEM iterations to run if convergence does #' not occur. #' @param verbose Display messages #' @param split.by metadata column with experimental condition/cell type #' classification information. This is meant to be used to account for cases a #' perturbation is condition/cell type -specific. #' @param fine.mode When this is equal to TRUE, DE genes for each target gene #' class will be calculated for each gRNA separately and pooled into one DE list #' for calculating the perturbation score of every cell and their subsequent #' classification. #' @param fine.mode.labels metadata column with gRNA ID labels. #' @param prtb.type specify type of CRISPR perturbation expected for labeling mixscape classifications. Default is KO. #' @return Returns Seurat object with with the following information in the #' meta data and tools slots: #' \describe{ #' \item{mixscape_class}{Classification result with cells being either #' classified as perturbed (KO, by default) or non-perturbed (NP) based on their target #' gene class.} #' \item{mixscape_class.global}{Global classification result (perturbed, NP or NT)} #' \item{p_ko}{Posterior probabilities used to determine if a cell is KO (default). Name of this item will change to match prtb.type parameter setting. 
#' (>0.5) or NP} #' \item{perturbation score}{Perturbation scores for every cell calculated in #' the first iteration of the function.} #' } #' #' @export #' @concept mixscape #' RunMixscape <- function( object, assay = "PRTB", slot = "scale.data", labels = "gene", nt.class.name = "NT", new.class.name = "mixscape_class", min.de.genes = 5, min.cells = 5, de.assay = "RNA", logfc.threshold = 0.25, iter.num = 10, verbose = FALSE, split.by = NULL, fine.mode = FALSE, fine.mode.labels = "guide_ID", prtb.type = "KO" ) { mixtools.installed <- PackageCheck("mixtools", error = FALSE) if (!mixtools.installed[1]) { stop("Please install the mixtools package to use RunMixscape", "\nThis can be accomplished with the following command: ", "\n----------------------------------------", "\ninstall.packages('mixtools')", "\n----------------------------------------", call. = FALSE) } assay <- assay %||% DefaultAssay(object = object) if (is.null(x = labels)) { stop("Please specify target gene class metadata name") } prtb_markers <- list() object[[new.class.name]] <- object[[labels]] object[[new.class.name]][, 1] <- as.character(x = object[[new.class.name]][, 1]) object[[paste0(new.class.name, "_p_", tolower(x = prtb.type))]] <- 0 #create list to store perturbation scores. gv.list <- list() if (is.null(x = split.by)) { split.by <- splits <- "con1" } else { splits <- as.character(x = unique(x = object[[split.by]][, 1])) } # determine gene sets across all splits/groups cells.s.list <- list() for (s in splits) { Idents(object = object) <- split.by cells.s <- WhichCells(object = object, idents = s) cells.s.list[[s]] <- cells.s genes <- setdiff(x = unique(x = object[[labels]][cells.s, 1]), y = nt.class.name) Idents(object = object) <- labels for (gene in genes) { if (isTRUE(x = verbose)) { message("Processing ", gene) } orig.guide.cells <- intersect(x = WhichCells(object = object, idents = gene), y = cells.s) nt.cells <- intersect(x = WhichCells(object = object, idents = nt.class.name), y = cells.s) if (isTRUE(x = fine.mode)) { guides <- setdiff(x = unique(x = object[[fine.mode.labels]][orig.guide.cells, 1]), y = nt.class.name) all.de.genes <- c() for (gd in guides) { gd.cells <- rownames(x = object[[]][orig.guide.cells, ])[which(x = object[[]][orig.guide.cells, fine.mode.labels] == gd)] de.genes <- TopDEGenesMixscape( object = object, ident.1 = gd.cells, ident.2 = nt.cells, de.assay = de.assay, logfc.threshold = logfc.threshold, labels = fine.mode.labels, verbose = verbose ) all.de.genes <- c(all.de.genes, de.genes) } all.de.genes <- unique(all.de.genes) } else { all.de.genes <- TopDEGenesMixscape( object = object, ident.1 = orig.guide.cells, ident.2 = nt.cells, de.assay = de.assay, logfc.threshold = logfc.threshold, labels = labels, verbose = verbose ) } prtb_markers[[s]][[gene]] <- all.de.genes if (length(x = all.de.genes) < min.de.genes) { prtb_markers[[s]][[gene]] <- character() } } } all_markers <- unique(x = unlist(x = prtb_markers)) missing_genes <- all_markers[!all_markers %in% rownames(x = object[[assay]])] object <- GetMissingPerturb(object = object, assay = assay, features = missing_genes, verbose = verbose) for (s in splits) { cells.s <- cells.s.list[[s]] genes <- setdiff(x = unique(x = object[[labels]][cells.s, 1]), y = nt.class.name) if (verbose) { message("Classifying cells for: ") } for (gene in genes) { Idents(object = object) <- labels post.prob <- 0 orig.guide.cells <- intersect(x = WhichCells(object = object, idents = gene), y = cells.s) nt.cells <- intersect(x = WhichCells(object = object, idents 
= nt.class.name), y = cells.s) all.cells <- c(orig.guide.cells, nt.cells) if (length(x = prtb_markers[[s]][[gene]]) == 0) { if (verbose) { message(" Fewer than ", min.de.genes, " DE genes for ", gene, ". Assigning cells as NP.") } object[[new.class.name]][orig.guide.cells, 1] <- paste0(gene, " NP") } else { if (verbose) { message(" ", gene) } de.genes <- prtb_markers[[s]][[gene]] dat <- GetAssayData(object = object[[assay]], slot = "data")[de.genes, all.cells, drop = FALSE] if (slot == "scale.data") { dat <- ScaleData(object = dat, features = de.genes, verbose = FALSE) } converged <- FALSE n.iter <- 0 old.classes <- object[[new.class.name]][all.cells, ] while (!converged && n.iter < iter.num) { Idents(object = object) <- new.class.name guide.cells <- intersect(x = WhichCells(object = object, idents = gene), y = cells.s) vec <- rowMeans2(x = dat[, guide.cells, drop = FALSE]) - rowMeans2(x = dat[, nt.cells, drop = FALSE]) pvec <- apply(X = dat, MARGIN = 2, FUN = ProjectVec, v2 = vec) if (n.iter == 0){ #store pvec gv <- as.data.frame(x = pvec) gv[, labels] <- nt.class.name gv[intersect(x = rownames(x = gv), y = guide.cells), labels] <- gene gv.list[[gene]][[s]] <- gv } guide.norm <- DefineNormalMixscape(pvec[guide.cells]) nt.norm <- DefineNormalMixscape(pvec[nt.cells]) mm <- mixtools::normalmixEM( x = pvec, mu = c(nt.norm$mu, guide.norm$mu), sigma = c(nt.norm$sd, guide.norm$sd), k = 2, mean.constr = c(nt.norm$mu, NA), sd.constr = c(nt.norm$sd, NA), verb = FALSE, maxit = 5000, maxrestarts = 100 ) lik.ratio <- dnorm(x = pvec[orig.guide.cells], mean = mm$mu[1], sd = mm$sigma[1]) / dnorm(x = pvec[orig.guide.cells], mean = mm$mu[2], sd = mm$sigma[2]) post.prob <- 1/(1 + lik.ratio) object[[new.class.name]][names(x = which(post.prob > 0.5)), 1] <- gene object[[new.class.name]][names(x = which(post.prob < 0.5)), 1] <- paste(gene, " NP", sep = "") if (length(x = which(x = object[[new.class.name]] == gene & Cells(x = object) %in% cells.s)) < min.cells) { if (verbose) { message("Fewer than ", min.cells, " cells assigned as ", gene, ". Assigning all to NP.") } object[[new.class.name]][guide.cells, 1] <- "NP" converged <- TRUE } if (all(object[[new.class.name]][all.cells, ] == old.classes)) { converged <- TRUE } old.classes <- object[[new.class.name]][all.cells, ] n.iter <- n.iter + 1 } object[[new.class.name]][which(x = object[[new.class.name]] == gene & Cells(x = object) %in% cells.s), 1] <- paste(gene, prtb.type, sep = " ") } object[[paste0(new.class.name, ".global")]] <- as.character(x = sapply(X = as.character(x = object[[new.class.name]][, 1]), FUN = function(x) {strsplit(x = x, split = " (?=[^ ]+$)", perl = TRUE)[[1]][2]})) object[[paste0(new.class.name, ".global")]][which(x = is.na(x = object[[paste0(new.class.name, ".global")]])), 1] <- nt.class.name object[[paste0(new.class.name,"_p_", tolower(prtb.type))]][names(x = post.prob), 1] <- post.prob } } Tool(object = object) <- gv.list Idents(object = object) <- new.class.name return(object) } #' Differential expression heatmap for mixscape #' #' Draws a heatmap of single cell feature expression with cells ordered by their #' mixscape KO probabilities. #' #' @inheritParams FindMarkers #' @inheritParams DoHeatmap #' @param max.cells.group Number of cells per identity to plot. #' @param max.genes Total number of DE genes to plot. #' @param balanced Plot an equal number of genes with both groups of cells. #' @param order.by.prob Order cells on heatmap based on their mixscape knockout #' probability from highest to lowest score.
#' @param group.by (Deprecated) Option to split densities based on mixscape #' classification. Please use mixscape.class instead #' @param mixscape.class metadata column with mixscape classifications. #' @param prtb.type specify type of CRISPR perturbation expected for labeling #' mixscape classifications. Default is KO. #' @param fc.name Name of the fold change, average difference, or custom #' function column in the output data.frame. Default is avg_log2FC #' @param pval.cutoff P-value cut-off for selection of significantly DE genes. #' @return A ggplot object. #' #' @importFrom stats median #' @importFrom scales hue_pal #' @importFrom ggplot2 annotation_raster coord_cartesian ggplot_build aes_string #' @export #' @concept mixscape #' MixscapeHeatmap <- function( object, ident.1 = NULL, ident.2 = NULL, balanced = TRUE, logfc.threshold = 0.25, assay = "RNA", max.genes = 100, test.use ='wilcox', max.cells.group = NULL, order.by.prob = TRUE, group.by = NULL, mixscape.class = "mixscape_class", prtb.type = "KO", fc.name = "avg_log2FC", pval.cutoff = 5e-2, ... ) { if (!is.null(x = group.by)) { message("The group.by parameter is being deprecated. Please use ", "mixscape.class instead. Setting mixscape.class = ", group.by, " and continuing.") mixscape.class <- group.by } DefaultAssay(object = object) <- assay if (is.numeric(x = max.genes)) { all.markers <- FindMarkers( object = object, ident.1 = ident.1, ident.2 = ident.2, only.pos = FALSE, logfc.threshold = logfc.threshold, test.use = test.use ) if (balanced) { pos.markers <- all.markers[which(x = all.markers[,fc.name] > (logfc.threshold)), ] neg.markers <- all.markers[which(x = all.markers[,fc.name] < (-logfc.threshold)), ] if (length(x = rownames(x = subset(x = pos.markers, p_val < pval.cutoff))) < max.genes ) { marker.list <- c(rownames(x = subset(x = pos.markers, p_val < pval.cutoff))) if (length(x = rownames(x = subset(x = neg.markers, p_val < pval.cutoff))) < max.genes){ marker.list <- c(marker.list, rownames(x = subset(x = neg.markers, p_val < pval.cutoff))) } else { marker.list <- c(marker.list, rownames(x = subset(x = neg.markers, p_val < pval.cutoff))[1:max.genes]) } } else { marker.list <- c(rownames(x = subset(x = pos.markers, p_val < pval.cutoff))[1:max.genes]) if (length(x = rownames(x = subset(x = neg.markers, p_val < pval.cutoff))) < max.genes) { marker.list <- c(marker.list, rownames(x = subset(x = neg.markers, p_val < pval.cutoff))) } else { marker.list <- c(marker.list, rownames(x = subset(x = neg.markers, p_val < pval.cutoff))[1:max.genes]) } } } else { pos.markers <- all.markers[which(x = all.markers[, fc.name] > (logfc.threshold)),] if (length(x = rownames(x = subset(x = pos.markers, p_val < pval.cutoff))) < max.genes ){ marker.list <- c(rownames(x = subset(x = pos.markers, p_val < pval.cutoff))) } else { marker.list <- c(rownames(x = subset(x = pos.markers, p_val < pval.cutoff))[1:max.genes]) } } if (is.null(x = max.cells.group)) { if (is.null(x = group.by)) { sub2 <- subset(x = object, idents = c(ident.1, ident.2)) } else{ sub2 <- subset(x = object, idents = c(ident.1, ident.2)) Idents(object = sub2) <- group.by } } else { if (is.null(x = group.by)) { sub2 <- subset(x = object, idents = c(ident.1, ident.2), downsample = max.cells.group) } else { sub <- subset(x = object, idents = c(ident.1, ident.2)) Idents(object = sub) <- group.by sub2 <- subset(x = sub, downsample = max.cells.group) } } sub2 <- ScaleData(object = sub2, features = marker.list, assay = assay) if (isTRUE(x = order.by.prob)) { p_ko <- 
sub2[[paste0(mixscape.class, "_p_", tolower(x = prtb.type) )]][, 1, drop = FALSE] ordered.cells <- rownames(x = p_ko)[order(p_ko[,1], decreasing = TRUE)] p <- DoHeatmap(object = sub2, features = marker.list, label = TRUE, cells = ordered.cells, assay = assay, ...) } else{ p <- DoHeatmap(object = sub2, features = marker.list, label = TRUE, cells = sample(x = Cells(x = sub2)), assay = assay, ...) } return(p) } } #' Function to plot perturbation score distributions. #' #' Density plots to visualize perturbation scores calculated from RunMixscape #' function. #' #' @param object An object of class Seurat. #' @param target.gene.ident Target gene name to visualize perturbation scores for. #' @param target.gene.class metadata column specifying all target gene names in the experiment. #' @param before.mixscape Option to split densities based on the original target gene classification (TRUE) rather than the mixscape classification. #' Default is FALSE, which plots cells by their mixscape class. #' @param col Specify color of target gene class or knockout cell class. For #' control non-targeting and non-perturbed cells, colors are set to different #' shades of grey. #' @param mixscape.class metadata column specifying mixscape classifications. #' @param prtb.type specify type of CRISPR perturbation expected for labeling mixscape classifications. Default is KO. #' @param split.by For datasets with more than one cell type, the metadata column specifying the cell type; set this to visualize perturbation scores for each cell type separately. #' @return A ggplot object. #' #' @importFrom stats median #' @importFrom scales hue_pal #' @importFrom ggplot2 annotation_raster coord_cartesian ggplot_build aes_string #' geom_density theme_classic #' @export #' @concept mixscape #' PlotPerturbScore <- function( object, target.gene.class = "gene", target.gene.ident = NULL, mixscape.class = "mixscape_class", col = "orange2", split.by = NULL, before.mixscape = FALSE, prtb.type = "KO" ){ if(is.null(target.gene.ident) == TRUE){ message("Please provide name of target gene class to plot") } prtb_score_list <- Tool(object = object, slot = "RunMixscape")[[target.gene.ident]] for (nm in names(prtb_score_list)){ prtb_score_list[[nm]]['name'] <- nm } prtb_score <- do.call(rbind, prtb_score_list) prtb_score[, 2] <- as.factor(x = prtb_score[, 2]) gd <- setdiff(x = unique(x = prtb_score[, target.gene.class]), y = target.gene.ident) colnames(x = prtb_score)[2] <- "gene" prtb_score$cell.bc <- sapply(rownames(prtb_score), FUN = function(x) strsplit(x, split = "[.]")[[1]][2]) if (isTRUE(x = before.mixscape)) { cols <- setNames( object = c("grey49", col), nm = c(gd, target.gene.ident) ) p <- ggplot(data = prtb_score, mapping = aes_string(x = "pvec", color = "gene")) + geom_density() + theme_classic() top_r <- ggplot_build(p)$layout$panel_params[[1]]$y.range[2] prtb_score$y.jitter <- prtb_score$pvec prtb_score$y.jitter[prtb_score[, "gene"] == gd] <- runif( n = prtb_score$y.jitter[prtb_score[, "gene"] == gd], min = 0.001, max = top_r / 10 ) prtb_score$y.jitter[prtb_score[,"gene"] == target.gene.ident] <- runif( n = prtb_score$y.jitter[prtb_score[, "gene"] == target.gene.ident], min = -top_r / 10, max = 0 ) if(is.null(split.by)==FALSE) { prtb_score$split <- as.character(object[[split.by]][prtb_score$cell.bc,1]) p2 <- p + scale_color_manual(values = cols, drop = FALSE) + geom_density(size = 1.5) + geom_point(data = prtb_score, aes_string(x = "pvec", y = "y.jitter"), size = 0.1) + theme(axis.text = element_text(size = 18), axis.title = element_text(size = 20)) + ylab("Cell density") +
xlab("perturbation score") + theme(legend.key.size = unit(1, "cm"), legend.text = element_text(colour = "black", size = 14), legend.title = element_blank(), plot.title = element_text(size = 16, face = "bold"))+ facet_wrap(vars(split)) } else{ p2 <- p + scale_color_manual(values = cols, drop = FALSE) + geom_density(size = 1.5) + geom_point(data = prtb_score, aes_string(x = "pvec", y = "y.jitter"), size = 0.1) + theme(axis.text = element_text(size = 18), axis.title = element_text(size = 20)) + ylab("Cell density") + xlab("perturbation score") + theme(legend.key.size = unit(1, "cm"), legend.text = element_text(colour = "black", size = 14), legend.title = element_blank(), plot.title = element_text(size = 16, face = "bold")) } } else { cols <- setNames( object = c("grey49", "grey79", col), nm = c(gd, paste0(target.gene.ident, " NP"), paste(target.gene.ident, prtb.type, sep = " ")) ) #add mixscape identities prtb_score$mix <- object[[mixscape.class]][prtb_score$cell.bc,] p <- ggplot(data = prtb_score, aes_string(x = "pvec", color = "mix")) + geom_density() + theme_classic() top_r <- ggplot_build(p)$layout$panel_params[[1]]$y.range[2] prtb_score$y.jitter <- prtb_score$pvec gd2 <- setdiff( x = unique(x = prtb_score[, "mix"]), y = c(paste0(target.gene.ident, " NP"), paste(target.gene.ident, prtb.type, sep = " ")) ) prtb_score$y.jitter[prtb_score[, "mix"] == gd2] <- runif( n = prtb_score$y.jitter[prtb_score[, "mix"] == gd2], min = 0.001, max = top_r / 10 ) prtb_score$y.jitter[prtb_score$mix == paste(target.gene.ident, prtb.type, sep = " ")] <- runif( n = prtb_score$y.jitter[prtb_score[, "mix"] == paste(target.gene.ident, prtb.type, sep = " ")], min = -top_r / 10, max = 0 ) prtb_score$y.jitter[prtb_score$mix == paste0(target.gene.ident, " NP")] <- runif( n = prtb_score$y.jitter[prtb_score[, "mix"] == paste0(target.gene.ident, " NP")], min = -top_r / 10, max = 0 ) prtb_score[, "mix"] <- as.factor(x = prtb_score[,"mix"]) if(is.null(split.by) == FALSE){ prtb_score$split <- as.character(object[[split.by]][prtb_score$cell.bc,1]) p2 <- ggplot(data = prtb_score, aes_string(x = "pvec", color = "mix")) + scale_color_manual(values = cols, drop = FALSE) + geom_density(size = 1.5) + geom_point(aes_string(x = "pvec", y = "y.jitter"), size = 0.1) + theme_classic() + theme(axis.text = element_text(size = 18), axis.title = element_text(size = 20)) + ylab("Cell density") + xlab("perturbation score") + theme(legend.key.size = unit(1, "cm"), legend.text = element_text(colour ="black", size = 14), legend.title = element_blank(), plot.title = element_text(size = 16, face = "bold"))+ facet_wrap(vars(split)) } else{ p2 <- p + scale_color_manual(values = cols, drop = FALSE) + geom_density(size = 1.5) + geom_point(data = prtb_score, aes_string(x = "pvec", y = "y.jitter"), size = 0.1) + theme(axis.text = element_text(size = 18), axis.title = element_text(size = 20)) + ylab("Cell density") + xlab("perturbation score") + theme(legend.key.size = unit(1, "cm"), legend.text = element_text(colour ="black", size = 14), legend.title = element_blank(), plot.title = element_text(size = 16, face = "bold")) } } return(p2) } #%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% # Internal #%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% # Function to define Normal distribution - # returns list with mu (mean) and sd (standard deviation) DefineNormalMixscape <- function(x) { mu <- mean(x) sd <- sd(x) return(list(mu = mu, sd = sd)) } # Get missing perturbation signature for missing 
features # # @param object Seurat object # @param assay Perturbation signature assay name # @param features vector of features to compute for # @param verbose display progress # @return Returns Seurat object with assay updated with new features # GetMissingPerturb <- function(object, assay, features, verbose = TRUE) { if (length(x = features) == 0) { return(object) } if (verbose) { message("Computing perturbation signature for missing features.") } command <- grep(pattern = "CalcPerturbSig", x = Command(object = object), value = TRUE) command.match <- sapply(X = command, FUN = function(x) { Command(object = object, command = x, value = "new.assay.name") == assay }) if (length(x = which(x = command.match)) > 1) { stop("Ambiguous command log.") } if (length(x = which(x = command.match)) == 0) { stop("Cannot find previously run CalcPerturbSig command. Please make sure you've run CalcPerturbSig to create the provided assay.") } command <- names(x = command.match) if ("split.by" %in% names(x = slot(object = Command(object = object, command = command), name = "params"))) { split.by <- Command(object = object, command = command, value = "split.by") } else { split.by <- NULL } gd.class <- Command(object = object, command = command, value = "gd.class") nt.cell.class <- Command(object = object, command = command, value = "nt.cell.class") slot <- Command(object = object, command = command, value = "slot") assay.orig <- Command(object = object, command = command, value = "assay") old.idents <- Idents(object = object) if (! is.null(x = split.by)) { Idents(object = object) <- split.by } else { Idents(object = object) <- "rep1" } replicate <- unique(x = Idents(object = object)) all_diff <- list() all_nt_cells <- Cells(x = object)[which(x = object[[]][gd.class] == nt.cell.class)] features <- setdiff(x = features, y = rownames(x = object[[assay]])) for (r in replicate) { # isolate nt cells all_cells <- WhichCells(object = object, idents = r) nt_cells <- intersect(x = all_nt_cells, all_cells) # pull previously computed neighbors neighbors <- Tool(object = object, slot = command)[[make.names(names = paste0(assay, "_", r))]] diff <- PerturbDiff( object = object, assay = assay.orig, slot = slot, all_cells = all_cells, nt_cells = nt_cells, features = features, neighbors = neighbors, verbose = verbose ) all_diff[[r]] <- diff } all_diff <- do.call(what = cbind, args = all_diff) all_diff <- all_diff[, colnames(x = object[[assay]]), drop = FALSE] # TODO: restore once check.matrix is in SeuratObject # new.assay <- CreateAssayObject( # data = rbind( # GetAssayData(object = object[[assay]], slot = "data"), # all_diff # ), # min.cells = 0, # min.features = 0, # check.matrix = FALSE # ) new.assay <- CreateAssayObject( data = rbind( GetAssayData(object = object[[assay]], slot = "data"), all_diff ), min.cells = 0, min.features = 0 ) new.assay <- SetAssayData( object = new.assay, slot = "scale.data", new.data = GetAssayData(object = object[[assay]], slot = "scale.data") ) object[[assay]] <- new.assay Idents(object = object) <- old.idents return(object) } # Helper function to compute the perturbation differences - enables reuse in # GetMissingPerturb # # @param object Seurat object # @param assay assay to use # @param slot slot to use # @param all_cells vector of cell names to compute difference for # @param nt_cells vector of nt cell names # @param features vector of features to compute for # @param neighbors Neighbor object containing indices of nearest NT cells # @param verbose display progress bar # @return returns matrix
of perturbation differences # #' @importFrom matrixStats rowMeans2 #' @importFrom Matrix sparseMatrix colSums #' PerturbDiff <- function(object, assay, slot, all_cells, nt_cells, features, neighbors, verbose) { nt_data <- as.matrix(x = expm1(x = GetAssayData(object = object, assay = assay, slot = slot)[features, nt_cells, drop = FALSE])) mysapply <- ifelse(test = verbose, yes = pbsapply, no = sapply) # new_expr <- mysapply(X = all_cells, FUN = function(i) { # index <- Indices(object = neighbors)[i, ] # nt_cells20 <- nt_cells[index] # avg_nt <- rowMeans2(x = nt_data[, nt_cells20, drop = FALSE]) # avg_nt <- as.matrix(x = avg_nt) # colnames(x = avg_nt) <- i # return(avg_nt) # }) idx <- Indices(object = neighbors)[all_cells,] model.matrix <- sparseMatrix(i = as.vector(idx), j = rep(1:nrow(x = idx), times = ncol(x = idx)), x = 1, dims = c(length(x = nt_cells), nrow(x = idx))) model.matrix <- model.matrix/rep(colSums(model.matrix), each = nrow(x = model.matrix)) new_expr <- nt_data %*% model.matrix new_expr <- matrix(data = new_expr, nrow = length(x = features)) new_expr <- log1p(x = new_expr) rownames(x = new_expr) <- rownames(x = nt_data) colnames(x = new_expr) <- all_cells diff <- new_expr - as.matrix(GetAssayData(object = object, slot = slot, assay = assay)[features, colnames(x = new_expr), drop = FALSE]) return(diff) } # Helper function to project cells onto the perturbation vector # @param v1 vector 1 # @param v2 vector 2 # ProjectVec <- function(v1, v2) { return(as.vector(x = (v1 %*% v2) / (v2 %*% v2))) } # Function to find top DE genes that pass some p value cutoff between cells # with targeting and non-targeting gRNAs. # # @param object An object of class Seurat. # @param ident.1 Target gene class or cells to find DE genes for. # @param ident.2 Non-targetting class or cells # @param labels metadata column with target gene classification. # @param de.assay Name of Assay DE is performed on. # @param test.use Denotes which test to use. See all available tests on # FindMarkers documentation. # @param pval.cut.off P-value cut-off for selection of significantly DE genes. # @param logfc.threshold Limit testing to genes which show, on average, at # least X-fold difference (log-scale) between the two groups of cells. Default # is 0.25 Increasing logfc.threshold speeds up the function, but can miss # weaker signals. # @param verbose Display messages # @return # TopDEGenesMixscape <- function( object, ident.1, ident.2 = NULL, labels = 'gene', de.assay = "RNA", test.use = "LR", pval.cutoff = 5e-2, logfc.threshold = 0.25, verbose = TRUE ) { if (verbose) { message("Finding new perturbation gene set") } de.genes <- data.frame() tryCatch( expr = { de.genes <- FindMarkers( object = object, ident.1 = ident.1, ident.2 = ident.2, group.by = labels, assay = de.assay, test.use = test.use, logfc.threshold = logfc.threshold, verbose = verbose ) de.genes <- de.genes[de.genes$p_val_adj < pval.cutoff, ] }, error = function(e) {} ) return(rownames(x = de.genes)) } Seurat/R/zzz.R0000644000176200001440000000373514005656653012720 0ustar liggesusers#' @section Package options: #' #' Seurat uses the following [options()] to configure behaviour: #' #' \describe{ #' \item{\code{Seurat.memsafe}}{global option to call gc() after many operations. #' This can be helpful in cleaning up the memory status of the R session and #' prevent use of swap space. 
However, it does add to the computational overhead #' and setting to FALSE can speed things up if you're working in an environment #' where RAM availability is not a concern.} #' \item{\code{Seurat.warn.umap.uwot}}{Show warning about the default backend #' for \code{\link{RunUMAP}} changing from Python UMAP via reticulate to UWOT} #' \item{\code{Seurat.checkdots}}{For functions that have ... as a parameter, #' this controls the behavior when an item isn't used. Can be one of warn, #' stop, or silent.} #' \item{\code{Seurat.limma.wilcox.msg}}{{Show message about more efficient #' Wilcoxon Rank Sum test available via the limma package}} #' \item{\code{Seurat.Rfast2.msg}}{{Show message about more efficient #' Moran's I function available via the Rfast2 package}} #' \item{\code{Seurat.warn.vlnplot.split}}{Show message about changes to #' default behavior of split/multi violin plots} #' } #' #' @docType package #' @rdname Seurat-package #' @name Seurat-package #' "_PACKAGE" seurat_default_options <- list( Seurat.memsafe = FALSE, Seurat.warn.umap.uwot = TRUE, Seurat.checkdots = "warn", Seurat.limma.wilcox.msg = TRUE, Seurat.Rfast2.msg = TRUE, Seurat.warn.vlnplot.split = TRUE ) AttachDeps <- function(deps) { for (d in deps) { if (!paste0('package:', d) %in% search()) { packageStartupMessage("Attaching ", d) attachNamespace(ns = d) } } } .onAttach <- function(libname, pkgname) { AttachDeps(deps = 'SeuratObject') } .onLoad <- function(libname, pkgname) { op <- options() toset <- !(names(x = seurat_default_options) %in% names(x = op)) if (any(toset)) options(seurat_default_options[toset]) invisible(x = NULL) } Seurat/R/utilities.R0000644000176200001440000022422514156670503014072 0ustar liggesusers#' @include generics.R #' @importFrom SeuratObject PackageCheck #' NULL #%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% # Functions #%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% #' Add Azimuth Results #' #' Add mapping and prediction scores, UMAP embeddings, and imputed assay (if #' available) #' from Azimuth to an existing or new \code{\link[SeuratObject]{Seurat}} object #' #' @param object A \code{\link[SeuratObject]{Seurat}} object #' @param filename Path to Azimuth mapping scores file #' #' @return \code{object} with Azimuth results added #' #' @examples #' \dontrun{ #' object <- AddAzimuthResults(object, filename = "azimuth_results.Rds") #' } #' #' @export AddAzimuthResults <- function(object = NULL, filename) { if (is.null(x = filename)) { stop("No Azimuth results provided.") } azimuth_results <- readRDS(file = filename) if (!is.list(x = azimuth_results) || any(!(c('umap', 'pred.df') %in% names(x = azimuth_results)))) { stop("Expected following format for azimuth_results: `list(umap = , pred.df = [, impADT = ])`") } if (is.null(x = object)) { message("No existing Seurat object provided. 
Creating new one.") object <- CreateSeuratObject( counts = matrix( nrow = 1, ncol = nrow(x = azimuth_results$umap), dimnames = list( row.names = 'Dummy.feature', col.names = rownames(x = azimuth_results$umap)) ), assay = 'Dummy' ) } else { overlap.cells <- intersect( x = Cells(x = object), y = rownames(x = azimuth_results$umap) ) if (!(all(overlap.cells %in% Cells(x = object)))) { stop("Cells in object do not match cells in download") } else if (length(x = overlap.cells) < length(x = Cells(x = object))) { warning(paste0("Subsetting out ", length(x = Cells(x = object)) - length(x = overlap.cells), " cells that are absent in downloaded results (perhaps filtered by Azimuth)")) object <- subset(x = object, cells = overlap.cells) } } azimuth_results$pred.df$cell <- NULL object <- AddMetaData(object = object, metadata = azimuth_results$pred.df) object[['umap.proj']] <- azimuth_results$umap if ('impADT' %in% names(x = azimuth_results)) { object[['impADT']] <- azimuth_results$impADT if ('Dummy' %in% Assays(object = object)) { DefaultAssay(object = object) <- 'impADT' object[['Dummy']] <- NULL } } return(object) } #' Add Azimuth Scores #' #' Add mapping and prediction scores from Azimuth to a #' \code{\link[SeuratObject]{Seurat}} object #' #' @param object A \code{\link[SeuratObject]{Seurat}} object #' @param filename Path to Azimuth mapping scores file #' #' @return \code{object} with the mapping scores added #' #' @examples #' \dontrun{ #' object <- AddAzimuthScores(object, filename = "azimuth_pred.tsv") #' } #' AddAzimuthScores <- function(object, filename) { if (!file.exists(filename)) { stop("Cannot find Azimuth scores file ", filename, call. = FALSE) } object <- AddMetaData( object = object, metadata = read.delim(file = filename, row.names = 1) ) return(object) } #' Calculate module scores for feature expression programs in single cells #' #' Calculate the average expression levels of each program (cluster) on single #' cell level, subtracted by the aggregated expression of control feature sets. #' All analyzed features are binned based on averaged expression, and the #' control features are randomly selected from each bin. #' #' @param object Seurat object #' @param features A list of vectors of features for expression programs; each #' entry should be a vector of feature names #' @param pool List of features to check expression levels against, defaults to #' \code{rownames(x = object)} #' @param nbin Number of bins of aggregate expression levels for all #' analyzed features #' @param ctrl Number of control features selected from the same bin per #' analyzed feature #' @param k Use feature clusters returned from DoKMeans #' @param assay Name of assay to use #' @param name Name for the expression programs; will append a number to the #' end for each entry in \code{features} (eg. if \code{features} has three #' programs, the results will be stored as \code{name1}, \code{name2}, #' \code{name3}, respectively) #' @param seed Set a random seed. If NULL, seed is not set. #' @param search Search for symbol synonyms for features in \code{features} that #' don't match features in \code{object}? Searches the HGNC's gene names #' database; see \code{\link{UpdateSymbolList}} for more details #' @param ... 
Extra parameters passed to \code{\link{UpdateSymbolList}} #' #' @return Returns a Seurat object with module scores added to object meta data; #' each module is stored as \code{name#} for each module program present in #' \code{features} #' #' @importFrom ggplot2 cut_number #' @importFrom Matrix rowMeans colMeans #' #' @references Tirosh et al, Science (2016) #' #' @export #' @concept utilities #' #' @examples #' \dontrun{ #' data("pbmc_small") #' cd_features <- list(c( #' 'CD79B', #' 'CD79A', #' 'CD19', #' 'CD180', #' 'CD200', #' 'CD3D', #' 'CD2', #' 'CD3E', #' 'CD7', #' 'CD8A', #' 'CD14', #' 'CD1C', #' 'CD68', #' 'CD9', #' 'CD247' #' )) #' pbmc_small <- AddModuleScore( #' object = pbmc_small, #' features = cd_features, #' ctrl = 5, #' name = 'CD_Features' #' ) #' head(x = pbmc_small[]) #' } #' AddModuleScore <- function( object, features, pool = NULL, nbin = 24, ctrl = 100, k = FALSE, assay = NULL, name = 'Cluster', seed = 1, search = FALSE, ... ) { if (!is.null(x = seed)) { set.seed(seed = seed) } assay.old <- DefaultAssay(object = object) assay <- assay %||% assay.old DefaultAssay(object = object) <- assay assay.data <- GetAssayData(object = object) features.old <- features if (k) { .NotYetUsed(arg = 'k') features <- list() for (i in as.numeric(x = names(x = table(object@kmeans.obj[[1]]$cluster)))) { features[[i]] <- names(x = which(x = object@kmeans.obj[[1]]$cluster == i)) } cluster.length <- length(x = features) } else { if (is.null(x = features)) { stop("Missing input feature list") } features <- lapply( X = features, FUN = function(x) { missing.features <- setdiff(x = x, y = rownames(x = object)) if (length(x = missing.features) > 0) { warning( "The following features are not present in the object: ", paste(missing.features, collapse = ", "), ifelse( test = search, yes = ", attempting to find updated synonyms", no = ", not searching for symbol synonyms" ), call. = FALSE, immediate. = TRUE ) if (search) { tryCatch( expr = { updated.features <- UpdateSymbolList(symbols = missing.features, ...) names(x = updated.features) <- missing.features for (miss in names(x = updated.features)) { index <- which(x == miss) x[index] <- updated.features[miss] } }, error = function(...) { warning( "Could not reach HGNC's gene names database", call. = FALSE, immediate. = TRUE ) } ) missing.features <- setdiff(x = x, y = rownames(x = object)) if (length(x = missing.features) > 0) { warning( "The following features are still not present in the object: ", paste(missing.features, collapse = ", "), call. = FALSE, immediate. = TRUE ) } } } return(intersect(x = x, y = rownames(x = object))) } ) cluster.length <- length(x = features) } if (!all(LengthCheck(values = features))) { warning(paste( 'Could not find enough features in the object from the following feature lists:', paste(names(x = which(x = !LengthCheck(values = features)))), 'Attempting to match case...' )) features <- lapply( X = features.old, FUN = CaseMatch, match = rownames(x = object) ) } if (!all(LengthCheck(values = features))) { stop(paste( 'The following feature lists do not have enough features present in the object:', paste(names(x = which(x = !LengthCheck(values = features)))), 'exiting...' 
)) } pool <- pool %||% rownames(x = object) data.avg <- Matrix::rowMeans(x = assay.data[pool, ]) data.avg <- data.avg[order(data.avg)] data.cut <- cut_number(x = data.avg + rnorm(n = length(data.avg))/1e30, n = nbin, labels = FALSE, right = FALSE) #data.cut <- as.numeric(x = Hmisc::cut2(x = data.avg, m = round(x = length(x = data.avg) / (nbin + 1)))) names(x = data.cut) <- names(x = data.avg) ctrl.use <- vector(mode = "list", length = cluster.length) for (i in 1:cluster.length) { features.use <- features[[i]] for (j in 1:length(x = features.use)) { ctrl.use[[i]] <- c( ctrl.use[[i]], names(x = sample( x = data.cut[which(x = data.cut == data.cut[features.use[j]])], size = ctrl, replace = FALSE )) ) } } ctrl.use <- lapply(X = ctrl.use, FUN = unique) ctrl.scores <- matrix( data = numeric(length = 1L), nrow = length(x = ctrl.use), ncol = ncol(x = object) ) for (i in 1:length(ctrl.use)) { features.use <- ctrl.use[[i]] ctrl.scores[i, ] <- Matrix::colMeans(x = assay.data[features.use, ]) } features.scores <- matrix( data = numeric(length = 1L), nrow = cluster.length, ncol = ncol(x = object) ) for (i in 1:cluster.length) { features.use <- features[[i]] data.use <- assay.data[features.use, , drop = FALSE] features.scores[i, ] <- Matrix::colMeans(x = data.use) } features.scores.use <- features.scores - ctrl.scores rownames(x = features.scores.use) <- paste0(name, 1:cluster.length) features.scores.use <- as.data.frame(x = t(x = features.scores.use)) rownames(x = features.scores.use) <- colnames(x = object) object[[colnames(x = features.scores.use)]] <- features.scores.use CheckGC() DefaultAssay(object = object) <- assay.old return(object) } #' Aggregated feature expression by identity class #' #' Returns aggregated (summed) expression values for each identity class #' #' If slot is set to 'data', this function assumes that the data has been log #' normalized and therefore feature values are exponentiated prior to aggregating #' so that sum is done in non-log space. Otherwise, if slot is set to #' either 'counts' or 'scale.data', no exponentiation is performed prior to #' aggregating #' If \code{return.seurat = TRUE} and slot is not 'scale.data', aggregated values #' are placed in the 'counts' slot of the returned object and the log of aggregated values #' are placed in the 'data' slot. For the \code{\link{ScaleData}} is then run on the default assay #' before returning the object. #' If \code{return.seurat = TRUE} and slot is 'scale.data', the 'counts' slot is left empty, #' the 'data' slot is filled with NA, and 'scale.data' is set to the aggregated values. #' #' @param object Seurat object #' @param assays Which assays to use. Default is all assays #' @param features Features to analyze. Default is all features in the assay #' @param return.seurat Whether to return the data as a Seurat object. Default is FALSE #' @param group.by Categories for grouping (e.g, ident, replicate, celltype); 'ident' by default #' @param add.ident (Deprecated) Place an additional label on each cell prior to pseudobulking #' (very useful if you want to observe cluster pseudobulk values, separated by replicate, for example) #' @param slot Slot(s) to use; if multiple slots are given, assumed to follow #' the order of 'assays' (if specified) or object's assays #' @param verbose Print messages and show progress bar #' @param ... Arguments to be passed to methods such as \code{\link{CreateSeuratObject}}#' #' @return Returns a matrix with genes as rows, identity classes as columns. 
#' If return.seurat is TRUE, returns an object of class \code{\link{Seurat}}. #' @export #' @concept utilities #' #' @examples #' data("pbmc_small") #' head(AggregateExpression(object = pbmc_small)) #' AggregateExpression <- function( object, assays = NULL, features = NULL, return.seurat = FALSE, group.by = 'ident', add.ident = NULL, slot = 'data', verbose = TRUE, ... ) { return( PseudobulkExpression( object = object, pb.method = 'aggregate', assays = assays, features = features, return.seurat = return.seurat, group.by = group.by, add.ident = add.ident, slot = slot, verbose = verbose, ... ) ) } #' Averaged feature expression by identity class #' #' Returns averaged expression values for each identity class #' #' If slot is set to 'data', this function assumes that the data has been log #' normalized and therefore feature values are exponentiated prior to averaging #' so that averaging is done in non-log space. Otherwise, if slot is set to #' either 'counts' or 'scale.data', no exponentiation is performed prior to #' averaging #' If \code{return.seurat = TRUE} and slot is not 'scale.data', averaged values #' are placed in the 'counts' slot of the returned object and the log of averaged values #' are placed in the 'data' slot. \code{\link{ScaleData}} is then run on the default assay #' before returning the object. #' If \code{return.seurat = TRUE} and slot is 'scale.data', the 'counts' slot is left empty, #' the 'data' slot is filled with NA, and 'scale.data' is set to the aggregated values. #' #' @param object Seurat object #' @param assays Which assays to use. Default is all assays #' @param features Features to analyze. Default is all features in the assay #' @param return.seurat Whether to return the data as a Seurat object. Default is FALSE #' @param group.by Categories for grouping (e.g, ident, replicate, celltype); 'ident' by default #' @param add.ident (Deprecated) Place an additional label on each cell prior to pseudobulking #' (very useful if you want to observe cluster pseudobulk values, separated by replicate, for example) #' @param slot Slot(s) to use; if multiple slots are given, assumed to follow #' the order of 'assays' (if specified) or object's assays #' @param verbose Print messages and show progress bar #' @param ... Arguments to be passed to methods such as \code{\link{CreateSeuratObject}} #' #' @return Returns a matrix with genes as rows, identity classes as columns. #' If return.seurat is TRUE, returns an object of class \code{\link{Seurat}}. #' @export #' @concept utilities #' #' @examples #' data("pbmc_small") #' head(AverageExpression(object = pbmc_small)) #' AverageExpression <- function( object, assays = NULL, features = NULL, return.seurat = FALSE, group.by = 'ident', add.ident = NULL, slot = 'data', verbose = TRUE, ... ) { return( PseudobulkExpression( object = object, pb.method = 'average', assays = assays, features = features, return.seurat = return.seurat, group.by = group.by, add.ident = add.ident, slot = slot, verbose = verbose, ... 
) ) } #' Match the case of character vectors #' #' @param search A vector of search terms #' @param match A vector of characters whose case should be matched #' #' @return Values from search present in match with the case of match #' #' @export #' @concept utilities #' #' @examples #' data("pbmc_small") #' cd_genes <- c('Cd79b', 'Cd19', 'Cd200') #' CaseMatch(search = cd_genes, match = rownames(x = pbmc_small)) #' CaseMatch <- function(search, match) { search.match <- sapply( X = search, FUN = function(s) { return(grep( pattern = paste0('^', s, '$'), x = match, ignore.case = TRUE, perl = TRUE, value = TRUE )) } ) return(unlist(x = search.match)) } #' Score cell cycle phases #' #' @param object A Seurat object #' @param s.features A vector of features associated with S phase #' @param g2m.features A vector of features associated with G2M phase #' @param ctrl Number of control features selected from the same bin per #' analyzed feature supplied to \code{\link{AddModuleScore}}. #' Defaults to value equivalent to minimum number of features #' present in 's.features' and 'g2m.features'. #' @param set.ident If TRUE, sets identity to phase assignments. #' Stashes old identities in 'old.ident' #' @param ... Arguments to be passed to \code{\link{AddModuleScore}} #' #' @return A Seurat object with the following columns added to object meta data: S.Score, G2M.Score, and Phase #' #' @seealso \code{AddModuleScore} #' #' @export #' @concept utilities #' #' @examples #' \dontrun{ #' data("pbmc_small") #' # pbmc_small doesn't have any cell-cycle genes #' # To run CellCycleScoring, please use a dataset with cell-cycle genes #' # An example is available at http://satijalab.org/seurat/cell_cycle_vignette.html #' pbmc_small <- CellCycleScoring( #' object = pbmc_small, #' g2m.features = cc.genes$g2m.genes, #' s.features = cc.genes$s.genes #' ) #' head(x = pbmc_small@meta.data) #' } #' CellCycleScoring <- function( object, s.features, g2m.features, ctrl = NULL, set.ident = FALSE, ... ) { name <- 'Cell.Cycle' features <- list('S.Score' = s.features, 'G2M.Score' = g2m.features) if (is.null(x = ctrl)) { ctrl <- min(vapply(X = features, FUN = length, FUN.VALUE = numeric(length = 1))) } object.cc <- AddModuleScore( object = object, features = features, name = name, ctrl = ctrl, ... ) cc.columns <- grep(pattern = name, x = colnames(x = object.cc[[]]), value = TRUE) cc.scores <- object.cc[[cc.columns]] rm(object.cc) CheckGC() assignments <- apply( X = cc.scores, MARGIN = 1, FUN = function(scores, first = 'S', second = 'G2M', null = 'G1') { if (all(scores < 0)) { return(null) } else { if (length(which(x = scores == max(scores))) > 1) { return('Undecided') } else { return(c(first, second)[which(x = scores == max(scores))]) } } } ) cc.scores <- merge(x = cc.scores, y = data.frame(assignments), by = 0) colnames(x = cc.scores) <- c('rownames', 'S.Score', 'G2M.Score', 'Phase') rownames(x = cc.scores) <- cc.scores$rownames cc.scores <- cc.scores[, c('S.Score', 'G2M.Score', 'Phase')] object[[colnames(x = cc.scores)]] <- cc.scores if (set.ident) { object[['old.ident']] <- Idents(object = object) Idents(object = object) <- 'Phase' } return(object) } #' Slim down a multi-species expression matrix when only one species is primarily of interest. #' #' Valuable for CITE-seq analyses, where we typically spike in rare populations of 'negative control' cells from a different species. #' #' @param object A UMI count matrix.
Should contain rownames that start with #' the ensuing arguments prefix.1 or prefix.2 #' @param prefix The prefix denoting rownames for the species of interest. #' Default is "HUMAN_". These rownames will have this prefix removed in the returned matrix. #' @param controls The prefix denoting rownames for the species of 'negative #' control' cells. Default is "MOUSE_". #' @param ncontrols How many of the most highly expressed (average) negative #' control features (by default, 100 mouse genes), should be kept? All other #' rownames starting with prefix.2 are discarded. #' #' @return A UMI count matrix. Rownames that started with \code{prefix} have this #' prefix discarded. For rownames starting with \code{controls}, only the #' \code{ncontrols} most highly expressed features are kept, and the #' prefix is kept. All other rows are retained. #' #' @importFrom utils head #' @importFrom Matrix rowSums #' #' @export #' @concept utilities #' #' @examples #' \dontrun{ #' cbmc.rna.collapsed <- CollapseSpeciesExpressionMatrix(cbmc.rna) #' } #' CollapseSpeciesExpressionMatrix <- function( object, prefix = "HUMAN_", controls = "MOUSE_", ncontrols = 100 ) { features <- grep(pattern = prefix, x = rownames(x = object), value = TRUE) controls <- grep(pattern = controls, x = rownames(x = object), value = TRUE) others <- setdiff(x = rownames(x = object), y = c(features, controls)) controls <- rowSums(x = object[controls, ]) controls <- names(x = head( x = sort(x = controls, decreasing = TRUE), n = ncontrols )) object <- object[c(features, controls, others), ] rownames(x = object) <- gsub( pattern = prefix, replacement = '', x = rownames(x = object) ) return(object) } # Create an Annoy index # # @note Function exists because it's not exported from \pkg{uwot} # # @param name Distance metric name # @param ndim Number of dimensions # # @return An nn index object # #' @importFrom methods new #' @importFrom RcppAnnoy AnnoyAngular AnnoyManhattan AnnoyEuclidean AnnoyHamming # CreateAnn <- function(name, ndim) { return(switch( EXPR = name, cosine = new(Class = AnnoyAngular, ndim), manhattan = new(Class = AnnoyManhattan, ndim), euclidean = new(Class = AnnoyEuclidean, ndim), hamming = new(Class = AnnoyHamming, ndim), stop("BUG: unknown Annoy metric '", name, "'") )) } #' Run a custom distance function on an input data matrix #' #' @author Jean Fan #' #' @param my.mat A matrix to calculate distance on #' @param my.function A function to calculate distance #' @param ... Extra parameters to my.function #' #' @return A distance matrix #' #' @importFrom stats as.dist #' #' @export #' @concept utilities #' #' @examples #' data("pbmc_small") #' # Define custom distance matrix #' manhattan.distance <- function(x, y) return(sum(abs(x-y))) #' #' input.data <- GetAssayData(pbmc_small, assay.type = "RNA", slot = "scale.data") #' cell.manhattan.dist <- CustomDistance(input.data, manhattan.distance) #' CustomDistance <- function(my.mat, my.function, ...) { CheckDots(..., fxns = my.function) n <- ncol(x = my.mat) mat <- matrix(data = 0, ncol = n, nrow = n) colnames(x = mat) <- rownames(x = mat) <- colnames(x = my.mat) for (i in 1:nrow(x = mat)) { for (j in 1:ncol(x = mat)) { mat[i,j] <- my.function(my.mat[, i], my.mat[, j], ...) } } return(as.dist(m = mat)) } #' Calculate the mean of logged values #' #' Calculate mean of logged values in non-log space (return answer in log-space) #' #' @param x A vector of values #' @param ... 
Other arguments (not used) #' #' @return Returns the mean in log-space #' #' @export #' @concept utilities #' #' @examples #' ExpMean(x = c(1, 2, 3)) #' ExpMean <- function(x, ...) { if (inherits(x = x, what = 'AnyMatrix')) { return(apply(X = x, FUN = function(i) {log(x = mean(x = exp(x = i) - 1) + 1)}, MARGIN = 1)) } else { return(log(x = mean(x = exp(x = x) - 1) + 1)) } } #' Calculate the standard deviation of logged values #' #' Calculate SD of logged values in non-log space (return answer in log-space) #' #' @param x A vector of values #' #' @return Returns the standard deviation in log-space #' #' @importFrom stats sd #' #' @export #' @concept utilities #' #' @examples #' ExpSD(x = c(1, 2, 3)) #' ExpSD <- function(x) { return(log1p(x = sd(x = expm1(x = x)))) } #' Calculate the variance of logged values #' #' Calculate variance of logged values in non-log space (return answer in #' log-space) #' #' @param x A vector of values #' #' @return Returns the variance in log-space #' #' @importFrom stats var #' #' @export #' @concept utilities #' #' @examples #' ExpVar(x = c(1, 2, 3)) #' ExpVar <- function(x) { return(log1p(x = var(x = expm1(x = x)))) } #' Scale and/or center matrix rowwise #' #' Performs row scaling and/or centering. Equivalent to using t(scale(t(mat))) #' in R except in the case of NA values. #' #' @param mat A matrix #' @param center a logical value indicating whether to center the rows #' @param scale a logical value indicating whether to scale the rows #' @param scale_max clip all values greater than scale_max to scale_max. Don't #' clip if Inf. #' @return Returns the center/scaled matrix #' #' @importFrom matrixStats rowMeans2 rowSds rowSums2 #' #' @export #' @concept utilities #' FastRowScale <- function( mat, center = TRUE, scale = TRUE, scale_max = 10 ) { # inspired by https://www.r-bloggers.com/a-faster-scale-function/ if (center) { rm <- rowMeans2(x = mat, na.rm = TRUE) } if (scale) { if (center) { rsd <- rowSds(mat, center = rm) } else { rsd <- sqrt(x = rowSums2(x = mat^2)/(ncol(x = mat) - 1)) } } if (center) { mat <- mat - rm } if (scale) { mat <- mat / rsd } if (scale_max != Inf) { mat[mat > scale_max] <- scale_max } return(mat) } #' Get updated synonyms for gene symbols #' #' Find current gene symbols based on old or alias symbols using the gene #' names database from the HUGO Gene Nomenclature Committee (HGNC) #' #' @details For each symbol passed, we query the HGNC gene names database for #' current symbols that have the provided symbol as either an alias #' (\code{alias_symbol}) or old (\code{prev_symbol}) symbol. All other queries #' are \strong{not} supported. #' #' @note This function requires internet access #' #' @param symbols A vector of gene symbols #' @param timeout Time to wait before canceling query in seconds #' @param several.ok Allow several current gene symbols for each #' provided symbol #' @param search.types Type of query to perform: #' \describe{ #' \item{\dQuote{\code{alias_symbol}}}{Find alternate symbols for the genes #' described by \code{symbols}} #' \item{\dQuote{\code{prev_symbol}}}{Find new new symbols for the genes #' described by \code{symbols}} #' } #' This parameter accepts multiple options and short-hand options #' (eg. \dQuote{\code{prev}} for \dQuote{\code{prev_symbol}}) #' @param verbose Show a progress bar depicting search progress #' @param ... 
Extra parameters passed to \code{\link[httr]{GET}} #' #' @return \code{GeneSymbolThesarus}:, if \code{several.ok}, a named list #' where each entry is the current symbol found for each symbol provided and #' the names are the provided symbols. Otherwise, a named vector with the #' same information. #' #' @source \url{https://www.genenames.org/} \url{https://www.genenames.org/help/rest/} #' #' @importFrom utils txtProgressBar setTxtProgressBar #' @importFrom httr GET accept_json timeout status_code content #' #' @rdname UpdateSymbolList #' @name UpdateSymbolList #' #' @export #' @concept utilities #' #' @seealso \code{\link[httr]{GET}} #' #' @examples #' \dontrun{ #' GeneSybmolThesarus(symbols = c("FAM64A")) #' } #' GeneSymbolThesarus <- function( symbols, timeout = 10, several.ok = FALSE, search.types = c('alias_symbol', 'prev_symbol'), verbose = TRUE, ... ) { db.url <- 'http://rest.genenames.org/fetch' # search.types <- c('alias_symbol', 'prev_symbol') search.types <- match.arg(arg = search.types, several.ok = TRUE) synonyms <- vector(mode = 'list', length = length(x = symbols)) not.found <- vector(mode = 'logical', length = length(x = symbols)) multiple.found <- vector(mode = 'logical', length = length(x = symbols)) names(x = multiple.found) <- names(x = not.found) <- names(x = synonyms) <- symbols if (verbose) { pb <- txtProgressBar(max = length(x = symbols), style = 3, file = stderr()) } for (symbol in symbols) { sym.syn <- character() for (type in search.types) { response <- GET( url = paste(db.url, type, symbol, sep = '/'), config = c(accept_json(), timeout(seconds = timeout)), ... ) if (!identical(x = status_code(x = response), y = 200L)) { next } response <- content(x = response) if (response$response$numFound != 1) { if (response$response$numFound > 1) { warning( "Multiple hits found for ", symbol, " as ", type, ", skipping", call. = FALSE, immediate. = TRUE ) } next } sym.syn <- c(sym.syn, response$response$docs[[1]]$symbol) } not.found[symbol] <- length(x = sym.syn) < 1 multiple.found[symbol] <- length(x = sym.syn) > 1 if (length(x = sym.syn) == 1 || (length(x = sym.syn) > 1 && several.ok)) { synonyms[[symbol]] <- sym.syn } if (verbose) { setTxtProgressBar(pb = pb, value = pb$getVal() + 1) } } if (verbose) { close(con = pb) } if (sum(not.found) > 0) { warning( "The following symbols had no synonyms: ", paste(names(x = which(x = not.found)), collapse = ', '), call. = FALSE, immediate. = TRUE ) } if (sum(multiple.found) > 0) { msg <- paste( "The following symbols had multiple synonyms:", paste(names(x = which(x = multiple.found)), sep = ', ') ) if (several.ok) { message(msg) message("Including anyways") } else { warning(msg, call. = FALSE, immediate. = TRUE) } } synonyms <- Filter(f = Negate(f = is.null), x = synonyms) if (!several.ok) { synonyms <- unlist(x = synonyms) } return(synonyms) } #' Compute the correlation of features broken down by groups with another #' covariate #' #' @param object Seurat object #' @param assay Assay to pull the data from #' @param slot Slot in the assay to pull feature expression data from (counts, #' data, or scale.data) #' @param var Variable with which to correlate the features #' @param group.assay Compute the gene groups based off the data in this assay. 
#' @param min.cells Only compute for genes in at least this many cells #' @param ngroups Number of groups to split into #' @param do.plot Display the group correlation boxplot (via #' \code{GroupCorrelationPlot}) #' #' @return A Seurat object with the correlation stored in metafeatures #' #' @export #' @concept utilities #' GroupCorrelation <- function( object, assay = NULL, slot = "scale.data", var = NULL, group.assay = NULL, min.cells = 5, ngroups = 6, do.plot = TRUE ) { assay <- assay %||% DefaultAssay(object = object) group.assay <- group.assay %||% assay var <- var %||% paste0("nCount_", group.assay) gene.grp <- GetFeatureGroups( object = object, assay = group.assay, min.cells = min.cells, ngroups = ngroups ) data <- as.matrix(x = GetAssayData(object = object[[assay]], slot = slot)) data <- data[rowMeans(x = data) != 0, ] grp.cors <- apply( X = data, MARGIN = 1, FUN = function(x) { cor(x = x, y = object[[var]]) } ) grp.cors <- grp.cors[names(x = gene.grp)] grp.cors <- as.data.frame(x = grp.cors[which(x = !is.na(x = grp.cors))]) grp.cors$gene_grp <- gene.grp[rownames(x = grp.cors)] colnames(x = grp.cors) <- c("cor", "feature_grp") object[[assay]][["feature.grp"]] <- grp.cors[, "feature_grp", drop = FALSE] object[[assay]][[paste0(var, "_cor")]] <- grp.cors[, "cor", drop = FALSE] if (do.plot) { print(GroupCorrelationPlot( object = object, assay = assay, feature.group = "feature.grp", cor = paste0(var, "_cor") )) } return(object) } #' Load the Annoy index file #' #' @param object Neighbor object #' @param file Path to file with annoy index #' #' @return Returns the Neighbor object with the index stored #' @export #' @concept utilities #' LoadAnnoyIndex <- function(object, file){ metric <- slot(object = object, name = "alg.info")$metric ndim <- slot(object = object, name = "alg.info")$ndim if (is.null(x = metric)) { stop("Provided Neighbor object wasn't generated with annoy") } annoy.idx <- CreateAnn(name = metric, ndim = ndim) annoy.idx$load(path.expand(path = file)) Index(object = object) <- annoy.idx return(object) } #' Calculate the variance to mean ratio of logged values #' #' Calculate the variance to mean ratio (VMR) in non-logspace (return answer in #' log-space) #' #' @param x A vector of values #' @param ... Other arguments (not used) #' #' @return Returns the VMR in log-space #' #' @importFrom stats var #' #' @export #' @concept utilities #' #' @examples #' LogVMR(x = c(1, 2, 3)) #' LogVMR <- function(x, ...) { if (inherits(x = x, what = 'AnyMatrix')) { return(apply(X = x, FUN = function(i) {log(x = var(x = exp(x = i) - 1) / mean(x = exp(x = i) - 1))}, MARGIN = 1)) } else { return(log(x = var(x = exp(x = x) - 1) / mean(x = exp(x = x) - 1))) } } #' Aggregate expression of multiple features into a single feature #' #' Calculates relative contribution of each feature to each cell #' for given set of features. 
#' #' @param object A Seurat object #' @param features List of features to aggregate #' @param meta.name Name of column in metadata to store metafeature #' @param cells List of cells to use (default all cells) #' @param assay Which assay to use #' @param slot Which slot to take data from (default data) #' #' @return Returns a \code{Seurat} object with metafeature stored in object metadata #' #' @importFrom Matrix rowSums colMeans #' #' @export #' @concept utilities #' #' @examples #' data("pbmc_small") #' pbmc_small <- MetaFeature( #' object = pbmc_small, #' features = c("LTB", "EAF2"), #' meta.name = 'var.aggregate' #' ) #' head(pbmc_small[[]]) #' MetaFeature <- function( object, features, meta.name = 'metafeature', cells = NULL, assay = NULL, slot = 'data' ) { cells <- cells %||% colnames(x = object) assay <- assay %||% DefaultAssay(object = object) newmat <- GetAssayData(object = object, assay = assay, slot = slot) newmat <- newmat[features, cells] if (slot == 'scale.data') { newdata <- Matrix::colMeans(newmat) } else { rowtotals <- Matrix::rowSums(newmat) newmat <- newmat / rowtotals newdata <- Matrix::colMeans(newmat) } object[[meta.name]] <- newdata return(object) } #' Apply a ceiling and floor to all values in a matrix #' #' @param data Matrix or data frame #' @param min all values below this min value will be replaced with min #' @param max all values above this max value will be replaced with max #' @return Returns matrix after performing these floor and ceil operations #' @export #' @concept utilities #' #' @examples #' mat <- matrix(data = rbinom(n = 25, size = 20, prob = 0.2 ), nrow = 5) #' mat #' MinMax(data = mat, min = 4, max = 5) #' MinMax <- function(data, min, max) { data2 <- data data2[data2 > max] <- max data2[data2 < min] <- min return(data2) } #' Calculate the percentage of a vector above some threshold #' #' @param x Vector of values #' @param threshold Threshold to use when calculating percentage #' #' @return Returns the percentage of \code{x} values above the given threshold #' #' @export #' @concept utilities #' #' @examples #' set.seed(42) #' PercentAbove(sample(1:100, 10), 75) #' PercentAbove <- function(x, threshold) { return(length(x = x[x > threshold]) / length(x = x)) } #' Calculate the percentage of all counts that belong to a given set of features #' #' This function enables you to easily calculate the percentage of all the counts belonging to a #' subset of the possible features for each cell. This is useful when trying to compute the percentage #' of transcripts that map to mitochondrial genes for example. The calculation here is simply the #' column sum of the matrix present in the counts slot for features belonging to the set divided by #' the column sum for all features times 100. #' #' @param object A Seurat object #' @param pattern A regex pattern to match features against #' @param features A defined feature set. If features provided, will ignore the pattern matching #' @param col.name Name in meta.data column to assign. If this is not null, returns a Seurat object #' with the proportion of the feature set stored in metadata. #' @param assay Assay to use #' #' @return Returns a vector with the proportion of the feature set or, if col.name is set, returns a #' Seurat object with the proportion of the feature set stored in metadata.
#' @importFrom Matrix colSums #' @export #' @concept utilities #' #' @examples #' data("pbmc_small") #' # Calculate the proportion of transcripts mapping to mitochondrial genes #' # NOTE: The pattern provided works for human gene names. You may need to adjust depending on your #' # system of interest #' pbmc_small[["percent.mt"]] <- PercentageFeatureSet(object = pbmc_small, pattern = "^MT-") #' PercentageFeatureSet <- function( object, pattern = NULL, features = NULL, col.name = NULL, assay = NULL ) { assay <- assay %||% DefaultAssay(object = object) if (!is.null(x = features) && !is.null(x = pattern)) { warning("Both pattern and features provided. Pattern is being ignored.") } features <- features %||% grep(pattern = pattern, x = rownames(x = object[[assay]]), value = TRUE) percent.featureset <- colSums(x = GetAssayData(object = object, assay = assay, slot = "counts")[features, , drop = FALSE])/ object[[paste0("nCount_", assay)]] * 100 if (!is.null(x = col.name)) { object <- AddMetaData(object = object, metadata = percent.featureset, col.name = col.name) return(object) } return(percent.featureset) } # Pseudobulk feature expression by identity class # # Returns a representative expression value for each identity class # # @param object Seurat object # @param pb.method Whether to 'average' (default) or 'aggregate' expression levels # @param assays Which assays to use. Default is all assays # @param features Features to analyze. Default is all features in the assay # @param return.seurat Whether to return the data as a Seurat object. Default is FALSE # @param group.by Categories for grouping (e.g, ident, replicate, celltype); 'ident' by default # @param add.ident (Deprecated) Place an additional label on each cell prior to pseudobulking # (very useful if you want to observe cluster pseudobulk values, separated by replicate, for example) # @param slot Slot(s) to use; if multiple slots are given, assumed to follow # the order of 'assays' (if specified) or object's assays # @param verbose Print messages and show progress bar # @param ... Arguments to be passed to methods such as \code{\link{CreateSeuratObject}} # # @return Returns a matrix with genes as rows, identity classes as columns. # If return.seurat is TRUE, returns an object of class \code{\link{Seurat}}. # #' @importFrom Matrix rowMeans sparse.model.matrix #' @importFrom stats as.formula # @export # # @examples # data("pbmc_small") # head(PseudobulkExpression(object = pbmc_small)) # PseudobulkExpression <- function( object, pb.method = 'average', assays = NULL, features = NULL, return.seurat = FALSE, group.by = 'ident', add.ident = NULL, slot = 'data', verbose = TRUE, ... ) { CheckDots(..., fxns = 'CreateSeuratObject') if (!is.null(x = add.ident)) { .Deprecated(msg = "'add.ident' is a deprecated argument, please use the 'group.by' argument instead") group.by <- c('ident', add.ident) } if (!(pb.method %in% c('average', 'aggregate'))) { stop("'pb.method' must be either 'average' or 'aggregate'") } object.assays <- FilterObjects(object = object, classes.keep = 'Assay') assays <- assays %||% object.assays if (!all(assays %in% object.assays)) { assays <- assays[assays %in% object.assays] if (length(x = assays) == 0) { stop("None of the requested assays are present in the object") } else { warning("Requested assays that do not exist in object. 
Proceeding with existing assays only.") } } if (length(x = slot) == 1) { slot <- rep_len(x = slot, length.out = length(x = assays)) } else if (length(x = slot) != length(x = assays)) { stop("Number of slots provided does not match number of assays") } data <- FetchData(object = object, vars = rev(x = group.by)) data <- data[which(rowSums(x = is.na(x = data)) == 0), , drop = F] if (nrow(x = data) < ncol(x = object)) { message("Removing cells with NA for 1 or more grouping variables") object <- subset(x = object, cells = rownames(x = data)) } for (i in 1:ncol(x = data)) { data[, i] <- as.factor(x = data[, i]) } num.levels <- sapply( X = 1:ncol(x = data), FUN = function(i) { length(x = levels(x = data[, i])) } ) if (any(num.levels == 1)) { message(paste0("The following grouping variables have 1 value and will be ignored: ", paste0(colnames(x = data)[which(num.levels <= 1)], collapse = ", "))) group.by <- colnames(x = data)[which(num.levels > 1)] data <- data[, which(num.levels > 1), drop = F] } if (ncol(x = data) == 0) { message("All grouping variables have 1 value only. Computing across all cells.") category.matrix <- matrix( data = 1, nrow = ncol(x = object), dimnames = list(Cells(x = object), 'all') ) if (pb.method == 'average') { category.matrix <- category.matrix / sum(category.matrix) } } else { category.matrix <- sparse.model.matrix(object = as.formula( object = paste0( '~0+', paste0( "data[,", 1:length(x = group.by), "]", collapse = ":" ) ) )) colsums <- colSums(x = category.matrix) category.matrix <- category.matrix[, colsums > 0] colsums <- colsums[colsums > 0] if (pb.method == 'average') { category.matrix <- Sweep( x = category.matrix, MARGIN = 2, STATS = colsums, FUN = "/") } colnames(x = category.matrix) <- sapply( X = colnames(x = category.matrix), FUN = function(name) { name <- gsub(pattern = "data\\[, [1-9]*\\]", replacement = "", x = name) return(paste0(rev(x = unlist(x = strsplit(x = name, split = ":"))), collapse = "_")) }) } data.return <- list() for (i in 1:length(x = assays)) { data.use <- GetAssayData( object = object, assay = assays[i], slot = slot[i] ) features.to.avg <- features %||% rownames(x = data.use) if (inherits(x = features, what = "list")) { features.to.avg <- features[i] } if (IsMatrixEmpty(x = data.use)) { warning( "The ", slot[i], " slot for the ", assays[i], " assay is empty. Skipping assay.", immediate. = TRUE, call. = FALSE) next } bad.features <- setdiff(x = features.to.avg, y = rownames(x = data.use)) if (length(x = bad.features) > 0) { warning( "The following ", length(x = bad.features), " features were not found in the ", assays[i], " assay: ", paste(bad.features, collapse = ", "), call. = FALSE, immediate. = TRUE) } features.assay <- intersect(x = features.to.avg, y = rownames(x = data.use)) if (length(x = features.assay) > 0) { data.use <- data.use[features.assay, ] } else { warning("None of the features specified were found in the ", assays[i], " assay.", call. = FALSE, immediate. = TRUE) next } if (slot[i] == 'data') { data.use <- expm1(x = data.use) if (any(data.use == Inf)) { warning("Exponentiation yielded infinite values. 
`data` may not be log-normed.") } } data.return[[i]] <- as.matrix(x = (data.use %*% category.matrix)) names(x = data.return)[i] <- assays[[i]] } if (return.seurat) { if (slot[1] == 'scale.data') { na.matrix <- data.return[[1]] na.matrix[1:length(x = na.matrix)] <- NA # TODO: restore once check.matrix is in SeuratObject # toRet <- CreateSeuratObject( # counts = na.matrix, # project = if (pb.method == "average") "Average" else "Aggregate", # assay = names(x = data.return)[1], # check.matrix = FALSE, # ... # ) toRet <- CreateSeuratObject( counts = na.matrix, project = if (pb.method == "average") "Average" else "Aggregate", assay = names(x = data.return)[1], ... ) toRet <- SetAssayData( object = toRet, assay = names(x = data.return)[1], slot = "counts", new.data = matrix() ) toRet <- SetAssayData( object = toRet, assay = names(x = data.return)[1], slot = "data", new.data = na.matrix ) toRet <- SetAssayData( object = toRet, assay = names(x = data.return)[1], slot = "scale.data", new.data = data.return[[1]] ) } else { # TODO: restore once check.matrix is in SeuratObject # toRet <- CreateSeuratObject( # counts = data.return[[1]], # project = if (pb.method == "average") "Average" else "Aggregate", # assay = names(x = data.return)[1], # check.matrix = FALSE, # ... # ) toRet <- CreateSeuratObject( counts = data.return[[1]], project = if (pb.method == "average") "Average" else "Aggregate", assay = names(x = data.return)[1], ... ) toRet <- SetAssayData( object = toRet, assay = names(x = data.return)[1], slot = "data", new.data = log1p(x = as.matrix(x = data.return[[1]])) ) } #for multimodal data if (length(x = data.return) > 1) { for (i in 2:length(x = data.return)) { if (slot[i] == 'scale.data') { na.matrix <- data.return[[i]] na.matrix[1:length(x = na.matrix)] <- NA # TODO: restore once check.matrix is in SeuratObject # toRet[[names(x = data.return)[i]]] <- CreateAssayObject(counts = na.matrix, check.matrix = FALSE) toRet[[names(x = data.return)[i]]] <- CreateAssayObject(counts = na.matrix) toRet <- SetAssayData( object = toRet, assay = names(x = data.return)[i], slot = "counts", new.data = matrix() ) toRet <- SetAssayData( object = toRet, assay = names(x = data.return)[i], slot = "data", new.data = na.matrix ) toRet <- SetAssayData( object = toRet, assay = names(x = data.return)[i], slot = "scale.data", new.data = as.matrix(x = data.return[[i]]) ) } else { # TODO: restore once check.matrix is in SeuratObject # toRet[[names(x = data.return)[i]]] <- CreateAssayObject(counts = data.return[[i]], check.matrix = FALSE) toRet[[names(x = data.return)[i]]] <- CreateAssayObject(counts = data.return[[i]]) toRet <- SetAssayData( object = toRet, assay = names(x = data.return)[i], slot = "data", new.data = log1p(x = as.matrix(x = data.return[[i]])) ) } } } if (DefaultAssay(object = object) %in% names(x = data.return)) { DefaultAssay(object = toRet) <- DefaultAssay(object = object) if (slot[which(DefaultAssay(object = object) %in% names(x = data.return))[1]] != 'scale.data') { toRet <- ScaleData(object = toRet, verbose = verbose) } } if ('ident' %in% group.by) { first.cells <- c() for (i in 1:ncol(x = category.matrix)) { first.cells <- c(first.cells, Position(x = category.matrix[,i], f = function(x) {x > 0})) } Idents(object = toRet) <- Idents(object = object)[first.cells] } return(toRet) } else { return(data.return) } } #' Regroup idents based on meta.data info #' #' For cells in each ident, set a new identity based on the most common value #' of a specified metadata column. 
#' #' @param object Seurat object #' @param metadata Name of metadata column #' @return A Seurat object with the active idents regrouped #' #' @export #' @concept utilities #' #' @examples #' data("pbmc_small") #' pbmc_small <- RegroupIdents(pbmc_small, metadata = "groups") #' RegroupIdents <- function(object, metadata) { for (ii in levels(x = object)) { ident.cells <- WhichCells(object = object, idents = ii) if (length(x = ident.cells) == 0) { next } new.ident <- names(x = which.max(x = table(object[[metadata]][ident.cells, ]))) if (is.null(x = new.ident)) { stop("Cluster ", ii, " contains only cells with NA values in the '", metadata, "' metadata column.") } Idents(object = object, cells = ident.cells) <- new.ident } return(object) } #' Save the Annoy index #' #' @param object A Neighbor object with the annoy index stored #' @param file Path to file to write index to #' #' @export #' @concept utilities #' SaveAnnoyIndex <- function( object, file ) { index <- Index(object = object) if (is.null(x = index)) { stop("Index for provided Neighbor object is NULL") } index$save(path.expand(path = file)) } #' Find the Quantile of Data #' #' Converts a quantile in character form to a number regarding some data. #' String form for a quantile is represented as a number prefixed with #' \dQuote{q}; for example, 10th quantile is \dQuote{q10} while 2nd quantile is #' \dQuote{q2}. Will only take a quantile of non-zero data values #' #' @param cutoff The cutoff to turn into a quantile #' @param data The data to turn find the quantile of #' #' @return The numerical representation of the quantile #' #' @importFrom stats quantile #' #' @export #' @concept utilities #' #' @examples #' set.seed(42) #' SetQuantile('q10', sample(1:100, 10)) #' SetQuantile <- function(cutoff, data) { if (grepl(pattern = '^q[0-9]{1,2}$', x = as.character(x = cutoff), perl = TRUE)) { this.quantile <- as.numeric(x = sub( pattern = 'q', replacement = '', x = as.character(x = cutoff) )) / 100 data <- unlist(x = data) data <- data[data > 0] cutoff <- quantile(x = data, probs = this.quantile) } return(as.numeric(x = cutoff)) } #' @rdname UpdateSymbolList #' #' @return \code{UpdateSymbolList}: \code{symbols} with updated symbols from #' HGNC's gene names database #' #' @export #' @concept utilities #' #' @examples #' \dontrun{ #' UpdateSymbolList(symbols = cc.genes$s.genes) #' } #' UpdateSymbolList <- function( symbols, timeout = 10, several.ok = FALSE, verbose = TRUE, ... ) { new.symbols <- suppressWarnings(expr = GeneSymbolThesarus( symbols = symbols, timeout = timeout, several.ok = several.ok, search.types = 'prev_symbol', verbose = verbose, ... )) if (length(x = new.symbols) < 1) { warning("No updated symbols found", call. = FALSE, immediate. 
= TRUE) } else { if (verbose) { message("Found updated symbols for ", length(x = new.symbols), " symbols") x <- sapply(X = new.symbols, FUN = paste, collapse = ', ') message(paste(names(x = x), x, sep = ' -> ', collapse = '\n')) } for (sym in names(x = new.symbols)) { index <- which(x = symbols == sym) symbols <- append( x = symbols[-index], values = new.symbols[[sym]], after = index - 1 ) } } return(symbols) } #%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% # Methods for Seurat-defined generics #%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% #%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% # Methods for R-defined generics #%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% #' @inheritParams base::as.data.frame #' #' @return \code{as.data.frame.Matrix}: A data frame representation of the S4 Matrix #' #' @importFrom Matrix as.matrix #' #' @rdname as.sparse #' @concept utilities #' @export #' @method as.data.frame Matrix #' as.data.frame.Matrix <- function( x, row.names = NULL, optional = FALSE, ..., stringsAsFactors = default.stringsAsFactors() ) { return(as.data.frame( x = as.matrix(x = x), row.names = row.names, optional = optional, stringsAsFactors = stringsAsFactors, ... )) } #%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% # Internal #%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% # Set a default value if an object is null # # @param lhs An object to set if it's null # @param rhs The value to provide if x is null # # @return rhs if lhs is null, else lhs # # @author Hadley Wickham # @references https://adv-r.hadley.nz/functions.html#missing-arguments # `%||%` <- function(lhs, rhs) { if (!is.null(x = lhs)) { return(lhs) } else { return(rhs) } } # Set a default value if an object is NOT null # # @param lhs An object to set if it's NOT null # @param rhs The value to provide if x is NOT null # # @return lhs if lhs is null, else rhs # # @author Hadley Wickham # @references https://adv-r.hadley.nz/functions.html#missing-arguments # `%iff%` <- function(lhs, rhs) { if (!is.null(x = lhs)) { return(rhs) } else { return(lhs) } } # Generate chunk points # # @param dsize How big is the data being chunked # @param csize How big should each chunk be # # @return A matrix where each column is a chunk, row 1 is start points, row 2 is end points # ChunkPoints <- function(dsize, csize) { return(vapply( X = 1L:ceiling(x = dsize / csize), FUN = function(i) { return(c( start = (csize * (i - 1L)) + 1L, end = min(csize * i, dsize) )) }, FUN.VALUE = numeric(length = 2L) )) } # L2 normalize the columns (or rows) of a given matrix # @param mat Matrix to cosine normalize # @param MARGIN Perform normalization over rows (1) or columns (2) # # # @return returns l2-normalized matrix # # L2Norm <- function(mat, MARGIN = 1){ normalized <- Sweep( x = mat, MARGIN = MARGIN, STATS = apply( X = mat, MARGIN = MARGIN, FUN = function(x){ sqrt(x = sum(x ^ 2)) } ), FUN = "/" ) normalized[!is.finite(x = normalized)] <- 0 return(normalized) } # Check the use of ... # # @param ... Arguments passed to a function that fall under ... # @param fxns A list/vector of functions or function names # # @return ... 
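#
# Usage sketch (added for illustration; the wrapper below is hypothetical, not
# part of Seurat): CheckDots is meant to be called at the top of a function that
# forwards its dots to a known helper, so that unrecognised named arguments are
# reported instead of being silently dropped, e.g.
#   LogNormalizeWrapper <- function(object, ...) {
#     CheckDots(..., fxns = 'LogNormalize')     # warn/stop on arguments LogNormalize ignores
#     LogNormalize(data = object, ...)
#   }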
# # @importFrom utils argsAnywhere getAnywhere #' @importFrom utils isS3stdGeneric methods argsAnywhere isS3method # # @examples # CheckDots <- function(..., fxns = NULL) { args.names <- names(x = list(...)) if (length(x = list(...)) == 0) { return(invisible(x = NULL)) } if (is.null(x = args.names)) { stop("No named arguments passed") } if (length(x = fxns) == 1) { fxns <- list(fxns) } for (f in fxns) { if (!(is.character(x = f) || is.function(x = f))) { stop("CheckDots only works on characters or functions, not ", class(x = f)) } } fxn.args <- suppressWarnings(expr = sapply( X = fxns, FUN = function(x) { x <- tryCatch( expr = if (isS3stdGeneric(f = x)) { as.character(x = methods(generic.function = x)) } else { x }, error = function(...) { return(x) } ) x <- if (is.character(x = x)) { sapply(X = x, FUN = argsAnywhere, simplify = FALSE, USE.NAMES = TRUE) } else if (length(x = x) <= 1) { list(x) } return(sapply( X = x, FUN = function(f) { return(names(x = formals(fun = f))) }, simplify = FALSE, USE.NAMES = TRUE )) }, simplify = FALSE, USE.NAMES = TRUE )) fxn.args <- unlist(x = fxn.args, recursive = FALSE) fxn.null <- vapply(X = fxn.args, FUN = is.null, FUN.VALUE = logical(length = 1L)) if (all(fxn.null) && !is.null(x = fxns)) { stop("None of the functions passed could be found") } else if (any(fxn.null)) { warning( "The following functions passed could not be found: ", paste(names(x = which(x = fxn.null)), collapse = ', '), call. = FALSE, immediate. = TRUE ) fxn.args <- Filter(f = Negate(f = is.null), x = fxn.args) } dfxns <- vector(mode = 'logical', length = length(x = fxn.args)) names(x = dfxns) <- names(x = fxn.args) for (i in 1:length(x = fxn.args)) { dfxns[i] <- any(grepl(pattern = '...', x = fxn.args[[i]], fixed = TRUE)) } if (any(dfxns)) { dfxns <- names(x = which(x = dfxns)) if (any(nchar(x = dfxns) > 0)) { fx <- vapply( X = Filter(f = nchar, x = dfxns), FUN = function(x) { if (isS3method(method = x)) { x <- unlist(x = strsplit(x = x, split = '\\.')) x <- x[length(x = x) - 1L] } return(x) }, FUN.VALUE = character(length = 1L) ) message( "The following functions and any applicable methods accept the dots: ", paste(unique(x = fx), collapse = ', ') ) if (any(nchar(x = dfxns) < 1)) { message( "In addition, there is/are ", length(x = Filter(f = Negate(f = nchar), x = dfxns)), " other function(s) that accept(s) the dots" ) } } else { message("There is/are ", length(x = dfxns), 'function(s) that accept(s) the dots') } } else { unused <- Filter( f = function(x) { return(!x %in% unlist(x = fxn.args)) }, x = args.names ) if (length(x = unused) > 0) { msg <- paste0( "The following arguments are not used: ", paste(unused, collapse = ', ') ) switch( EXPR = getOption(x = "Seurat.checkdots"), "warn" = warning(msg, call. = FALSE, immediate. = TRUE), "stop" = stop(msg), "silent" = NULL, stop("Invalid Seurat.checkdots option. 
Please choose one of warn, stop, silent") ) unused.hints <- sapply(X = unused, FUN = OldParamHints) names(x = unused.hints) <- unused unused.hints <- na.omit(object = unused.hints) if (length(x = unused.hints) > 0) { message( "Suggested parameter: ", paste(unused.hints, "instead of", names(x = unused.hints), collapse = '; '), "\n" ) } } } } # Call gc() to perform garbage collection # CheckGC <- function() { if (getOption(x = "Seurat.memsafe")) { gc(verbose = FALSE) } } # Check a list of objects for duplicate cell names # # @param object.list List of Seurat objects # @param verbose Print message about renaming # @param stop Error out if any duplicate names exist # # @return Returns list of objects with duplicate cells renamed to be unique # # @keywords internal # # @noRd # CheckDuplicateCellNames <- function(object.list, verbose = TRUE, stop = FALSE) { cell.names <- unlist(x = lapply(X = object.list, FUN = colnames)) if (any(duplicated(x = cell.names))) { if (stop) { stop("Duplicate cell names present across objects provided.") } if (verbose) { warning("Some cell names are duplicated across objects provided. Renaming to enforce unique cell names.") } object.list <- lapply( X = 1:length(x = object.list), FUN = function(x) { return(RenameCells( object = object.list[[x]], new.names = paste0(Cells(x = object.list[[x]]), "_", x) )) } ) } return(object.list) } # Create an empty dummy assay to replace existing assay #' @importFrom Matrix sparseMatrix CreateDummyAssay <- function(assay) { cm <- sparseMatrix( i = {}, j = {}, dims = c(nrow(x = assay), ncol(x = assay)) ) cm <- as(object = cm, Class = "dgCMatrix") rownames(x = cm) <- rownames(x = assay) colnames(x = cm) <- colnames(x = assay) # TODO: restore once check.matrix is in SeuratObject # return(CreateAssayObject( # counts = cm, # check.matrix = FALSE # )) return(CreateAssayObject( counts = cm )) } # Extract delimiter information from a string. # # Parses a string (usually a cell name) and extracts fields based on a delimiter # # @param string String to parse. # @param field Integer(s) indicating which field(s) to extract. Can be a vector multiple numbers. # @param delim Delimiter to use, set to underscore by default. # # @return A new string, that parses out the requested fields, and (if multiple), rejoins them with the same delimiter # # @export # # @examples # ExtractField(string = 'Hello World', field = 1, delim = '_') # ExtractField <- function(string, field = 1, delim = "_") { fields <- as.numeric(x = unlist(x = strsplit(x = as.character(x = field), split = ","))) if (length(x = fields) == 1) { return(strsplit(x = string, split = delim)[[1]][field]) } return(paste(strsplit(x = string, split = delim)[[1]][fields], collapse = delim)) } # Resize GenomicRanges upstream and or downstream # from https://support.bioconductor.org/p/78652/ # Extend <- function(x, upstream = 0, downstream = 0) { if (any(GenomicRanges::strand(x = x) == "*")) { warning("'*' ranges were treated as '+'") } on_plus <- GenomicRanges::strand(x = x) == "+" | GenomicRanges::strand(x = x) == "*" new_start <- GenomicRanges::start(x = x) - ifelse(test = on_plus, yes = upstream, no = downstream) new_end <- GenomicRanges::end(x = x) + ifelse(test = on_plus, yes = downstream, no = upstream) IRanges::ranges(x = x) <- IRanges::IRanges(start = new_start, end = new_end) x <- GenomicRanges::trim(x = x) return(x) } # Interleave vectors together # # @param ... Vectors to be interleaved # # @return A vector with the values from each vector in ... interleaved # Interleave <- function(...) 
{
  return(as.vector(x = t(x = as.data.frame(x = list(...)))))
}

# Check if a matrix is empty
#
# Takes a matrix and asks if it's empty (either 0x0 or 1x1 with a value of NA)
#
# @param x A matrix
#
# @return Whether or not \code{x} is empty
#
IsMatrixEmpty <- function(x) {
  matrix.dims <- dim(x = x)
  matrix.na <- all(matrix.dims == 1) && all(is.na(x = x))
  return(all(matrix.dims == 0) || matrix.na)
}

# Check if externalptr is null
# From https://stackoverflow.com/questions/26666614/how-do-i-check-if-an-externalptr-is-null-from-within-r
#
is.null.externalptr <- function(pointer) {
  stopifnot(is(pointer, "externalptr"))
  .Call("isnull", pointer)
}

# Check whether an assay has been processed by sctransform
#
# @param assay assay to check
#
# @return Boolean
#
IsSCT <- function(assay) {
  if (is.list(x = assay)) {
    sct.check <- lapply(X = assay, FUN = function(x) {
      return(!is.null(x = Misc(object = x, slot = 'vst.out')) | !is.null(x = Misc(object = x, slot = 'vst.set')) | inherits(x = x, what = "SCTAssay"))
    })
    return(unlist(x = sct.check))
  }
  return(!is.null(x = Misc(object = assay, slot = 'vst.out')) | !is.null(x = Misc(object = assay, slot = 'vst.set')) | inherits(x = assay, what = "SCTAssay"))
}

# Check whether a vst.out is from sctransform
#
# @param vst.out a sct model from sctransform
#
# @return Boolean
#
IsVSTout <- function(vst.out) {
  vst.element <- c("model_str", "model_pars_fit", "cell_attr")
  vst.check <- setdiff(x = vst.element, y = names(x = vst.out))
  if (length(x = setdiff(x = vst.element, y = names(x = vst.out))) == 0) {
    vst.check <- TRUE
  } else {
    vst.check <- FALSE
  }
  return(vst.check)
}

# Calculate the euclidean distance between x and y, and subtract the
# nearest-neighbor distance of x to keep local connectivity.
# It is used in FindModalityWeights to calculate the within- and cross-modality distances
impute_dist <- function(x, y, nearest.dist) {
  dist <- sqrt(x = rowSums(x = (x - y)**2)) - nearest.dist
  dist <- ReLu(x = dist)
  return(dist)
}

# Check the length of components of a list
#
# @param values A list whose components should be checked
# @param cutoff A minimum value to check for
#
# @return a vector of logicals
#
LengthCheck <- function(values, cutoff = 0) {
  return(vapply(
    X = values,
    FUN = function(x) {
      return(length(x = x) > cutoff)
    },
    FUN.VALUE = logical(1)
  ))
}

# Function to map values in a vector `v` as defined in `from` to the values
# defined in `to`.
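# For example (illustrative sketch, not evaluated): remapping old cluster labels
# to cell-type names while leaving unmatched values untouched
#   MapVals(v = c("0", "1", "2", "1", "7"), from = c("0", "1", "2"), to = c("B", "T", "NK"))
#   # returns c("B", "T", "NK", "T", "7")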
# # @param v vector of values to map # @param from vector of original values # @param to vector of values to map original values to (should be of equal # length as from) # @return returns vector of mapped values # MapVals <- function(v, from, to) { if (length(x = from) != length(x = to)) { stop("from and to vectors are not the equal length.") } vals.to.match <- match(x = v, table = from) vals.to.match.idx <- !is.na(x = vals.to.match) v[vals.to.match.idx] <- to[vals.to.match[vals.to.match.idx]] return(v) } # Independently shuffle values within each row of a matrix # # Creates a matrix where correlation structure has been removed, but overall values are the same # # @param x Matrix to shuffle # # @return Returns a scrambled matrix, where each row is shuffled independently # #' @importFrom stats runif # # @export # # @examples # mat <- matrix(data = rbinom(n = 25, size = 20, prob = 0.2 ), nrow = 5) # mat # MatrixRowShuffle(x = mat) # MatrixRowShuffle <- function(x) { x2 <- x x2 <- t(x = x) ind <- order(c(col(x = x2)), runif(n = length(x = x2))) x2 <- matrix( data = x2[ind], nrow = nrow(x = x), ncol = ncol(x = x), byrow = TRUE ) return(x2) } # Reverse the vector x and return the value at the Nth index. If N is larger # than the length of the vector, return the last value in the reversed vector. # # @param x vector of interest # @param N index in reversed vector # # @return returns element at given index # MaxN <- function(x, N = 2){ len <- length(x) if (N > len) { warning('N greater than length(x). Setting N=length(x)') N <- length(x) } sort(x, partial = len - N + 1)[len - N + 1] } # Given a range from cut, compute the mean # # @x range from cut as a string (e.g. (10, 20] ) # @return returns a numeric with the mean of the range # MeanRange <- function(x) { left <- gsub(pattern = "\\]", replacement = "", x = sub(pattern = "\\([[:digit:]\\.e+]*,", x = x, replacement = "")) right <- gsub(pattern = "\\(", replacement = "", x = sub(pattern = ",[[:digit:]\\.e+]*]", x = x, replacement = "")) return(mean(c(as.numeric(x = left), as.numeric(x = right)))) } # Melt a data frame # # @param x A data frame # # @return A molten data frame # Melt <- function(x) { if (!is.data.frame(x = x)) { x <- as.data.frame(x = x) } return(data.frame( rows = rep.int(x = rownames(x = x), times = ncol(x = x)), cols = unlist(x = lapply(X = colnames(x = x), FUN = rep.int, times = nrow(x = x))), vals = unlist(x = x, use.names = FALSE) )) } # Modify parameters in calling environment # # Used exclusively for helper parameter validation functions # # @param param name of parameter to change # @param value new value for parameter # ModifyParam <- function(param, value) { # modify in original function environment env1 <- sys.frame(which = length(x = sys.frames()) - 2) env1[[param]] <- value # also modify in validation function environment env2 <- sys.frame(which = length(x = sys.frames()) - 1) env2[[param]] <- value } # Give hints for old parameters and their newer counterparts # # This is a non-exhaustive list. 
If your function isn't working properly based # on the parameters you give it, please read the documentation for your function # # @param param A vector of parameters to get hints for # # @return Parameter hints for the specified parameters # OldParamHints <- function(param) { param.conversion <- c( 'raw.data' = 'counts', 'min.genes' = 'min.features', 'features.plot' = 'features', 'pc.genes' = 'features', 'do.print' = 'verbose', 'genes.print' = 'nfeatures.print', 'pcs.print' = 'ndims.print', 'pcs.use' = 'dims', 'reduction.use' = 'reduction', 'cells.use' = 'cells', 'do.balanced' = 'balanced', 'display.progress' = 'verbose', 'print.output' = 'verbose', 'dims.use' = 'dims', 'reduction.type' = 'reduction', 'y.log' = 'log', 'cols.use' = 'cols', 'assay.use' = 'assay' ) return(param.conversion[param]) } # Check if a web resource is available # # @param url A URL # @param strict Perform a strict web availability test # @param seconds Timeout in seconds # # @return \code{TRUE} if \url{is available} otherwise \code{FALSE} # #' @importFrom httr GET status_code timeout # # @keywords internal # Online <- function(url, strict = FALSE, seconds = 5L) { if (isTRUE(x = strict)) { code <- 200L comp <- identical } else { code <- 404L comp <- Negate(f = identical) } request <- tryCatch( expr = GET(url = url, timeout(seconds = seconds)), error = function(err) { code <- if (grepl(pattern = '^Timeout was reached', x = err$message)) { 408L } else { 404L } return(code) } ) return(comp(x = status_code(x = request), y = code)) } # Parenting parameters from one environment to the next # # This function allows one to modify a parameter in a parent environment # The primary use of this is to ensure logging functions store correct parameters # if they've been modified by a child function or method # # @param parent.find Regex pattern of name of parent function call to modify. # For example, this can be the class name for a method that was dispatched previously # @param ... Parameter names and values to parent; both name and value must be supplied # in the standard \code{name = value} format; any number of name/value pairs can be specified # # @return No return, modifies parent environment directly # # @examples # Parenting(parent.find = 'Seurat', features = features[features > 7]) # Parenting <- function(parent.find = 'Seurat', ...) { calls <- as.character(x = sys.calls()) calls <- lapply( X = strsplit(x = calls, split = '(', fixed = TRUE), FUN = '[', 1 ) parent.index <- grep(pattern = parent.find, x = calls) if (length(x = parent.index) != 1) { warning( "Cannot find a parent environment called ", parent.find, immediate. = TRUE, call. = FALSE ) } else { to.parent <- list(...) if (length(x = to.parent) == 0) { warning("Nothing to parent", immediate. = TRUE, call. = FALSE) } else if (is.null(x = names(x = to.parent))) { stop("All input must be in a key = value pair") } else if (length(x = Filter(f = nchar, x = names(x = to.parent))) != length(x = to.parent)) { stop("All inputs must be named") } else { parent.environ <- sys.frame(which = parent.index) for (i in 1:length(x = to.parent)) { parent.environ[[names(x = to.parent)[i]]] <- to.parent[[i]] } } } } # Generate a random name # # Make a name from randomly sampled lowercase letters, # pasted together with no spaces or other characters # # @param length How long should the name be # @param ... 
Extra parameters passed to sample # # @return A character with nchar == length of randomly sampled letters # # @seealso \code{\link{sample}} # RandomName <- function(length = 5L, ...) { CheckDots(..., fxns = 'sample') return(paste(sample(x = letters, size = length, ...), collapse = '')) } # Rectified linear units function. Calculate positive part of its argument # The input can be a vector and a matrix ReLu <- function(x) { x[x < 0] <- 0 return(x) } # Remove the last field from a string # # Parses a string (usually a cell name) and removes the last field based on a delimter # # @param string String to parse # @param delim Delimiter to use, set to underscore by default. # # @return A new string sans the last field # RemoveLastField <- function(string, delim = "_") { ss <- strsplit(x = string, split = delim)[[1]] if (length(x = ss) == 1) { return(string) } else { return(paste(ss[1:(length(x = ss)-1)], collapse = delim)) } } # Calculate row mean of a sparse matrix # @param mat sparse matrix # @return A vector of row mean # RowMeanSparse <- function(mat) { mat <- RowSparseCheck(mat = mat) output <- row_mean_dgcmatrix( x = slot(object = mat, name = "x"), i = slot(object = mat, name = "i"), rows = nrow(x = mat), cols = ncol(x = mat) ) names(x = output) <- rownames(x = mat) return(output) } # Calculate row sum of a sparse matrix # # @param mat sparse matrix # @return A vector of row sum # RowSumSparse <- function(mat) { mat <- RowSparseCheck(mat = mat) output <- row_sum_dgcmatrix( x = slot(object = mat, name = "x"), i = slot(object = mat, name = "i"), rows = nrow(x = mat), cols = ncol(x = mat) ) names(x = output) <- rownames(x = mat) return(output) } # Calculate row variance of a sparse matrix # # @param mat sparse matrix # @return A vector of row variance # RowVarSparse <- function(mat) { mat <- RowSparseCheck(mat = mat) output <- row_var_dgcmatrix( x = slot(object = mat, name = "x"), i = slot(object = mat, name = "i"), rows = nrow(x = mat), cols = ncol(x = mat) ) names(x = output) <- rownames(x = mat) return(output) } # Check if the input matrix is dgCMatrix # # @param mat sparse matrix # @return A dgCMatrix # RowSparseCheck <- function(mat) { if (!inherits(x = mat, what = "sparseMatrix")) { stop("Input should be sparse matrix") } else if (class(x = mat) != "dgCMatrix") { warning("Input matrix is converted to dgCMatrix.") mat <- as.sparse(x = mat) } return(mat) } # Sweep out array summaries # # Reimplmentation of \code{\link[base]{sweep}} to maintain compatability with # both R 3.X and 4.X # # @inheritParams base::sweep # @param x an array. # # @seealso \code{\link[base]{sweep}} # Sweep <- function(x, MARGIN, STATS, FUN = '-', check.margin = TRUE, ...) { if (any(grepl(pattern = 'X', x = names(x = formals(fun = sweep))))) { return(sweep( X = x, MARGIN = MARGIN, STATS = STATS, FUN = FUN, check.margin = check.margin, ... )) } else { return(sweep( x = x, MARGIN = MARGIN, STATS = STATS, FUN = FUN, check.margin = check.margin, ... 
)) } } # Get program paths in a system-agnostic way # # @param progs A vector of program names # @param error Throw an error if any programs are not found # @param add.exe Add '.exe' extension to program names that don't have it # # @return A named vector of program paths; missing programs are returned as # \code{NA} if \code{error = FALSE} # #' @importFrom tools file_ext # SysExec <- function( progs, error = ifelse(test = length(x = progs) == 1, yes = TRUE, no = FALSE), add.exe = .Platform$OS.type == 'windows' ) { cmd <- ifelse( test = .Platform$OS.type == 'windows', yes = 'where.exe', no = 'which' ) if (add.exe) { missing.exe <- file_ext(x = progs) != 'exe' progs[missing.exe] <- paste0(progs[missing.exe], '.exe') } paths <- sapply( X = progs, FUN = function(x) { return(tryCatch( expr = system2(command = cmd, args = x, stdout = TRUE)[1], warning = function(...) { return(NA_character_) } )) } ) if (error && any(is.na(x = paths))) { stop( "Could not find the following programs: ", paste(names(x = paths[is.na(x = paths)]), collapse = ', '), call. = FALSE ) } return(paths) } # Try to convert x to numeric, if NA's introduced return x as is # ToNumeric <- function(x){ # check for x:y range if (is.numeric(x = x)) { return(x) } if (length(x = unlist(x = strsplit(x = x, split = ":"))) == 2) { num <- unlist(x = strsplit(x = x, split = ":")) return(num[1]:num[2]) } num <- suppressWarnings(expr = as.numeric(x = x)) if (!is.na(x = num)) { return(num) } return(x) } Seurat/R/generics.R0000644000176200001440000005337014156670503013657 0ustar liggesusers#' @include reexports.R #' NULL #%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% # Generics #%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% #' Add info to anchor matrix #' #' @param anchors An \code{\link{AnchorSet}} object #' @param vars Variables to pull for each object via FetchData #' @param slot Slot to pull feature data for #' @param assay Specify the Assay per object if annotating with expression data #' @param ... Arguments passed to other methods # #' @return Returns the anchor dataframe with additional columns for annotation #' metadata #' #' @export #' AnnotateAnchors <- function(anchors, vars, slot, ...) { UseMethod(generic = 'AnnotateAnchors', object = anchors) } #' Convert objects to CellDataSet objects #' #' @param x An object to convert to class \code{CellDataSet} #' @param ... Arguments passed to other methods #' #' @rdname as.CellDataSet #' @export as.CellDataSet #' as.CellDataSet <- function(x, ...) { UseMethod(generic = 'as.CellDataSet', object = x) } #' Convert objects to SingleCellExperiment objects #' #' @param x An object to convert to class \code{SingleCellExperiment} #' @param ... Arguments passed to other methods #' #' @rdname as.SingleCellExperiment #' @export as.SingleCellExperiment #' as.SingleCellExperiment <- function(x, ...) { UseMethod(generic = 'as.SingleCellExperiment', object = x) } #' Cluster Determination #' #' Identify clusters of cells by a shared nearest neighbor (SNN) modularity #' optimization based clustering algorithm. First calculate k-nearest neighbors #' and construct the SNN graph. Then optimize the modularity function to #' determine clusters. For a full description of the algorithms, see Waltman and #' van Eck (2013) \emph{The European Physical Journal B}. Thanks to Nigel #' Delaney (evolvedmicrobe@github) for the rewrite of the Java modularity #' optimizer code in Rcpp! 
#'
#' To run Leiden algorithm, you must first install the leidenalg python
#' package (e.g. via pip install leidenalg), see Traag et al (2018).
#'
#' @param object An object
#' @param ... Arguments passed to other methods
#'
#' @return Returns a Seurat object where the idents have been updated with new cluster info;
#' latest clustering results will be stored in object metadata under 'seurat_clusters'.
#' Note that 'seurat_clusters' will be overwritten every time FindClusters is run
#'
#' @export
#'
#' @rdname FindClusters
#' @export FindClusters
#'
FindClusters <- function(object, ...) {
  UseMethod(generic = 'FindClusters', object = object)
}

#' Gene expression markers of identity classes
#'
#' Finds markers (differentially expressed genes) for identity classes
#'
#' @param object An object
#' @param ... Arguments passed to other methods and to specific DE methods
#' @return data.frame with a ranked list of putative markers as rows, and associated
#' statistics as columns (p-values, ROC score, etc., depending on the test used (\code{test.use})). The following columns are always present:
#' \itemize{
#' \item \code{avg_logFC}: log fold-change of the average expression between the two groups. Positive values indicate that the gene is more highly expressed in the first group
#' \item \code{pct.1}: The percentage of cells where the gene is detected in the first group
#' \item \code{pct.2}: The percentage of cells where the gene is detected in the second group
#' \item \code{p_val_adj}: Adjusted p-value, based on bonferroni correction using all genes in the dataset
#' }
#'
#' @details p-value adjustment is performed using bonferroni correction based on
#' the total number of genes in the dataset. Other correction methods are not
#' recommended, as Seurat pre-filters genes using the arguments above, reducing
#' the number of tests performed. Lastly, as Aaron Lun has pointed out, p-values
#' should be interpreted cautiously, as the genes used for clustering are the
#' same genes tested for differential expression.
#'
#' @references McDavid A, Finak G, Chattopadyay PK, et al. Data exploration,
#' quality control and testing in single-cell qPCR-based gene expression experiments.
#' Bioinformatics. 2013;29(4):461-467. doi:10.1093/bioinformatics/bts714
#' @references Trapnell C, et al. The dynamics and regulators of cell fate
#' decisions are revealed by pseudotemporal ordering of single cells. Nature
#' Biotechnology volume 32, pages 381-386 (2014)
#' @references Andrew McDavid, Greg Finak and Masanao Yajima (2017). MAST: Model-based
#' Analysis of Single Cell Transcriptomics. R package version 1.2.1.
#' https://github.com/RGLab/MAST/
#' @references Love MI, Huber W and Anders S (2014). "Moderated estimation of
#' fold change and dispersion for RNA-seq data with DESeq2." Genome Biology.
#' https://bioconductor.org/packages/release/bioc/html/DESeq2.html #' #' @export #' #' @examples #' data("pbmc_small") #' # Find markers for cluster 2 #' markers <- FindMarkers(object = pbmc_small, ident.1 = 2) #' head(x = markers) #' #' # Take all cells in cluster 2, and find markers that separate cells in the 'g1' group (metadata #' # variable 'group') #' markers <- FindMarkers(pbmc_small, ident.1 = "g1", group.by = 'groups', subset.ident = "2") #' head(x = markers) #' #' # Pass 'clustertree' or an object of class phylo to ident.1 and #' # a node to ident.2 as a replacement for FindMarkersNode #' if (requireNamespace("ape", quietly = TRUE)) { #' pbmc_small <- BuildClusterTree(object = pbmc_small) #' markers <- FindMarkers(object = pbmc_small, ident.1 = 'clustertree', ident.2 = 5) #' head(x = markers) #' } #' #' @rdname FindMarkers #' @export FindMarkers #' #' @aliases FindMarkersNode #' @seealso \code{FoldChange} #' FindMarkers <- function(object, ...) { UseMethod(generic = 'FindMarkers', object = object) } #' (Shared) Nearest-neighbor graph construction #' #' Computes the \code{k.param} nearest neighbors for a given dataset. Can also #' optionally (via \code{compute.SNN}), construct a shared nearest neighbor #' graph by calculating the neighborhood overlap (Jaccard index) between every #' cell and its \code{k.param} nearest neighbors. #' #' @param object An object #' @param ... Arguments passed to other methods #' #' @return This function can either return a \code{\link{Neighbor}} object #' with the KNN information or a list of \code{\link{Graph}} objects with #' the KNN and SNN depending on the settings of \code{return.neighbor} and #' \code{compute.SNN}. When running on a \code{\link{Seurat}} object, this #' returns the \code{\link{Seurat}} object with the Graphs or Neighbor objects #' stored in their respective slots. Names of the Graph or Neighbor object can #' be found with \code{\link{Graphs}} or \code{\link{Neighbors}}. #' #' @examples #' data("pbmc_small") #' pbmc_small #' # Compute an SNN on the gene expression level #' pbmc_small <- FindNeighbors(pbmc_small, features = VariableFeatures(object = pbmc_small)) #' #' # More commonly, we build the SNN on a dimensionally reduced form of the data #' # such as the first 10 principle components. #' #' pbmc_small <- FindNeighbors(pbmc_small, reduction = "pca", dims = 1:10) #' #' @rdname FindNeighbors #' @export FindNeighbors #' FindNeighbors <- function(object, ...) { UseMethod(generic = 'FindNeighbors', object = object) } #' Find variable features #' #' Identifies features that are outliers on a 'mean variability plot'. #' #' For the mean.var.plot method: #' Exact parameter settings may vary empirically from dataset to dataset, and #' based on visual inspection of the plot. Setting the y.cutoff parameter to 2 #' identifies features that are more than two standard deviations away from the #' average dispersion within a bin. The default X-axis function is the mean #' expression level, and for Y-axis it is the log(Variance/mean). All mean/variance #' calculations are not performed in log-space, but the results are reported in #' log-space - see relevant functions for exact details. #' #' @param object An object #' @param ... Arguments passed to other methods #' #' @rdname FindVariableFeatures #' @export FindVariableFeatures #' #' @aliases FindVariableGenes #' FindVariableFeatures <- function(object, ...) 
{ UseMethod(generic = 'FindVariableFeatures', object = object) } #' Find spatially variable features #' #' Identify features whose variability in expression can be explained to some #' degree by spatial location. #' #' @param object An object #' @param ... Arguments passed to other methods #' #' @rdname FindSpatiallyVariableFeatures #' @export FindSpatiallyVariableFeatures #' FindSpatiallyVariableFeatures <- function(object, ...) { UseMethod(generic = 'FindSpatiallyVariableFeatures', object = object) } #' Fold Change #' #' Calculate log fold change and percentage of cells expressing each feature #' for different identity classes. #' #' If the slot is \code{scale.data} or a reduction is specified, average difference #' is returned instead of log fold change and the column is named "avg_diff". #' Otherwise, log2 fold change is returned with column named "avg_log2_FC". #' #' @examples #' data("pbmc_small") #' FoldChange(pbmc_small, ident.1 = 1) #' #' @param object A Seurat object #' @param ... Arguments passed to other methods #' @rdname FoldChange #' @export FoldChange #' @return Returns a data.frame #' @seealso \code{FindMarkers} FoldChange <- function(object, ...) { UseMethod(generic = 'FoldChange', object = object) } #' Get an Assay object from a given Seurat object. #' #' @param object An object #' @param ... Arguments passed to other methods #' #' @return Returns an Assay object #' #' @rdname GetAssay #' @export GetAssay #' GetAssay <- function(object, ...) { UseMethod(generic = 'GetAssay', object = object) } #' Integrate low dimensional embeddings #' #' Perform dataset integration using a pre-computed Anchorset of specified low #' dimensional representations. #' #' The main steps of this procedure are identical to \code{\link{IntegrateData}} #' with one key distinction. When computing the weights matrix, the distance #' calculations are performed in the full space of integrated embeddings when #' integrating more than two datasets, as opposed to a reduced PCA space which #' is the default behavior in \code{\link{IntegrateData}}. #' #' @param anchorset An AnchorSet object #' @param new.reduction.name Name for new integrated dimensional reduction. #' @param reductions Name of reductions to be integrated. For a #' TransferAnchorSet, this should be the name of a reduction present in the #' anchorset object (for example, "pcaproject"). For an IntegrationAnchorSet, #' this should be a \code{\link{DimReduc}} object containing all cells present #' in the anchorset object. #' @param dims.to.integrate Number of dimensions to return integrated values for #' @param weight.reduction Dimension reduction to use when calculating anchor #' weights. This can be one of: #' \itemize{ #' \item{A string, specifying the name of a dimension reduction present in #' all objects to be integrated} #' \item{A vector of strings, specifying the name of a dimension reduction to #' use for each object to be integrated} #' \item{A vector of \code{\link{DimReduc}} objects, specifying the object to #' use for each object in the integration} #' \item{NULL, in which case the full corrected space is used for computing #' anchor weights.} #' } #' @param ... Reserved for internal use #' #' @return When called on a TransferAnchorSet (from FindTransferAnchors), this #' will return the query object with the integrated embeddings stored in a new #' reduction. When called on an IntegrationAnchorSet (from IntegrateData), this #' will return a merged object with the integrated reduction stored. 
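#'
#' @examples
#' \dontrun{
#' # Illustrative sketch only: `reference` and `query` are placeholder Seurat objects,
#' # and `anchors` is assumed to come from a prior call to FindTransferAnchors()
#' anchors <- FindTransferAnchors(reference = reference, query = query, dims = 1:30)
#' query <- IntegrateEmbeddings(
#'   anchorset = anchors,
#'   reference = reference,
#'   query = query,
#'   reductions = "pcaproject",
#'   new.reduction.name = "integrated_dr"
#' )
#' }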
#' #' @rdname IntegrateEmbeddings #' @export IntegrateEmbeddings #' IntegrateEmbeddings <- function(anchorset, ...) { UseMethod(generic = "IntegrateEmbeddings", object = anchorset) } #' Metric for evaluating mapping success #' #' This metric was designed to help identify query cells that aren't well #' represented in the reference dataset. The intuition for the score is that we #' are going to project the query cells into a reference-defined space and then #' project them back onto the query. By comparing the neighborhoods before and #' after projection, we identify cells who's local neighborhoods are the most #' affected by this transformation. This could be because there is a population #' of query cells that aren't present in the reference or the state of the cells #' in the query is significantly different from the equivalent cell type in the #' reference. #' #' @param anchors Set of anchors #' @param ... Arguments passed to other methods #' #' @rdname MappingScore #' @export MappingScore #' MappingScore <- function(anchors, ...) { UseMethod(generic = "MappingScore", object = anchors) } #' Normalize Data #' #' Normalize the count data present in a given assay. #' #' @param object An object #' @param ... Arguments passed to other methods #' #' @return Returns object after normalization #' #' @rdname NormalizeData #' @export NormalizeData #' NormalizeData <- function(object, ...) { UseMethod(generic = 'NormalizeData', object = object) } #' Project query into UMAP coordinates of a reference #' #' This function will take a query dataset and project it into the coordinates #' of a provided reference UMAP. This is essentially a wrapper around two steps: #' \itemize{ #' \item{FindNeighbors - Find the nearest reference cell neighbors and their #' distances for each query cell.} #' \item{RunUMAP - Perform umap projection by providing the neighbor set #' calculated above and the umap model previously computed in the reference.} #' } #' #' @param query Query dataset #' #' @rdname ProjectUMAP #' @export ProjectUMAP #' ProjectUMAP <- function(query, ...) { UseMethod(generic = "ProjectUMAP", object = query) } #' Perform Canonical Correlation Analysis #' #' Runs a canonical correlation analysis using a diagonal implementation of CCA. #' For details about stored CCA calculation parameters, see #' \code{PrintCCAParams}. #' @param object1 First Seurat object #' @param object2 Second Seurat object. # @param ... Arguments passed to other methods #' #' @return Returns a combined Seurat object with the CCA results stored. #' #' @seealso \code{\link{merge.Seurat}} #' #' @examples #' data("pbmc_small") #' pbmc_small #' # As CCA requires two datasets, we will split our test object into two just for this example #' pbmc1 <- subset(pbmc_small, cells = colnames(pbmc_small)[1:40]) #' pbmc2 <- subset(pbmc_small, cells = colnames(x = pbmc_small)[41:80]) #' pbmc1[["group"]] <- "group1" #' pbmc2[["group"]] <- "group2" #' pbmc_cca <- RunCCA(object1 = pbmc1, object2 = pbmc2) #' # Print results #' print(x = pbmc_cca[["cca"]]) #' #' @rdname RunCCA #' @export RunCCA #' RunCCA <- function(object1, object2, ...) { UseMethod(generic = 'RunCCA', object = object1) } #' Run Independent Component Analysis on gene expression #' #' Run fastica algorithm from the ica package for ICA dimensionality reduction. #' For details about stored ICA calculation parameters, see #' \code{PrintICAParams}. #' #' @param object Seurat object #' #' @rdname RunICA #' @export RunICA #' RunICA <- function(object, ...) 
{ UseMethod(generic = "RunICA", object = object) } #' Run Linear Discriminant Analysis #' #' #' @param object An object #' @param ... Arguments passed to other methods #' #' @rdname RunLDA #' @export RunLDA #' #' @aliases RunLDA #' RunLDA <- function(object, ...) { UseMethod(generic = 'RunLDA', object = object) } #' Run Principal Component Analysis #' #' Run a PCA dimensionality reduction. For details about stored PCA calculation #' parameters, see \code{PrintPCAParams}. #' #' @param object An object #' @param ... Arguments passed to other methods and IRLBA #' #' @return Returns Seurat object with the PCA calculation stored in the reductions slot #' #' @export #' #' @rdname RunPCA #' @export RunPCA #' RunPCA <- function(object, ...) { UseMethod(generic = 'RunPCA', object = object) } #' Run Supervised Latent Semantic Indexing #' #' Run a supervised LSI (SLSI) dimensionality reduction supervised by a #' cell-cell kernel. SLSI is used to capture a linear transformation of peaks #' that maximizes its dependency to the given cell-cell kernel. #' #' @param object An object #' @param ... Arguments passed to IRLBA irlba #' #' @return Returns Seurat object with the SLSI calculation stored in the #' reductions slot #' #' @export #' #' @rdname RunSLSI #' @export RunSLSI #' RunSLSI <- function(object, ...) { UseMethod(generic = 'RunSLSI', object = object) } #' Run Supervised Principal Component Analysis #' #' Run a supervised PCA (SPCA) dimensionality reduction supervised by a cell-cell kernel. #' SPCA is used to capture a linear transformation which maximizes its dependency to #' the given cell-cell kernel. We use SNN graph as the kernel to supervise the linear #' matrix factorization. #' #' @param object An object #' @param ... Arguments passed to other methods and IRLBA #' #' @return Returns Seurat object with the SPCA calculation stored in the reductions slot #' @references Barshan E, Ghodsi A, Azimifar Z, Jahromi MZ. #' Supervised principal component analysis: Visualization, classification and #' regression on subspaces and submanifolds. #' Pattern Recognition. 2011 Jul 1;44(7):1357-71. \url{https://www.sciencedirect.com/science/article/pii/S0031320310005819?casa_token=AZMFg5OtPnAAAAAA:_Udu7GJ7G2ed1-XSmr-3IGSISUwcHfMpNtCj-qacXH5SBC4nwzVid36GXI3r8XG8dK5WOQui}; #' @export #' #' @rdname RunSPCA #' @export RunSPCA #' RunSPCA <- function(object, ...) { UseMethod(generic = 'RunSPCA', object = object) } #' Run t-distributed Stochastic Neighbor Embedding #' #' Run t-SNE dimensionality reduction on selected features. Has the option of #' running in a reduced dimensional space (i.e. spectral tSNE, recommended), #' or running based on a set of genes. For details about stored TSNE calculation #' parameters, see \code{PrintTSNEParams}. #' #' @param object Seurat object #' @param ... Arguments passed to other methods and to t-SNE call (most commonly used is perplexity) #' #' @rdname RunTSNE #' @export RunTSNE #' RunTSNE <- function(object, ...) { UseMethod(generic = 'RunTSNE', object = object) } #' Run UMAP #' #' Runs the Uniform Manifold Approximation and Projection (UMAP) dimensional #' reduction technique. To run, you must first install the umap-learn python #' package (e.g. via \code{pip install umap-learn}). Details on this package can be #' found here: \url{https://github.com/lmcinnes/umap}. For a more in depth #' discussion of the mathematics underlying UMAP, see the ArXiv paper here: #' \url{https://arxiv.org/abs/1802.03426}. #' #' @param object An object #' @param ... 
Arguments passed to other methods and UMAP #' #' @return Returns a Seurat object containing a UMAP representation #' #' @references McInnes, L, Healy, J, UMAP: Uniform Manifold Approximation and #' Projection for Dimension Reduction, ArXiv e-prints 1802.03426, 2018 #' #' @export #' #' @examples #' \dontrun{ #' data("pbmc_small") #' pbmc_small #' # Run UMAP map on first 5 PCs #' pbmc_small <- RunUMAP(object = pbmc_small, dims = 1:5) #' # Plot results #' DimPlot(object = pbmc_small, reduction = 'umap') #' } #' #' @rdname RunUMAP #' @export RunUMAP #' RunUMAP <- function(object, ...) { UseMethod(generic = 'RunUMAP', object = object) } #' Scale and center the data. #' #' Scales and centers features in the dataset. If variables are provided in vars.to.regress, #' they are individually regressed against each feature, and the resulting residuals are #' then scaled and centered. #' #' ScaleData now incorporates the functionality of the function formerly known #' as RegressOut (which regressed out given the effects of provided variables #' and then scaled the residuals). To make use of the regression functionality, #' simply pass the variables you want to remove to the vars.to.regress parameter. #' #' Setting center to TRUE will center the expression for each feature by subtracting #' the average expression for that feature. Setting scale to TRUE will scale the #' expression level for each feature by dividing the centered feature expression #' levels by their standard deviations if center is TRUE and by their root mean #' square otherwise. #' #' @param object An object #' @param ... Arguments passed to other methods #' #' @rdname ScaleData #' @export ScaleData #' ScaleData <- function(object, ...) { UseMethod(generic = 'ScaleData', object = object) } #' Get image scale factors #' #' @param object An object to get scale factors from #' @param ... Arguments passed to other methods #' #' @return An object of class \code{scalefactors} #' #' @rdname ScaleFactors #' @export ScaleFactors #' ScaleFactors <- function(object, ...) { UseMethod(generic = 'ScaleFactors', object = object) } #' Compute Jackstraw scores significance. #' #' Significant PCs should show a p-value distribution that is #' strongly skewed to the left compared to the null distribution. #' The p-value for each PC is based on a proportion test comparing the number #' of features with a p-value below a particular threshold (score.thresh), compared with the #' proportion of features expected under a uniform distribution of p-values. #' #' @param object An object #' @param ... Arguments passed to other methods #' #' @return Returns a Seurat object #' #' @author Omri Wurtzel #' @seealso \code{\link{JackStrawPlot}} #' #' @rdname ScoreJackStraw #' @export ScoreJackStraw #' ScoreJackStraw <- function(object, ...) { UseMethod(generic = 'ScoreJackStraw', object = object) } #' Get SCT results from an Assay #' #' Pull the \code{\link{SCTResults}} information from an \code{\link{SCTAssay}} #' object. #' #' @param object An object #' @param ... Arguments passed to other methods (not used) #' #' @rdname SCTResults #' @export SCTResults #' SCTResults <- function(object, ...) 
{ UseMethod(generic = 'SCTResults', object = object) } #' @param value new data to set #' #' @rdname SCTResults #' @export SCTResults<- #' "SCTResults<-" <- function(object, ..., value) { UseMethod(generic = 'SCTResults<-', object = object) } Seurat/R/data.R0000644000176200001440000000274314152507372012766 0ustar liggesusers#' Cell cycle genes #' #' A list of genes used in cell-cycle regression #' #' @format A list of two vectors #' \describe{ #' \item{s.genes}{Genes associated with S-phase} #' \item{g2m.genes}{Genes associated with G2M-phase} #' } #' @concept data #' @source \url{https://www.science.org/doi/abs/10.1126/science.aad0501} #' "cc.genes" #' Cell cycle genes: 2019 update #' #' A list of genes used in cell-cycle regression, updated with 2019 symbols #' #' @section Updated symbols: #' The following symbols were updated from \code{\link{cc.genes}} #' \describe{ #' \item{s.genes}{ #' \itemize{ #' \item \emph{MCM2}: \emph{MCM7} #' \item \emph{MLF1IP}: \emph{CENPU} #' \item \emph{RPA2}: \emph{POLR1B} #' \item \emph{BRIP1}: \emph{MRPL36} #' } #' } #' \item{g2m.genes}{ #' \itemize{ #' \item \emph{FAM64A}: \emph{PIMREG} #' \item \emph{HN1}: \emph{JPT1} #' } #' } #' } #' #' @format A list of two vectors #' \describe{ #' \item{s.genes}{Genes associated with S-phase} #' \item{g2m.genes}{Genes associated with G2M-phase} #' } #' @concept data #' @source \url{https://www.science.org/doi/abs/10.1126/science.aad0501} #' #' @seealso \code{\link{cc.genes}} #' #' @examples #' \dontrun{ #' cc.genes.updated.2019 <- cc.genes #' cc.genes.updated.2019$s.genes <- UpdateSymbolList(symbols = cc.genes.updated.2019$s.genes) #' cc.genes.updated.2019$g2m.genes <- UpdateSymbolList(symbols = cc.genes.updated.2019$g2m.genes) #' } #' "cc.genes.updated.2019" Seurat/R/RcppExports.R0000644000176200001440000001174414165435365014355 0ustar liggesusers# Generated by using Rcpp::compileAttributes() -> do not edit by hand # Generator token: 10BE3573-1514-4C36-9D1C-5A225CD40393 RunModularityClusteringCpp <- function(SNN, modularityFunction, resolution, algorithm, nRandomStarts, nIterations, randomSeed, printOutput, edgefilename) { .Call('_Seurat_RunModularityClusteringCpp', PACKAGE = 'Seurat', SNN, modularityFunction, resolution, algorithm, nRandomStarts, nIterations, randomSeed, printOutput, edgefilename) } RunUMISampling <- function(data, sample_val, upsample = FALSE, display_progress = TRUE) { .Call('_Seurat_RunUMISampling', PACKAGE = 'Seurat', data, sample_val, upsample, display_progress) } RunUMISamplingPerCell <- function(data, sample_val, upsample = FALSE, display_progress = TRUE) { .Call('_Seurat_RunUMISamplingPerCell', PACKAGE = 'Seurat', data, sample_val, upsample, display_progress) } RowMergeMatrices <- function(mat1, mat2, mat1_rownames, mat2_rownames, all_rownames) { .Call('_Seurat_RowMergeMatrices', PACKAGE = 'Seurat', mat1, mat2, mat1_rownames, mat2_rownames, all_rownames) } LogNorm <- function(data, scale_factor, display_progress = TRUE) { .Call('_Seurat_LogNorm', PACKAGE = 'Seurat', data, scale_factor, display_progress) } Standardize <- function(mat, display_progress = TRUE) { .Call('_Seurat_Standardize', PACKAGE = 'Seurat', mat, display_progress) } FastSparseRowScale <- function(mat, scale = TRUE, center = TRUE, scale_max = 10, display_progress = TRUE) { .Call('_Seurat_FastSparseRowScale', PACKAGE = 'Seurat', mat, scale, center, scale_max, display_progress) } FastSparseRowScaleWithKnownStats <- function(mat, mu, sigma, scale = TRUE, center = TRUE, scale_max = 10, display_progress = TRUE) { 
.Call('_Seurat_FastSparseRowScaleWithKnownStats', PACKAGE = 'Seurat', mat, mu, sigma, scale, center, scale_max, display_progress) } FastCov <- function(mat, center = TRUE) { .Call('_Seurat_FastCov', PACKAGE = 'Seurat', mat, center) } FastCovMats <- function(mat1, mat2, center = TRUE) { .Call('_Seurat_FastCovMats', PACKAGE = 'Seurat', mat1, mat2, center) } FastRBind <- function(mat1, mat2) { .Call('_Seurat_FastRBind', PACKAGE = 'Seurat', mat1, mat2) } FastExpMean <- function(mat, display_progress) { .Call('_Seurat_FastExpMean', PACKAGE = 'Seurat', mat, display_progress) } SparseRowVar2 <- function(mat, mu, display_progress) { .Call('_Seurat_SparseRowVar2', PACKAGE = 'Seurat', mat, mu, display_progress) } SparseRowVarStd <- function(mat, mu, sd, vmax, display_progress) { .Call('_Seurat_SparseRowVarStd', PACKAGE = 'Seurat', mat, mu, sd, vmax, display_progress) } FastLogVMR <- function(mat, display_progress) { .Call('_Seurat_FastLogVMR', PACKAGE = 'Seurat', mat, display_progress) } RowVar <- function(x) { .Call('_Seurat_RowVar', PACKAGE = 'Seurat', x) } SparseRowVar <- function(mat, display_progress) { .Call('_Seurat_SparseRowVar', PACKAGE = 'Seurat', mat, display_progress) } ReplaceColsC <- function(mat, col_idx, replacement) { .Call('_Seurat_ReplaceColsC', PACKAGE = 'Seurat', mat, col_idx, replacement) } GraphToNeighborHelper <- function(mat) { .Call('_Seurat_GraphToNeighborHelper', PACKAGE = 'Seurat', mat) } fast_dist <- function(x, y, n) { .Call('_Seurat_fast_dist', PACKAGE = 'Seurat', x, y, n) } FindWeightsC <- function(cells2, distances, anchor_cells2, integration_matrix_rownames, cell_index, anchor_score, min_dist, sd, display_progress) { .Call('_Seurat_FindWeightsC', PACKAGE = 'Seurat', cells2, distances, anchor_cells2, integration_matrix_rownames, cell_index, anchor_score, min_dist, sd, display_progress) } IntegrateDataC <- function(integration_matrix, weights, expression_cells2) { .Call('_Seurat_IntegrateDataC', PACKAGE = 'Seurat', integration_matrix, weights, expression_cells2) } ScoreHelper <- function(snn, query_pca, query_dists, corrected_nns, k_snn, subtract_first_nn, display_progress) { .Call('_Seurat_ScoreHelper', PACKAGE = 'Seurat', snn, query_pca, query_dists, corrected_nns, k_snn, subtract_first_nn, display_progress) } ComputeSNN <- function(nn_ranked, prune) { .Call('_Seurat_ComputeSNN', PACKAGE = 'Seurat', nn_ranked, prune) } WriteEdgeFile <- function(snn, filename, display_progress) { invisible(.Call('_Seurat_WriteEdgeFile', PACKAGE = 'Seurat', snn, filename, display_progress)) } DirectSNNToFile <- function(nn_ranked, prune, display_progress, filename) { .Call('_Seurat_DirectSNNToFile', PACKAGE = 'Seurat', nn_ranked, prune, display_progress, filename) } SNN_SmallestNonzero_Dist <- function(snn, mat, n, nearest_dist) { .Call('_Seurat_SNN_SmallestNonzero_Dist', PACKAGE = 'Seurat', snn, mat, n, nearest_dist) } row_sum_dgcmatrix <- function(x, i, rows, cols) { .Call('_Seurat_row_sum_dgcmatrix', PACKAGE = 'Seurat', x, i, rows, cols) } row_mean_dgcmatrix <- function(x, i, rows, cols) { .Call('_Seurat_row_mean_dgcmatrix', PACKAGE = 'Seurat', x, i, rows, cols) } row_var_dgcmatrix <- function(x, i, rows, cols) { .Call('_Seurat_row_var_dgcmatrix', PACKAGE = 'Seurat', x, i, rows, cols) } Seurat/R/convenience.R0000644000176200001440000001222314170106500014327 0ustar liggesusers#' @include generics.R #' @include visualization.R #' NULL #%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% # Functions 
#%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% #' @param ... Extra parameters passed to \code{DimHeatmap} #' #' @rdname DimHeatmap #' @concept convenience #' @export #' PCHeatmap <- function(object, ...) { args <- list('object' = object) args <- c(args, list(...)) args$reduction <- "pca" return(do.call(what = 'DimHeatmap', args = args)) } #' @param ... Extra parameters passed to \code{DimPlot} #' #' @rdname DimPlot #' @concept convenience #' @export #' PCAPlot <- function(object, ...) { return(SpecificDimPlot(object = object, ...)) } #' @rdname SpatialPlot #' @concept convenience #' @concept spatial #' @export #' SpatialDimPlot <- function( object, group.by = NULL, images = NULL, cols = NULL, crop = TRUE, cells.highlight = NULL, cols.highlight = c('#DE2D26', 'grey50'), facet.highlight = FALSE, label = FALSE, label.size = 7, label.color = 'white', repel = FALSE, ncol = NULL, combine = TRUE, pt.size.factor = 1.6, alpha = c(1, 1), image.alpha = 1, stroke = 0.25, label.box = TRUE, interactive = FALSE, information = NULL ) { return(SpatialPlot( object = object, group.by = group.by, images = images, cols = cols, crop = crop, cells.highlight = cells.highlight, cols.highlight = cols.highlight, facet.highlight = facet.highlight, label = label, label.size = label.size, label.color = label.color, repel = repel, ncol = ncol, combine = combine, pt.size.factor = pt.size.factor, alpha = alpha, image.alpha = image.alpha, stroke = stroke, label.box = label.box, interactive = interactive, information = information )) } #' @rdname SpatialPlot #' @concept convenience #' @concept spatial #' @export #' SpatialFeaturePlot <- function( object, features, images = NULL, crop = TRUE, slot = 'data', min.cutoff = NA, max.cutoff = NA, ncol = NULL, combine = TRUE, pt.size.factor = 1.6, alpha = c(1, 1), image.alpha = 1, stroke = 0.25, interactive = FALSE, information = NULL ) { return(SpatialPlot( object = object, features = features, images = images, crop = crop, slot = slot, min.cutoff = min.cutoff, max.cutoff = max.cutoff, ncol = ncol, combine = combine, pt.size.factor = pt.size.factor, alpha = alpha, image.alpha = image.alpha, stroke = stroke, interactive = interactive, information = information )) } #' @rdname DimPlot #' @concept convenience #' @export #' TSNEPlot <- function(object, ...) { return(SpecificDimPlot(object = object, ...)) } #' @rdname DimPlot #' @concept convenience #' @export #' UMAPPlot <- function(object, ...) { return(SpecificDimPlot(object = object, ...)) } #%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% # Methods for Seurat-defined generics #%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% #%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% # Methods for R-defined generics #%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% #%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% # Internal #%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% # @rdname DimPlot # SpecificDimPlot <- function(object, ...) 
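# How this works (a short sketch of the body below): SpecificDimPlot() walks the call stack via
# sys.calls() to recover the name of the convenience wrapper that invoked it (PCAPlot, TSNEPlot,
# or UMAPPlot), strips 'Plot' and lower-cases the remainder to get a candidate reduction name,
# matches that name (and the object's default assay) against the reductions stored in the object,
# and finally forwards all arguments to DimPlot() with the resolved reduction.
# e.g. UMAPPlot(pbmc_small) behaves roughly like DimPlot(pbmc_small, reduction = 'umap').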
{ funs <- sys.calls() name <- as.character(x = funs[[length(x = funs) - 1]])[1] name <- tolower(x = gsub(pattern = 'Plot', replacement = '', x = name)) args <- list('object' = object) args <- c(args, list(...)) reduc <- grep( pattern = name, x = names(x = object), value = TRUE, ignore.case = TRUE ) reduc <- grep(pattern = DefaultAssay(object = object), x = reduc, value = TRUE) args$reduction <- ifelse(test = length(x = reduc) == 1, yes = reduc, no = name) tryCatch( expr = return(do.call(what = 'DimPlot', args = args)), error = function(e) { stop(e) } ) } #' Read output from Parse Biosciences #' #' @param data.dir Directory containing the data files #' @param ... Extra parameters passed to \code{\link{ReadMtx}} #' @concept convenience #' @export #' ReadParseBio <- function(data.dir, ...) { mtx <- file.path(data.dir, "DGE.mtx") cells <- file.path(data.dir, "cell_metadata.csv") features <- file.path(data.dir, "all_genes.csv") return(ReadMtx( mtx = mtx, cells = cells, features = features, cell.column = 1, feature.column = 2, cell.sep = ",", feature.sep = ",", skip.cell = 1, skip.feature = 1, mtx.transpose = TRUE )) } #' Read output from STARsolo #' #' @param data.dir Directory containing the data files #' @param ... Extra parameters passed to \code{\link{ReadMtx}} #' #' @rdname ReadSTARsolo #' @concept convenience #' @export #' ReadSTARsolo <- function(data.dir, ... ) { mtx <- file.path(data.dir, "matrix.mtx") cells <- file.path(data.dir, "barcodes.tsv") features <- file.path(data.dir, "features.tsv") return(ReadMtx(mtx = mtx, cells = cells, features = features, ...)) } Seurat/R/reexports.R0000644000176200001440000001775614156670503014123 0ustar liggesusers #%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% # Classes #%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% #' The Assay Class #' #' The \code{Assay} object is the basic unit of Seurat; for more details, please #' see the documentation in \code{\link[SeuratObject:Assay]{SeuratObject}} #' #' @importClassesFrom SeuratObject Assay #' #' @exportClass Assay #' #' @docType class #' @name Assay-class #' @rdname Assay-class #' #' @seealso \code{\link[SeuratObject:Assay]{SeuratObject::Assay-class}} #' NULL #' The DimReduc Class #' #' The \code{DimReduc} object stores a dimensionality reduction taken out in #' Seurat; for more details, please see the documentation in #' \code{\link[SeuratObject:DimReduc]{SeuratObject}} #' #' @importClassesFrom SeuratObject DimReduc #' #' @exportClass DimReduc #' #' @docType class #' @name DimReduc-class #' @rdname DimReduc-class #' #' @seealso \code{\link[SeuratObject:DimReduc]{SeuratObject::DimReduc-class}} #' NULL #' The Graph Class #' #' For more details, please see the documentation in #' \code{\link[SeuratObject:Graph]{SeuratObject}} #' #' @importClassesFrom SeuratObject Graph #' #' @exportClass Graph #' #' @docType class #' @name Graph-class #' @rdname Graph-class #' #' @seealso \code{\link[SeuratObject:Graph]{SeuratObject::Graph-class}} #' NULL #' The JackStrawData Class #' #' For more details, please see the documentation in #' \code{\link[SeuratObject:JackStrawData]{SeuratObject}} #' #' @importClassesFrom SeuratObject JackStrawData #' #' @exportClass JackStrawData #' #' @docType class #' @name JackStrawData-class #' @rdname JackStrawData-class #' #' @seealso \code{\link[SeuratObject:JackStrawData]{SeuratObject::JackStrawData-class}} #' NULL #' The Neighbor Class #' #' For more details, please see the documentation in #' 
\code{\link[SeuratObject:Neighbor]{SeuratObject}} #' #' @importClassesFrom SeuratObject Neighbor #' #' @exportClass Neighbor #' #' @docType class #' @name Neighbor-class #' @rdname Neighbor-class #' #' @seealso \code{\link[SeuratObject:Neighbor]{SeuratObject::Neighbor-class}} #' NULL #' The Seurat Class #' #' The Seurat object is a representation of single-cell expression data for R; #' for more details, please see the documentation in #' \code{\link[SeuratObject:Seurat]{SeuratObject}} #' #' @importClassesFrom SeuratObject Seurat #' #' @exportClass Seurat #' #' @docType class #' @name Seurat-class #' @rdname Seurat-class #' #' @seealso \code{\link[SeuratObject:Seurat]{SeuratObject::Seurat-class}} #' NULL #' The SeuratCommand Class #' #' For more details, please see the documentation in #' \code{\link[SeuratObject:SeuratCommand]{SeuratObject}} #' #' @importClassesFrom SeuratObject SeuratCommand #' #' @exportClass SeuratCommand #' #' @docType class #' @name SeuratCommand-class #' @rdname SeuratCommand-class #' #' @seealso \code{\link[SeuratObject:SeuratCommand]{SeuratObject::SeuratCommand-class}} #' NULL #' The SpatialImage Class #' #' For more details, please see the documentation in #' \code{\link[SeuratObject:SpatialImage]{SeuratObject}} #' #' @importClassesFrom SeuratObject SpatialImage #' #' @exportClass SpatialImage #' #' @docType class #' @name SpatialImage-class #' @rdname SpatialImage-class #' #' @seealso \code{\link[SeuratObject:SpatialImage]{SeuratObject::SpatialImage-class}} #' NULL #%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% # Functions and Generics #%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% #' @importFrom SeuratObject AddMetaData #' @export #' SeuratObject::AddMetaData #' @importFrom SeuratObject as.Graph #' @export #' SeuratObject::as.Graph #' @importFrom SeuratObject as.Neighbor #' @export #' SeuratObject::as.Neighbor #' @importFrom SeuratObject as.Seurat #' @export #' SeuratObject::as.Seurat #' @importFrom SeuratObject as.sparse #' @export #' SeuratObject::as.sparse #' @importFrom SeuratObject Assays #' @export #' SeuratObject::Assays #' @importFrom SeuratObject Cells #' @export #' SeuratObject::Cells #' @importFrom SeuratObject CellsByIdentities #' @export #' SeuratObject::CellsByIdentities #' @importFrom SeuratObject Command #' @export #' SeuratObject::Command #' @importFrom SeuratObject CreateAssayObject #' @export #' SeuratObject::CreateAssayObject #' @importFrom SeuratObject CreateDimReducObject #' @export #' SeuratObject::CreateDimReducObject #' @importFrom SeuratObject CreateSeuratObject #' @export #' SeuratObject::CreateSeuratObject #' @importFrom SeuratObject DefaultAssay #' @export #' SeuratObject::DefaultAssay #' @importFrom SeuratObject DefaultAssay<- #' @export #' SeuratObject::`DefaultAssay<-` #' @importFrom SeuratObject Distances #' @export #' SeuratObject::Distances #' @importFrom SeuratObject Embeddings #' @export #' SeuratObject::Embeddings #' @importFrom SeuratObject FetchData #' @export #' SeuratObject::FetchData #' @importFrom SeuratObject GetAssayData #' @export #' SeuratObject::GetAssayData #' @importFrom SeuratObject GetImage #' @export #' SeuratObject::GetImage #' @importFrom SeuratObject GetTissueCoordinates #' @export #' SeuratObject::GetTissueCoordinates #' @importFrom SeuratObject HVFInfo #' @export #' SeuratObject::HVFInfo #' @importFrom SeuratObject Idents #' @export #' SeuratObject::Idents #' @importFrom SeuratObject Idents<- #' @export #' SeuratObject::`Idents<-` #' 
@importFrom SeuratObject Images #' @export #' SeuratObject::Images #' @importFrom SeuratObject Index #' @export #' SeuratObject::Index #' @importFrom SeuratObject Index<- #' @export #' SeuratObject::`Index<-` #' @importFrom SeuratObject Indices #' @export #' SeuratObject::Indices #' @importFrom SeuratObject IsGlobal #' @export #' SeuratObject::IsGlobal #' @importFrom SeuratObject JS #' @export #' SeuratObject::JS #' @importFrom SeuratObject JS<- #' @export #' SeuratObject::`JS<-` #' @importFrom SeuratObject Key #' @export #' SeuratObject::Key #' @importFrom SeuratObject Key<- #' @export #' SeuratObject::`Key<-` #' @importFrom SeuratObject Loadings #' @export #' SeuratObject::Loadings #' @importFrom SeuratObject Loadings<- #' @export #' SeuratObject::`Loadings<-` #' @importFrom SeuratObject LogSeuratCommand #' @export #' SeuratObject::LogSeuratCommand #' @importFrom SeuratObject Misc #' @export #' SeuratObject::Misc #' @importFrom SeuratObject Misc<- #' @export #' SeuratObject::`Misc<-` #' @importFrom SeuratObject Neighbors #' @export #' SeuratObject::Neighbors #' @importFrom SeuratObject Project #' @export #' SeuratObject::Project #' @importFrom SeuratObject Project<- #' @export #' SeuratObject::`Project<-` #' @importFrom SeuratObject Radius #' @export #' SeuratObject::Radius #' @importFrom SeuratObject Reductions #' @export #' SeuratObject::Reductions #' @importFrom SeuratObject RenameCells #' @export #' SeuratObject::RenameCells #' @importFrom SeuratObject RenameIdents #' @export #' SeuratObject::RenameIdents #' @importFrom SeuratObject ReorderIdent #' @export #' SeuratObject::ReorderIdent #' @importFrom SeuratObject RowMergeSparseMatrices #' @export #' SeuratObject::RowMergeSparseMatrices #' @importFrom SeuratObject SetAssayData #' @export #' SeuratObject::SetAssayData #' @importFrom SeuratObject SetIdent #' @export #' SeuratObject::SetIdent #' @importFrom SeuratObject SpatiallyVariableFeatures #' @export #' SeuratObject::SpatiallyVariableFeatures #' @importFrom SeuratObject StashIdent #' @export #' SeuratObject::StashIdent #' @importFrom SeuratObject Stdev #' @export #' SeuratObject::Stdev #' @importFrom SeuratObject SVFInfo #' @export #' SeuratObject::SVFInfo #' @importFrom SeuratObject Tool #' @export #' SeuratObject::Tool #' @importFrom SeuratObject Tool<- #' @export #' SeuratObject::`Tool<-` #' @importFrom SeuratObject UpdateSeuratObject #' @export #' SeuratObject::UpdateSeuratObject #' @importFrom SeuratObject VariableFeatures #' @export #' SeuratObject::VariableFeatures #' @importFrom SeuratObject VariableFeatures<- #' @export #' SeuratObject::`VariableFeatures<-` #' @importFrom SeuratObject WhichCells #' @export #' SeuratObject::WhichCells Seurat/R/tree.R0000644000176200001440000002715514152507372013020 0ustar liggesusers#' @include generics.R #' NULL cluster.ape <- paste( "Cluster tree functionality requires 'ape'", "please install with 'install.packages('ape')'" ) #%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% # Functions #%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% #' Phylogenetic Analysis of Identity Classes #' #' Constructs a phylogenetic tree relating the 'average' cell from each #' identity class. Tree is estimated based on a distance matrix constructed in #' either gene expression space or PCA space. #' #' Note that the tree is calculated for an 'average' cell, so gene expression #' or PC scores are averaged across all cells in an identity class before the #' tree is constructed. 
#' #' @param object Seurat object #' @param assay Assay to use for the analysis. #' @param features Genes to use for the analysis. Default is the set of #' variable genes (\code{VariableFeatures(object = object)}) #' @param dims If set, tree is calculated in dimension reduction space; #' overrides \code{features} #' @param reduction Name of dimension reduction to use. Only used if \code{dims} #' is not NULL. #' @param graph If graph is passed, build tree based on graph connectivity between #' clusters; overrides \code{dims} and \code{features} #' @param reorder Re-order identity classes (factor ordering), according to #' position on the tree. This groups similar classes together which can be #' helpful, for example, when drawing violin plots. #' @param reorder.numeric Re-order identity classes according to position on #' the tree, assigning a numeric value ('1' is the leftmost node) #' @param verbose Show progress updates #' @inheritParams AverageExpression #' #' @return A Seurat object where the cluster tree can be accessed with \code{\link{Tool}} #' #' @importFrom pbapply pblapply #' @importFrom stats dist hclust na.omit #' @importFrom utils txtProgressBar setTxtProgressBar #' #' @export #' @concept tree #' #' @examples #' if (requireNamespace("ape", quietly = TRUE)) { #' data("pbmc_small") #' pbmc_small #' pbmc_small <- BuildClusterTree(object = pbmc_small) #' Tool(object = pbmc_small, slot = 'BuildClusterTree') #' } #' BuildClusterTree <- function( object, assay = NULL, features = NULL, dims = NULL, reduction = "pca", graph = NULL, slot = 'data', reorder = FALSE, reorder.numeric = FALSE, verbose = TRUE ) { if (!PackageCheck('ape', error = FALSE)) { stop(cluster.ape, call. = FALSE) } assay <- assay %||% DefaultAssay(object = object) if (!is.null(x = graph)) { idents <- levels(x = object) nclusters <- length(x = idents) data.dist <- matrix( data = numeric(length = 1L), nrow = nclusters, ncol = nclusters, dimnames = list(idents, idents) ) graph <- object[[graph]] cxi <- CellsByIdentities(object = object) cpairs <- na.omit(object = unique(x = t(x = apply( X = expand.grid(1:nclusters, 1:nclusters)[, c(2, 1)], MARGIN = 1, FUN = function(x) { if (length(x = x) == length(x = unique(x = x))) { return(sort(x = x)) } return(c(NA, NA)) } )))) if (verbose) { pb <- txtProgressBar(style = 3, file = stderr()) } for (i in 1:nrow(x = cpairs)) { i1 <- cpairs[i, ][1] i2 <- cpairs[i, ][2] graph.sub <- graph[cxi[[idents[i1]]], cxi[[idents[i2]]]] d <- mean(x = graph.sub) if (is.na(x = d)) { d <- 0 } data.dist[i1, i2] <- d if (verbose) { setTxtProgressBar(pb = pb, value = i / nrow(x = cpairs)) } } if (verbose) { close(con = pb) } diag(x = data.dist) <- 1 data.dist <- dist(x = data.dist) } else if (!is.null(x = dims)) { my.lapply <- ifelse(test = verbose, yes = pblapply, no = lapply) embeddings <- Embeddings(object = object, reduction = reduction)[, dims] data.dims <- my.lapply( X = levels(x = object), FUN = function(x) { cells <- WhichCells(object = object, idents = x) if (length(x = cells) == 1) { cells <- c(cells, cells) } temp <- colMeans(x = embeddings[cells, ]) } ) data.dims <- do.call(what = 'cbind', args = data.dims) colnames(x = data.dims) <- levels(x = object) data.dist <- dist(x = t(x = data.dims)) } else { features <- features %||% VariableFeatures(object = object) features <- intersect(x = features, y = rownames(x = object)) data.avg <- AverageExpression( object = object, assays = assay, features = features, slot = slot, verbose = verbose )[[1]] data.dist <- dist(x = t(x = data.avg[features, ])) } 
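# At this point data.dist holds pairwise distances between the identity classes, derived from
# graph connectivity, reduced-dimension centroids, or average expression of the selected features
# (whichever branch above applied); hierarchical clustering of these distances yields the
# dendrogram that is stored as an ape phylo tree below.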
data.tree <- ape::as.phylo(x = hclust(d = data.dist)) Tool(object = object) <- data.tree if (reorder) { if (verbose) { message("Reordering identity classes and rebuilding tree") } old.ident.order <- levels(x = object) data.tree <- Tool(object = object, slot = 'BuildClusterTree') all.desc <- GetDescendants(tree = data.tree, node = (data.tree$Nnode + 2)) all.desc <- old.ident.order[all.desc[all.desc <= (data.tree$Nnode + 1)]] Idents(object = object) <- factor(x = Idents(object = object), levels = all.desc, ordered = TRUE) if (reorder.numeric) { new.levels <- sort(x = unique(x = as.integer(x = Idents(object = object)))) Idents(object = object) <- factor(x = as.integer(x = Idents(object = object)), levels = new.levels) object[['tree.ident']] <- as.integer(x = Idents(object = object)) } object <- BuildClusterTree( object = object, assay = assay, features = features, dims = dims, reduction = reduction, graph = graph, slot = slot, reorder = FALSE, verbose = verbose ) } return(object) } #%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% # Methods for Seurat-defined generics #%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% #%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% # Methods for R-defined generics #%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% #%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% # Internal #%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% # Depth first traversal path of a given tree # # @param tree Tree object (from ape package) # @param node Internal node in the tree # @param path Path through the tree (for recursion) # @param include.children Include children in the output path # @param only.children Only include children in the output path # @return Returns a vector representing the depth first traversal path # DFT <- function( tree, node, path = NULL, include.children = FALSE, only.children = FALSE ) { if (only.children) { include.children = TRUE } children <- which(x = tree$edge[, 1] == node) child1 <- tree$edge[children[1], 2] child2 <- tree$edge[children[2], 2] if (child1 %in% tree$edge[, 1]) { if (!only.children) { path <- c(path, child1) } path <- DFT( tree = tree, node = child1, path = path, include.children = include.children, only.children = only.children ) } else { if (include.children) { path <- c(path, child1) } } if (child2 %in% tree$edge[, 1]) { if (!only.children) { path <- c(path, child2) } path <- DFT( tree = tree, node = child2, path = path, include.children = include.children, only.children = only.children ) } else { if (include.children) { path <- c(path, child2) } } return(path) } # Function to return all internal (non-terminal) nodes in a given tree # # @param tree Tree object (from ape package) # # @return Returns a vector of all internal nodes for the given tree # GetAllInternalNodes <- function(tree) { return(c(tree$edge[1, 1], DFT(tree = tree, node = tree$edge[1, 1]))) } # Function to get all the descendants on a tree of a given node # # @param tree Tree object (from ape package) # @param node Internal node in the tree # # @return Returns all descendants of the given node # GetDescendants <- function(tree, node, curr = NULL) { if (is.null(x = curr)) { curr <- vector() } daughters <- tree$edge[which(x = tree$edge[, 1] == node), 2] curr <- c(curr, daughters) w <- which(x = daughters >= length(x = tree$tip)) if (length(x = w) > 0) { for (i in 1:length(x = w)) { curr 
<- GetDescendants(tree = tree, node = daughters[w[i]], curr = curr) } } return(curr) } # Function to get all the descendants on a tree left of a given node # # @param tree Tree object (from ape package) # @param node Internal node in the tree # # @return Returns all descendants left of the given node # GetLeftDescendants <- function(tree, node) { daughters <- tree$edge[which(tree$edge[, 1] == node), 2] if (daughters[1] <= (tree$Nnode + 1)) { return(daughters[1]) } daughter.use <- GetDescendants(tree, daughters[1]) daughter.use <- daughter.use[daughter.use <= (tree$Nnode + 1)] return(daughter.use) } # Function to get all the descendants on a tree right of a given node # # @param tree Tree object (from ape package) # @param node Internal node in the tree # # @return Returns all descendants right of the given node # GetRightDescendants <- function(tree, node) { daughters <- tree$edge[which(x = tree$edge[, 1] == node), 2] if (daughters[2] <= (tree$Nnode + 1)) { return(daughters[2]) } daughter.use <- GetDescendants(tree = tree, node = daughters[2]) daughter.use <- daughter.use[daughter.use <= (tree$Nnode + 1)] return(daughter.use) } # Merge children of a node # # Merge the children of a node into a single identity class # # @param object Seurat object # @param node.use Merge children of this node # @param rebuild.tree Rebuild cluster tree after the merge? # @param ... Extra parameters to BuildClusterTree, used only if rebuild.tree = TRUE # # @seealso \code{BuildClusterTree} # # # @examples # data("pbmc_small") # PlotClusterTree(object = pbmc_small) # pbmc_small <- MergeNode(object = pbmc_small, node.use = 7, rebuild.tree = TRUE) # PlotClusterTree(object = pbmc_small) # MergeNode <- function(object, node.use, rebuild.tree = FALSE, ...) { CheckDots(..., fxns = 'BuildClusterTree') object.tree <- object@cluster.tree[[1]] node.children <- DFT( tree = object.tree, node = node.use, include.children = TRUE ) node.children <- intersect(x = node.children, y = levels(x = object@ident)) children.cells <- WhichCells(object = object, ident = node.children) if (length(x = children.cells) > 0) { object <- SetIdent( object = object, cells.use = children.cells, ident.use = min(node.children) ) } if (rebuild.tree) { object <- BuildClusterTree(object = object, ...) } return(object) } # Function to check whether a given node in a tree has a child (leaf node) # # @param tree Tree object (from ape package) # @param node Internal node in the tree # # @return Returns a Boolean of whether the given node is connected to a terminal leaf node NodeHasChild <- function(tree, node) { children <- tree$edge[which(x = tree$edge[, 1] == node), ][, 2] return(any(children %in% tree$edge[, 2] & !children %in% tree$edge[, 1])) } # Function to check whether a given node in a tree has only children (leaf nodes) # # @param tree Tree object (from ape package) # @param node Internal node in the tree # # @return Returns a Boolean of whether the given node is connected to only terminal leaf nodes NodeHasOnlyChildren <- function(tree, node) { children <- tree$edge[which(x = tree$edge[, 1] == node), ][, 2] return(!any(children %in% tree$edge[, 1])) } Seurat/R/integration.R0000644000176200001440000061227114170106500014367 0ustar liggesusers#' @include generics.R #' NULL #%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% # Functions #%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% #' Find integration anchors #' #' Find a set of anchors between a list of \code{\link{Seurat}} objects.
#' These anchors can later be used to integrate the objects using the #' \code{\link{IntegrateData}} function. #' #' The main steps of this procedure are outlined below. For a more detailed #' description of the methodology, please see Stuart, Butler, et al Cell 2019: #' \doi{10.1016/j.cell.2019.05.031}; \doi{10.1101/460147} #' #' First, determine anchor.features if not explicitly specified using #' \code{\link{SelectIntegrationFeatures}}. Then for all pairwise combinations #' of reference and query datasets: #' #' \itemize{ #' \item{Perform dimensional reduction on the dataset pair as specified via #' the \code{reduction} parameter. If \code{l2.norm} is set to \code{TRUE}, #' perform L2 normalization of the embedding vectors.} #' \item{Identify anchors - pairs of cells from each dataset #' that are contained within each other's neighborhoods (also known as mutual #' nearest neighbors).} #' \item{Filter low confidence anchors to ensure anchors in the low dimension #' space are in broad agreement with the high dimensional measurements. This #' is done by looking at the neighbors of each query cell in the reference #' dataset using \code{max.features} to define this space. If the reference #' cell isn't found within the first \code{k.filter} neighbors, remove the #' anchor.} #' \item{Assign each remaining anchor a score. For each anchor cell, determine #' the nearest \code{k.score} anchors within its own dataset and within its #' pair's dataset. Based on these neighborhoods, construct an overall neighbor #' graph and then compute the shared neighbor overlap between anchor and query #' cells (analogous to an SNN graph). We use the 0.01 and 0.90 quantiles on #' these scores to dampen outlier effects and rescale to range between 0-1.} #' } #' #' @param object.list A list of \code{\link{Seurat}} objects between which to #' find anchors for downstream integration. #' @param assay A vector of assay names specifying which assay to use when #' constructing anchors. If NULL, the current default assay for each object is #' used. #' @param reference A vector specifying the object/s to be used as a reference #' during integration. If NULL (default), all pairwise anchors are found (no #' reference/s). If not NULL, the corresponding objects in \code{object.list} #' will be used as references. When using a set of specified references, anchors #' are first found between each query and each reference. The references are #' then integrated through pairwise integration. Each query is then mapped to #' the integrated reference. #' @param anchor.features Can be either: #' \itemize{ #' \item{A numeric value. This will call \code{\link{SelectIntegrationFeatures}} #' to select the provided number of features to be used in anchor finding} #' \item{A vector of features to be used as input to the anchor finding process} #' } #' @param scale Whether or not to scale the features provided. Only set to FALSE #' if you have previously scaled the features you want to use for each object in #' the object.list #' @param normalization.method Name of normalization method used: LogNormalize #' or SCT #' @param sct.clip.range Numeric of length two specifying the min and max values #' the Pearson residual will be clipped to #' @param reduction Dimensional reduction to perform when finding anchors. 
Can #' be one of: #' \itemize{ #' \item{cca: Canonical correlation analysis} #' \item{rpca: Reciprocal PCA} #' \item{rlsi: Reciprocal LSI} #' } #' @param l2.norm Perform L2 normalization on the CCA cell embeddings after #' dimensional reduction #' @param dims Which dimensions to use from the CCA to specify the neighbor #' search space #' @param k.anchor How many neighbors (k) to use when picking anchors #' @param k.filter How many neighbors (k) to use when filtering anchors #' @param k.score How many neighbors (k) to use when scoring anchors #' @param max.features The maximum number of features to use when specifying the #' neighborhood search space in the anchor filtering #' @param nn.method Method for nearest neighbor finding. Options include: rann, #' annoy #' @param n.trees More trees gives higher precision when using annoy approximate #' nearest neighbor search #' @param eps Error bound on the neighbor finding algorithm (from RANN/Annoy) #' @param verbose Print progress bars and output #' #' @return Returns an \code{\link{AnchorSet}} object that can be used as input to #' \code{\link{IntegrateData}}. #' #' @references Stuart T, Butler A, et al. Comprehensive Integration of #' Single-Cell Data. Cell. 2019;177:1888-1902 \doi{10.1016/j.cell.2019.05.031} #' #' @importFrom pbapply pblapply #' @importFrom future.apply future_lapply #' @importFrom future nbrOfWorkers #' #' @export #' @concept integration #' #' @examples #' \dontrun{ #' # to install the SeuratData package see https://github.com/satijalab/seurat-data #' library(SeuratData) #' data("panc8") #' #' # panc8 is a merged Seurat object containing 8 separate pancreas datasets #' # split the object by dataset #' pancreas.list <- SplitObject(panc8, split.by = "tech") #' #' # perform standard preprocessing on each object #' for (i in 1:length(pancreas.list)) { #' pancreas.list[[i]] <- NormalizeData(pancreas.list[[i]], verbose = FALSE) #' pancreas.list[[i]] <- FindVariableFeatures( #' pancreas.list[[i]], selection.method = "vst", #' nfeatures = 2000, verbose = FALSE #' ) #' } #' #' # find anchors #' anchors <- FindIntegrationAnchors(object.list = pancreas.list) #' #' # integrate data #' integrated <- IntegrateData(anchorset = anchors) #' } #' FindIntegrationAnchors <- function( object.list = NULL, assay = NULL, reference = NULL, anchor.features = 2000, scale = TRUE, normalization.method = c("LogNormalize", "SCT"), sct.clip.range = NULL, reduction = c("cca", "rpca", "rlsi"), l2.norm = TRUE, dims = 1:30, k.anchor = 5, k.filter = 200, k.score = 30, max.features = 200, nn.method = "annoy", n.trees = 50, eps = 0, verbose = TRUE ) { normalization.method <- match.arg(arg = normalization.method) reduction <- match.arg(arg = reduction) if (reduction == "rpca") { reduction <- "pca" } if (reduction == "rlsi") { reduction <- "lsi" if (normalization.method == "SCT") { warning("Requested normalization method 'SCT' is not applicable for LSI") normalization.method <- "LogNormalize" } scale <- FALSE k.filter <- NA } my.lapply <- ifelse( test = verbose && nbrOfWorkers() == 1, yes = pblapply, no = future_lapply ) object.ncells <- sapply(X = object.list, FUN = function(x) dim(x = x)[2]) if (any(object.ncells <= max(dims))) { bad.obs <- which(x = object.ncells <= max(dims)) stop("Max dimension too large: objects ", paste(bad.obs, collapse = ", "), " contain fewer than ", max(dims), " cells. 
\n Please specify a", " maximum dimension that is less than the number of cells in any ", "object (", min(object.ncells), ").") } if (!is.null(x = assay)) { if (length(x = assay) != length(x = object.list)) { stop("If specifying the assay, please specify one assay per object in the object.list") } object.list <- sapply( X = 1:length(x = object.list), FUN = function(x) { DefaultAssay(object = object.list[[x]]) <- assay[x] return(object.list[[x]]) } ) } else { assay <- sapply(X = object.list, FUN = DefaultAssay) } # check tool object.list <- lapply( X = object.list, FUN = function (obj) { slot(object = obj, name = "tools")$Integration <- NULL return(obj) }) object.list <- CheckDuplicateCellNames(object.list = object.list) slot <- "data" if (reduction == "lsi") { all.rownames <- lapply(X = object.list, FUN = rownames) anchor.features <- Reduce(f = intersect, x = all.rownames) } if (normalization.method == "SCT") { slot <- "scale.data" scale <- FALSE if (is.numeric(x = anchor.features)) { stop("Please specify the anchor.features to be used. The expected ", "workflow for integrating assays produced by SCTransform is ", "SelectIntegrationFeatures -> PrepSCTIntegration -> ", "FindIntegrationAnchors.") } sct.check <- sapply( X = 1:length(x = object.list), FUN = function(x) { sct.cmd <- grep( pattern = 'PrepSCTIntegration', x = Command(object = object.list[[x]]), value = TRUE ) # check assay has gone through PrepSCTIntegration if (!any(grepl(pattern = "PrepSCTIntegration", x = Command(object = object.list[[x]]))) || Command(object = object.list[[x]], command = sct.cmd, value = "assay") != assay[x]) { stop("Object ", x, " assay - ", assay[x], " has not been processed ", "by PrepSCTIntegration. Please run PrepSCTIntegration prior to ", "FindIntegrationAnchors if using assays generated by SCTransform.", call. = FALSE) } # check that the correct features are being used if (all(Command(object = object.list[[x]], command = sct.cmd, value = "anchor.features") != anchor.features)) { stop("Object ", x, " assay - ", assay[x], " was processed using a ", "different feature set than in PrepSCTIntegration. Please rerun ", "PrepSCTIntegration with the same anchor.features for all objects in ", "the object.list.", call.
= FALSE) } } ) } if (is.numeric(x = anchor.features) && normalization.method != "SCT") { if (verbose) { message("Computing ", anchor.features, " integration features") } anchor.features <- SelectIntegrationFeatures( object.list = object.list, nfeatures = anchor.features, assay = assay ) } if (scale) { if (verbose) { message("Scaling features for provided objects") } object.list <- my.lapply( X = object.list, FUN = function(object) { ScaleData(object = object, features = anchor.features, verbose = FALSE) } ) } nn.reduction <- reduction # if using pca or lsi, only need to compute the internal neighborhood structure once # for each dataset internal.neighbors <- list() if (nn.reduction %in% c("pca", "lsi")) { k.filter <- NA if (verbose) { message("Computing within dataset neighborhoods") } k.neighbor <- max(k.anchor, k.score) internal.neighbors <- my.lapply( X = 1:length(x = object.list), FUN = function(x) { NNHelper( data = Embeddings(object = object.list[[x]][[nn.reduction]])[, dims], k = k.neighbor + 1, method = nn.method, n.trees = n.trees, eps = eps ) } ) } # determine pairwise combinations combinations <- expand.grid(1:length(x = object.list), 1:length(x = object.list)) combinations <- combinations[combinations$Var1 < combinations$Var2, , drop = FALSE] # determine the proper offsets for indexing anchors objects.ncell <- sapply(X = object.list, FUN = ncol) offsets <- as.vector(x = cumsum(x = c(0, objects.ncell)))[1:length(x = object.list)] if (is.null(x = reference)) { # case for all pairwise, leave the combinations matrix the same if (verbose) { message("Finding all pairwise anchors") } } else { reference <- unique(x = sort(x = reference)) if (max(reference) > length(x = object.list)) { stop('Error: requested reference object ', max(reference), " but only ", length(x = object.list), " objects provided") } # modify the combinations matrix to retain only R-R and R-Q comparisons if (verbose) { message("Finding anchors between all query and reference datasets") ok.rows <- (combinations$Var1 %in% reference) | (combinations$Var2 %in% reference) combinations <- combinations[ok.rows, ] } } # determine all anchors anchoring.fxn <- function(row) { i <- combinations[row, 1] j <- combinations[row, 2] object.1 <- DietSeurat( object = object.list[[i]], assays = assay[i], features = anchor.features, counts = FALSE, scale.data = TRUE, dimreducs = reduction ) object.2 <- DietSeurat( object = object.list[[j]], assays = assay[j], features = anchor.features, counts = FALSE, scale.data = TRUE, dimreducs = reduction ) # suppress key duplication warning suppressWarnings(object.1[["ToIntegrate"]] <- object.1[[assay[i]]]) DefaultAssay(object = object.1) <- "ToIntegrate" if (reduction %in% Reductions(object = object.1)) { slot(object = object.1[[reduction]], name = "assay.used") <- "ToIntegrate" } object.1 <- DietSeurat(object = object.1, assays = "ToIntegrate", scale.data = TRUE, dimreducs = reduction) suppressWarnings(object.2[["ToIntegrate"]] <- object.2[[assay[j]]]) DefaultAssay(object = object.2) <- "ToIntegrate" if (reduction %in% Reductions(object = object.2)) { slot(object = object.2[[reduction]], name = "assay.used") <- "ToIntegrate" } object.2 <- DietSeurat(object = object.2, assays = "ToIntegrate", scale.data = TRUE, dimreducs = reduction) object.pair <- switch( EXPR = reduction, 'cca' = { object.pair <- RunCCA( object1 = object.1, object2 = object.2, assay1 = "ToIntegrate", assay2 = "ToIntegrate", features = anchor.features, num.cc = max(dims), renormalize = FALSE, rescale = FALSE, verbose = verbose ) 
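# If l2.norm is TRUE, the CCA cell embeddings are L2-normalized (L2Dim) so that the neighbor
# search in the shared space behaves like a cosine-distance search, and the reduction used for
# anchor finding becomes 'cca.l2'.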
if (l2.norm){ object.pair <- L2Dim(object = object.pair, reduction = reduction) reduction <- paste0(reduction, ".l2") nn.reduction <- reduction } reduction.2 <- character() object.pair }, 'pca' = { object.pair <- ReciprocalProject( object.1 = object.1, object.2 = object.2, reduction = 'pca', projected.name = 'projectedpca', features = anchor.features, do.scale = FALSE, do.center = FALSE, slot = 'scale.data', l2.norm = l2.norm, verbose = verbose ) reduction <- "projectedpca.ref" reduction.2 <- "projectedpca.query" if (l2.norm) { reduction <- paste0(reduction, ".l2") reduction.2 <- paste0(reduction.2, ".l2") } object.pair }, 'lsi' = { object.pair <- ReciprocalProject( object.1 = object.1, object.2 = object.2, reduction = 'lsi', projected.name = 'projectedlsi', features = anchor.features, do.center = TRUE, do.scale = FALSE, slot = 'data', l2.norm = l2.norm, verbose = verbose ) reduction <- "projectedlsi.ref" reduction.2 <- "projectedlsi.query" if (l2.norm) { reduction <- paste0(reduction, ".l2") reduction.2 <- paste0(reduction.2, ".l2") } object.pair }, stop("Invalid reduction parameter. Please choose either cca, rpca, or rlsi") ) internal.neighbors <- internal.neighbors[c(i, j)] anchors <- FindAnchors( object.pair = object.pair, assay = c("ToIntegrate", "ToIntegrate"), slot = slot, cells1 = colnames(x = object.1), cells2 = colnames(x = object.2), internal.neighbors = internal.neighbors, reduction = reduction, reduction.2 = reduction.2, nn.reduction = nn.reduction, dims = dims, k.anchor = k.anchor, k.filter = k.filter, k.score = k.score, max.features = max.features, nn.method = nn.method, n.trees = n.trees, eps = eps, verbose = verbose ) anchors[, 1] <- anchors[, 1] + offsets[i] anchors[, 2] <- anchors[, 2] + offsets[j] return(anchors) } if (nbrOfWorkers() == 1) { all.anchors <- pblapply( X = 1:nrow(x = combinations), FUN = anchoring.fxn ) } else { all.anchors <- future_lapply( X = 1:nrow(x = combinations), FUN = anchoring.fxn, future.seed = TRUE ) } all.anchors <- do.call(what = 'rbind', args = all.anchors) all.anchors <- rbind(all.anchors, all.anchors[, c(2, 1, 3)]) all.anchors <- AddDatasetID(anchor.df = all.anchors, offsets = offsets, obj.lengths = objects.ncell) command <- LogSeuratCommand(object = object.list[[1]], return.command = TRUE) anchor.set <- new(Class = "IntegrationAnchorSet", object.list = object.list, reference.objects = reference %||% seq_along(object.list), anchors = all.anchors, offsets = offsets, anchor.features = anchor.features, command = command ) return(anchor.set) } # Merge dataset and perform reciprocal SVD projection, adding new dimreducs # for each projection and the merged original SVDs. # # @param object.1 First Seurat object to merge # @param object.2 Second Seurat object to merge # @param reduction Name of DimReduc to use. Must be an SVD-based DimReduc (eg, PCA or LSI) # so that the loadings can be used to project new embeddings. Must be present # in both input objects, with a substantial overlap in the features use to construct # the SVDs. # @param dims dimensions used for rpca # @param projected.name Name to store projected SVDs under (eg, "projectedpca") # @param features Features to use. Will subset the SVD loadings to use these features # before performing projection. Typically uses the anchor.features for integration. # @param do.center Center projected values (subtract mean) # @param do.scale Scale projected values (divide by SD) # @param slot Name of slot to pull data from. 
Should be scale.data for PCA and data for LSI # @param verbose Display messages # @return Returns a merged Seurat object with two projected SVDs (object.1 -> object.2, object.2 -> object.1) # and a merged SVD (needed for within-dataset neighbors) ReciprocalProject <- function( object.1, object.2, reduction, dims, projected.name, features, do.scale, do.center, slot, l2.norm, verbose = TRUE ) { common.features <- intersect( x = rownames(x = Loadings(object = object.1[[reduction]])), y = rownames(x = Loadings(object = object.2[[reduction]])) ) common.features <- intersect( x = common.features, y = features ) object.pair <- merge(x = object.1, y = object.2, merge.data = TRUE) data.1 <- GetAssayData( object = object.1, slot = slot ) data.2 <- GetAssayData( object = object.2, slot = slot ) proj.1 <- ProjectSVD( reduction = object.2[[reduction]], data = data.1, mode = reduction, features = common.features, do.scale = do.scale, do.center = do.center, use.original.stats = FALSE, verbose = verbose ) proj.2 <- ProjectSVD( reduction = object.1[[reduction]], data = data.2, mode = reduction, features = common.features, do.scale = do.scale, do.center = do.center, use.original.stats = FALSE, verbose = verbose ) # object.1 is ref, and object.2 is query reduction.dr.name.1 <- paste0(projected.name, ".ref") reduction.dr.name.2 <- paste0(projected.name, ".query") object.pair[[reduction.dr.name.1]] <- CreateDimReducObject( embeddings = rbind(Embeddings(object = object.1[[reduction]]), proj.2)[,dims], loadings = Loadings(object = object.1[[reduction]])[,dims], assay = DefaultAssay(object = object.1), key = paste0(projected.name, "ref_") ) object.pair[[reduction.dr.name.2]] <- CreateDimReducObject( embeddings = rbind(proj.1, Embeddings(object = object.2[[reduction]]))[,dims], loadings = Loadings(object = object.2[[reduction]])[,dims], assay = DefaultAssay(object = object.2), key = paste0(projected.name, "query_") ) object.pair[[reduction]] <- CreateDimReducObject( embeddings = rbind( Embeddings(object = object.1[[reduction]]), Embeddings(object = object.2[[reduction]]))[,dims], loadings = Loadings(object = object.1[[reduction]])[,dims], assay = DefaultAssay(object = object.1), key = paste0(projected.name, "_") ) if (l2.norm) { slot(object = object.pair[[reduction.dr.name.1]], name = "cell.embeddings") <- Sweep( x = Embeddings(object = object.pair[[reduction.dr.name.1]]), MARGIN = 2, STATS = apply(X = Embeddings(object = object.pair[[reduction.dr.name.1]]), MARGIN = 2, FUN = sd), FUN = "/" ) slot(object = object.pair[[reduction.dr.name.2]], name = "cell.embeddings") <- Sweep( x = Embeddings(object = object.pair[[reduction.dr.name.2]]), MARGIN = 2, STATS = apply(X = Embeddings(object = object.pair[[reduction.dr.name.2]]), MARGIN = 2, FUN = sd), FUN = "/" ) object.pair <- L2Dim(object = object.pair, reduction = reduction.dr.name.1) object.pair <- L2Dim(object = object.pair, reduction = reduction.dr.name.2) } return(object.pair) } #' Find transfer anchors #' #' Find a set of anchors between a reference and query object. These #' anchors can later be used to transfer data from the reference to #' query object using the \code{\link{TransferData}} object. #' #' The main steps of this procedure are outlined below. For a more detailed #' description of the methodology, please see Stuart, Butler, et al Cell 2019. #' \doi{10.1016/j.cell.2019.05.031}; \doi{10.1101/460147} #' #' \itemize{ #' #' \item{Perform dimensional reduction. 
Exactly what is done here depends on #' the values set for the \code{reduction} and \code{project.query} #' parameters. If \code{reduction = "pcaproject"}, a PCA is performed on #' either the reference (if \code{project.query = FALSE}) or the query (if #' \code{project.query = TRUE}), using the \code{features} specified. The data #' from the other dataset is then projected onto this learned PCA structure. #' If \code{reduction = "cca"}, then CCA is performed on the reference and #' query for this dimensional reduction step. If #' \code{reduction = "lsiproject"}, the stored LSI dimension reduction in the #' reference object is used to project the query dataset onto the reference. #' If \code{l2.norm} is set to \code{TRUE}, perform L2 normalization of the #' embedding vectors.} #' \item{Identify anchors between the reference and query - pairs of cells #' from each dataset that are contained within each other's neighborhoods #' (also known as mutual nearest neighbors).} #' \item{Filter low confidence anchors to ensure anchors in the low dimension #' space are in broad agreement with the high dimensional measurements. This #' is done by looking at the neighbors of each query cell in the reference #' dataset using \code{max.features} to define this space. If the reference #' cell isn't found within the first \code{k.filter} neighbors, remove the #' anchor.} #' \item{Assign each remaining anchor a score. For each anchor cell, determine #' the nearest \code{k.score} anchors within its own dataset and within its #' pair's dataset. Based on these neighborhoods, construct an overall neighbor #' graph and then compute the shared neighbor overlap between anchor and query #' cells (analogous to an SNN graph). We use the 0.01 and 0.90 quantiles on #' these scores to dampen outlier effects and rescale to range between 0-1.} #' } #' #' @param reference \code{\link{Seurat}} object to use as the reference #' @param query \code{\link{Seurat}} object to use as the query #' @param reference.assay Name of the Assay to use from reference #' @param reference.neighbors Name of the Neighbor to use from the reference. #' Optionally enables reuse of precomputed neighbors. #' @param query.assay Name of the Assay to use from query #' @param reduction Dimensional reduction to perform when finding anchors. #' Options are: #' \itemize{ #' \item{pcaproject: Project the PCA from the reference onto the query. We #' recommend using PCA when reference and query datasets are from scRNA-seq} #' \item{lsiproject: Project the LSI from the reference onto the query. We #' recommend using LSI when reference and query datasets are from scATAC-seq. #' This requires that LSI has been computed for the reference dataset, and the #' same features (eg, peaks or genome bins) are present in both the reference #' and query. See \code{\link[Signac]{RunTFIDF}} and #' \code{\link[Signac]{RunSVD}}} #' \item{rpca: Project the PCA from the reference onto the query, and the PCA #' from the query onto the reference (reciprocal PCA projection).} #' \item{cca: Run a CCA on the reference and query } #' } #' @param reference.reduction Name of dimensional reduction to use from the #' reference if running the pcaproject workflow. Optionally enables reuse of #' precomputed reference dimensional reduction. If NULL (default), use a PCA #' computed on the reference object. #' @param project.query Project the PCA from the query dataset onto the #' reference. 
Use only in rare cases where the query dataset has a much larger #' cell number, but the reference dataset has a unique assay for transfer. In #' this case, the default features will be set to the variable features of the #' query object that are also present in the reference. #' @param features Features to use for dimensional reduction. If not specified, #' set as variable features of the reference object which are also present in #' the query. #' @param scale Scale query data. #' @param normalization.method Name of normalization method used: LogNormalize #' or SCT. #' @param recompute.residuals If using SCT as a normalization method, compute #' query Pearson residuals using the reference SCT model parameters. #' @param npcs Number of PCs to compute on reference if reference.reduction is #' not provided. #' @param l2.norm Perform L2 normalization on the cell embeddings after #' dimensional reduction #' @param dims Which dimensions to use from the reduction to specify the #' neighbor search space #' @param k.anchor How many neighbors (k) to use when finding anchors #' @param k.filter How many neighbors (k) to use when filtering anchors. Set to #' NA to turn off filtering. #' @param k.score How many neighbors (k) to use when scoring anchors #' @param max.features The maximum number of features to use when specifying the #' neighborhood search space in the anchor filtering #' @param nn.method Method for nearest neighbor finding. Options include: rann, #' annoy #' @param n.trees More trees gives higher precision when using annoy approximate #' nearest neighbor search #' @param eps Error bound on the neighbor finding algorithm (from #' \code{\link{RANN}} or \code{\link{RcppAnnoy}}) #' @param approx.pca Use truncated singular value decomposition to approximate #' PCA #' @param mapping.score.k Compute and store nearest k query neighbors in the #' AnchorSet object that is returned. You can optionally set this if you plan #' on computing the mapping score and want to enable reuse of some downstream #' neighbor calculations to make the mapping score function more efficient. #' @param verbose Print progress bars and output #' #' @return Returns an \code{AnchorSet} object that can be used as input to #' \code{\link{TransferData}}, \code{\link{IntegrateEmbeddings}} and #' \code{\link{MapQuery}}. The dimension reduction used for finding anchors is #' stored in the \code{AnchorSet} object and can be used for computing anchor #' weights in downstream functions. Note that only the requested dimensions are #' stored in the dimension reduction object in the \code{AnchorSet}. This means #' that if \code{dims=2:20} is used, for example, the dimension of the stored #' reduction is \code{1:19}. #' #' @references Stuart T, Butler A, et al. Comprehensive Integration of #' Single-Cell Data. Cell.
2019;177:1888-1902 \doi{10.1016/j.cell.2019.05.031}; #' #' @export #' @importFrom methods slot slot<- #' @concept integration #' @examples #' \dontrun{ #' # to install the SeuratData package see https://github.com/satijalab/seurat-data #' library(SeuratData) #' data("pbmc3k") #' #' # for demonstration, split the object into reference and query #' pbmc.reference <- pbmc3k[, 1:1350] #' pbmc.query <- pbmc3k[, 1351:2700] #' #' # perform standard preprocessing on each object #' pbmc.reference <- NormalizeData(pbmc.reference) #' pbmc.reference <- FindVariableFeatures(pbmc.reference) #' pbmc.reference <- ScaleData(pbmc.reference) #' #' pbmc.query <- NormalizeData(pbmc.query) #' pbmc.query <- FindVariableFeatures(pbmc.query) #' pbmc.query <- ScaleData(pbmc.query) #' #' # find anchors #' anchors <- FindTransferAnchors(reference = pbmc.reference, query = pbmc.query) #' #' # transfer labels #' predictions <- TransferData( #' anchorset = anchors, #' refdata = pbmc.reference$seurat_annotations #' ) #' pbmc.query <- AddMetaData(object = pbmc.query, metadata = predictions) #' } #' FindTransferAnchors <- function( reference, query, normalization.method = "LogNormalize", recompute.residuals = TRUE, reference.assay = NULL, reference.neighbors = NULL, query.assay = NULL, reduction = "pcaproject", reference.reduction = NULL, project.query = FALSE, features = NULL, scale = TRUE, npcs = 30, l2.norm = TRUE, dims = 1:30, k.anchor = 5, k.filter = 200, k.score = 30, max.features = 200, nn.method = "annoy", n.trees = 50, eps = 0, approx.pca = TRUE, mapping.score.k = NULL, verbose = TRUE ) { # input validation ValidateParams_FindTransferAnchors( reference = reference, query = query, normalization.method = normalization.method, recompute.residuals = recompute.residuals, reference.assay = reference.assay, reference.neighbors = reference.neighbors, query.assay = query.assay, reduction = reduction, reference.reduction = reference.reduction, project.query = project.query, features = features, scale = scale, npcs = npcs, l2.norm = l2.norm, dims = dims, k.anchor = k.anchor, k.filter = k.filter, k.score = k.score, max.features = max.features, nn.method = nn.method, n.trees = n.trees, eps = eps, approx.pca = approx.pca, mapping.score.k = mapping.score.k, verbose = verbose ) projected <- ifelse(test = reduction == "pcaproject", yes = TRUE, no = FALSE) reduction.2 <- character() feature.mean <- NULL reference.reduction.init <- reference.reduction if (normalization.method == "SCT") { # ensure all residuals required are computed query <- suppressWarnings(expr = GetResidual(object = query, assay = query.assay, features = features, verbose = FALSE)) if (is.null(x = reference.reduction)) { reference <- suppressWarnings(expr = GetResidual(object = reference, assay = reference.assay, features = features, verbose = FALSE)) features <- intersect( x = features, y = intersect( x = rownames(x = GetAssayData(object = query[[query.assay]], slot = "scale.data")), y = rownames(x = GetAssayData(object = reference[[reference.assay]], slot = "scale.data")) ) ) reference[[reference.assay]] <- as( object = CreateAssayObject( data = GetAssayData(object = reference[[reference.assay]], slot = "scale.data")[features, ]), Class = "SCTAssay" ) reference <- SetAssayData( object = reference, slot = "scale.data", assay = reference.assay, new.data = as.matrix(x = GetAssayData(object = reference[[reference.assay]], slot = "data")) ) } query[[query.assay]] <- as( object = CreateAssayObject( data = GetAssayData(object = query[[query.assay]], slot = 
"scale.data")[features, ]), Class = "SCTAssay" ) query <- SetAssayData( object = query, slot = "scale.data", assay = query.assay, new.data = as.matrix(x = GetAssayData(object = query[[query.assay]], slot = "data")) ) feature.mean <- "SCT" } # only keep necessary info from objects query <- DietSeurat( object = query, assays = query.assay, dimreducs = reference.reduction, features = features, scale.data = TRUE ) # check assay in the reference.reduction if (!is.null(reference.reduction) && slot(object = reference[[reference.reduction]], name = "assay.used") != reference.assay) { warnings("reference assay is diffrent from the assay.used in", reference.reduction) slot(object = reference[[reference.reduction]], name = "assay.used") <- reference.assay } reference <- DietSeurat( object = reference, assays = reference.assay, dimreducs = reference.reduction, features = features, scale.data = TRUE ) # append query and reference to cell names - mainly to avoid name conflicts query <- RenameCells( object = query, new.names = paste0(Cells(x = query), "_", "query") ) reference <- RenameCells( object = reference, new.names = paste0(Cells(x = reference), "_", "reference") ) # Perform PCA projection if (reduction == 'pcaproject') { if (project.query) { if (is.null(x = reference.reduction)) { reference.reduction <- "pca" if (verbose) { message("Performing PCA on the provided query using ", length(x = features), " features as input.") } if (normalization.method == "LogNormalize") { query <- ScaleData( object = query, features = features, do.scale = scale, verbose = FALSE ) } query <- RunPCA( object = query, npcs = npcs, reduction.name = reference.reduction, verbose = FALSE, features = features, approx = approx.pca ) } projected.pca <- ProjectCellEmbeddings( reference = query, reduction = reference.reduction, query = reference, scale = scale, dims = dims, verbose = verbose ) orig.embeddings <- Embeddings(object = query[[reference.reduction]])[, dims] orig.loadings <- Loadings(object = query[[reference.reduction]]) } else { if (is.null(x = reference.reduction)) { reference.reduction <- "pca" if (verbose) { message("Performing PCA on the provided reference using ", length(x = features), " features as input.") } if (normalization.method == "LogNormalize") { reference <- ScaleData(object = reference, features = features, do.scale = scale, verbose = FALSE) } reference <- RunPCA( object = reference, npcs = npcs, verbose = FALSE, features = features, approx = approx.pca ) } projected.pca <- ProjectCellEmbeddings( reference = reference, reduction = reference.reduction, query = query, scale = scale, dims = dims, feature.mean = feature.mean, verbose = verbose ) orig.embeddings <- Embeddings(object = reference[[reference.reduction]])[, dims] orig.loadings <- Loadings(object = reference[[reference.reduction]]) } combined.pca <- CreateDimReducObject( embeddings = as.matrix(x = rbind(orig.embeddings, projected.pca)), key = "ProjectPC_", assay = reference.assay ) combined.ob <- suppressWarnings(expr = merge( x = DietSeurat(object = reference, counts = FALSE), y = DietSeurat(object = query, counts = FALSE), )) combined.ob[["pcaproject"]] <- combined.pca colnames(x = orig.loadings) <- paste0("ProjectPC_", 1:ncol(x = orig.loadings)) Loadings(object = combined.ob[["pcaproject"]]) <- orig.loadings[, dims] } # Use reciprocal PCA projection in anchor finding if (reduction == "rpca") { # Run PCA on reference and query if (is.null(x = reference.reduction)) { reference.reduction <- "pca" if (verbose) { message("Performing PCA on the 
provided reference using ", length(x = features), " features as input.") } if (normalization.method == "LogNormalize") { reference <- ScaleData( object = reference, features = features, do.scale = scale, verbose = verbose ) } reference <- RunPCA( object = reference, npcs = npcs, verbose = FALSE, features = features, approx = approx.pca ) } if (verbose) { message("Performing PCA on the provided query using ", length(x = features), " features as input.") } if (normalization.method == "LogNormalize") { query <- ScaleData( object = query, features = features, do.scale = scale, verbose = verbose ) } query <- RunPCA( object = query, npcs = ncol(x = reference[[reference.reduction]]), reduction.name = reference.reduction, verbose = FALSE, features = features, approx = approx.pca ) combined.ob <- ReciprocalProject( object.1 = reference, object.2 = query, reduction = reference.reduction, dims = dims, projected.name = reduction, features = features, do.scale = FALSE, do.center = FALSE, slot = 'scale.data', l2.norm = l2.norm, verbose = verbose ) # pcaproject is used as the weight.matrix in MapQuery projected.pca <- ProjectCellEmbeddings( reference = reference, reduction = reference.reduction, query = query, scale = scale, dims = dims, feature.mean = feature.mean, verbose = verbose ) orig.embeddings <- Embeddings(object = reference[[reference.reduction]])[, dims] orig.loadings <- Loadings(object = reference[[reference.reduction]]) combined.pca <- CreateDimReducObject( embeddings = as.matrix(x = rbind(orig.embeddings, projected.pca)), key = "ProjectPC_", assay = reference.assay ) combined.ob[["pcaproject"]] <- combined.pca colnames(x = orig.loadings) <- paste0("ProjectPC_", 1:ncol(x = orig.loadings)) Loadings(object = combined.ob[["pcaproject"]]) <- orig.loadings[, dims] if (l2.norm) { # L2 norm is done on each projected PCA in ReciprocalProject, so turn it off here # avoids later error as we now have two reductions (rpca.ref and rpca.query) l2.norm <- FALSE reduction <- "rpca.ref.l2" reduction.2 <- "rpca.query.l2" } else { reduction <- "rpca.ref" reduction.2 <- "rpca.query" } if (project.query) { reduction <- gsub(".ref", ".query", reduction) reduction.2 <- gsub(".query", ".ref", reduction.2) } } # Run CCA as dimension reduction to be used in anchor finding if (reduction == 'cca') { if (normalization.method == "LogNormalize") { reference <- ScaleData(object = reference, features = features, do.scale = scale, verbose = FALSE) query <- ScaleData(object = query, features = features, do.scale = scale, verbose = FALSE) } combined.ob <- RunCCA( object1 = reference, object2 = query, features = features, num.cc = max(dims), renormalize = FALSE, rescale = FALSE, verbose = verbose ) slot(object = combined.ob[["cca"]], name = "cell.embeddings") <- Embeddings(combined.ob[["cca"]])[, dims] slot(object = combined.ob[["cca"]], name = "feature.loadings") <- Loadings(combined.ob[["cca"]])[, dims] slot(object = combined.ob[["cca"]], name = "feature.loadings.projected") <- Loadings(object = combined.ob[["cca"]], projected = TRUE)[, dims] } if (reduction == "lsiproject") { if (project.query) { projected.lsi <- ProjectSVD( reduction = query[[reference.reduction]], data = GetAssayData(object = reference, assay = reference.assay, slot = "data"), mode = "lsi", do.center = FALSE, do.scale = FALSE, use.original.stats = FALSE, verbose = verbose ) orig.embeddings <- Embeddings(object = query[[reference.reduction]]) orig.loadings <- Loadings(object = query[[reference.reduction]]) } else { projected.lsi <- ProjectSVD( reduction = 
reference[[reference.reduction]], data = GetAssayData(object = query, assay = query.assay, slot = "data"), mode = "lsi", do.center = FALSE, do.scale = FALSE, use.original.stats = FALSE, verbose = verbose ) orig.embeddings <- Embeddings(object = reference[[reference.reduction]]) orig.loadings <- Loadings(object = reference[[reference.reduction]]) } combined.lsi <- CreateDimReducObject( embeddings = as.matrix(x = rbind(orig.embeddings, projected.lsi))[,dims], key = "ProjectLSI_", assay = reference.assay ) combined.ob <- merge( x = DietSeurat(object = reference), y = DietSeurat(object = query) ) combined.ob[["lsiproject"]] <- combined.lsi colnames(x = orig.loadings) <- paste0("ProjectLSI_", 1:ncol(x = orig.loadings)) Loadings(object = combined.ob[["lsiproject"]]) <- orig.loadings[,dims] } if (l2.norm) { combined.ob <- L2Dim(object = combined.ob, reduction = reduction) reduction <- paste0(reduction, ".l2") } precomputed.neighbors <- list(ref.neighbors = NULL, query.neighbors = NULL) nn.idx1 <- NULL nn.idx2 <- NULL # if computing the mapping score later, compute large enough query # neighborhood here to reuse if (!is.null(x = mapping.score.k)) { if (verbose) { message("Finding query neighbors") } k.nn <- max(k.score, k.anchor) query.neighbors <- NNHelper( data = Embeddings(object = combined.ob[[reduction]])[Cells(x = query), ], k = max(mapping.score.k, k.nn + 1), method = nn.method, n.trees = n.trees, cache.index = TRUE ) query.neighbors.sub <- query.neighbors slot(object = query.neighbors.sub, name = "nn.idx") <- slot( object = query.neighbors.sub, name = "nn.idx")[, 1:(k.nn + 1)] slot(object = query.neighbors.sub, name = "nn.dist") <- slot( object = query.neighbors.sub, name = "nn.dist")[, 1:(k.nn + 1)] precomputed.neighbors[["query.neighbors"]] <- query.neighbors.sub nn.idx2 <- Index(object = query.neighbors.sub) } if (!is.null(x = reference.neighbors)) { precomputed.neighbors[["ref.neighbors"]] <- reference[[reference.neighbors]] nn.idx1 <- Index(object = reference[[reference.neighbors]]) } anchors <- FindAnchors( object.pair = combined.ob, assay = c(reference.assay, query.assay), slot = "data", cells1 = colnames(x = reference), cells2 = colnames(x = query), reduction = reduction, reduction.2 = reduction.2, internal.neighbors = precomputed.neighbors, dims = 1:length(x = dims), k.anchor = k.anchor, k.filter = k.filter, k.score = k.score, max.features = max.features, nn.method = nn.method, n.trees = n.trees, nn.idx1 = nn.idx1, nn.idx2 = nn.idx2, eps = eps, projected = projected, verbose = verbose ) reductions <- slot(object = combined.ob, name = "reductions") for (i in unique(x = c(reference.assay, query.assay))) { dummy.assay <- paste0(i, "DUMMY") suppressWarnings( expr = combined.ob[[dummy.assay]] <- CreateDummyAssay(assay = combined.ob[[i]]) ) DefaultAssay(combined.ob) <- dummy.assay combined.ob[[i]] <- NULL suppressWarnings( expr = combined.ob[[i]] <- combined.ob[[dummy.assay]] ) DefaultAssay(combined.ob) <- i combined.ob[[dummy.assay]] <- NULL } slot(object = combined.ob, name = "reductions") <- reductions command <- LogSeuratCommand(object = combined.ob, return.command = TRUE) slot(command, name = 'params')$reference.reduction <- reference.reduction.init anchor.set <- new( Class = "TransferAnchorSet", object.list = list(combined.ob), reference.cells = colnames(x = reference), query.cells = colnames(x = query), anchors = anchors, anchor.features = features, command = command ) if (!is.null(x = precomputed.neighbors[["query.neighbors"]])) { slot(object = anchor.set, name = "neighbors") <- 
list( query.neighbors = query.neighbors) } return(anchor.set) } #' Get the predicted identity #' #' Utility function to easily pull out the name of the class with the maximum #' prediction. This is useful if you've set \code{prediction.assay = TRUE} in #' \code{\link{TransferData}} and want to have a vector with the predicted class. #' #' @param object Seurat object #' @param assay Name of the assay holding the predictions #' @param slot Slot of the assay in which the prediction scores are stored #' @param score.filter Return "Unassigned" for any cell with a score less than #' this value #' #' @return Returns a vector of predicted class names #' #' @examples #' \dontrun{ #' prediction.assay <- TransferData(anchorset = anchors, refdata = reference$class) #' query[["predictions"]] <- prediction.assay #' query$predicted.id <- GetTransferPredictions(query) #' } #' @export #' @concept integration #' GetTransferPredictions <- function(object, assay = "predictions", slot = "data", score.filter = 0.75) { dat <- GetAssayData(object[[assay]], slot = slot) predictions <- apply( X = dat, MARGIN = 2, FUN = function(x){ if (x['max'] < score.filter) { "Unassigned" } else { x <- x[-which(x = names(x = x) == "max")] names(x = which.max(x = x)) } } ) return(predictions) } #' Integrate data #' #' Perform dataset integration using a pre-computed \code{\link{AnchorSet}}. #' #' The main steps of this procedure are outlined below. For a more detailed #' description of the methodology, please see Stuart, Butler, et al Cell 2019. #' \doi{10.1016/j.cell.2019.05.031}; \doi{10.1101/460147} #' #' For pairwise integration: #' #' \itemize{ #' \item{Construct a weights matrix that defines the association between each #' query cell and each anchor. These weights are computed as 1 - the distance #' between the query cell and the anchor divided by the distance of the query #' cell to the \code{k.weight}th anchor multiplied by the anchor score #' computed in \code{\link{FindIntegrationAnchors}}. We then apply a Gaussian #' kernel with a bandwidth defined by \code{sd.weight} and normalize across #' all \code{k.weight} anchors.} #' \item{Compute the anchor integration matrix as the difference between the #' two expression matrices for every pair of anchor cells} #' \item{Compute the transformation matrix as the product of the integration #' matrix and the weights matrix.} #' \item{Subtract the transformation matrix from the original expression #' matrix.} #' } #' #' For multiple dataset integration, we perform iterative pairwise integration. #' To determine the order of integration (if not specified via #' \code{sample.tree}), we #' \itemize{ #' \item{Define a distance between datasets as the total number of cells in #' the smaller dataset divided by the total number of anchors between the two #' datasets.} #' \item{Compute all pairwise distances between datasets} #' \item{Cluster this distance matrix to determine a guide tree} #' } #' #' #' @param anchorset An \code{\link{AnchorSet}} object generated by #' \code{\link{FindIntegrationAnchors}} #' @param new.assay.name Name for the new assay containing the integrated data #' @param normalization.method Name of normalization method used: LogNormalize #' or SCT #' @param features Vector of features to use when computing the PCA to determine #' the weights. Only set if you want a different set from those used in the #' anchor finding process #' @param features.to.integrate Vector of features to integrate. By default, #' will use the features used in anchor finding. 
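#' Only features present in every object being integrated can actually be
#' integrated; internally the requested vector is intersected with the row
#' names of each object, roughly:
#'
#' ```
#' features.to.integrate <- intersect(
#'   features.to.integrate,
#'   Reduce(intersect, lapply(object.list, rownames))
#' )
#' ```
#'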
#' @param dims Number of dimensions to use in the anchor weighting procedure #' @param k.weight Number of neighbors to consider when weighting anchors #' @param weight.reduction Dimension reduction to use when calculating anchor #' weights. This can be one of: #' \itemize{ #' \item{A string, specifying the name of a dimension reduction present in #' all objects to be integrated} #' \item{A vector of strings, specifying the name of a dimension reduction to #' use for each object to be integrated} #' \item{A vector of \code{\link{DimReduc}} objects, specifying the object to #' use for each object in the integration} #' \item{NULL, in which case a new PCA will be calculated and used to #' calculate anchor weights} #' } #' Note that, if specified, the requested dimension reduction will only be used #' for calculating anchor weights in the first merge between reference and #' query, as the merged object will subsequently contain more cells than were in #' the query, and weights will need to be calculated for all cells in the object. #' @param sd.weight Controls the bandwidth of the Gaussian kernel for weighting #' @param sample.tree Specify the order of integration. Order of integration #' should be encoded in a matrix, where each row represents one of the pairwise #' integration steps. Negative numbers specify a dataset, positive numbers #' specify the integration results from a given row (the format of the merge #' matrix included in the \code{\link{hclust}} function output). For example: #' `matrix(c(-2, 1, -3, -1), ncol = 2)` gives: #' #' ``` #' [,1] [,2] #' [1,] -2 -3 #' [2,] 1 -1 #' ``` #' #' This would cause datasets 2 and 3 to be integrated first, then the resulting #' object integrated with dataset 1. #' #' If NULL, the sample tree will be computed automatically. #' @param preserve.order Do not reorder objects based on size for each pairwise #' integration. #' @param eps Error bound on the neighbor finding algorithm (from #' \code{\link{RANN}}) #' @param verbose Print progress bars and output #' #' @return Returns a \code{\link{Seurat}} object with a new integrated #' \code{\link{Assay}}. If \code{normalization.method = "LogNormalize"}, the #' integrated data is returned to the \code{data} slot and can be treated as #' log-normalized, corrected data. If \code{normalization.method = "SCT"}, the #' integrated data is returned to the \code{scale.data} slot and can be treated #' as centered, corrected Pearson residuals. #' #' @references Stuart T, Butler A, et al. Comprehensive Integration of #' Single-Cell Data. Cell. 
2019;177:1888-1902 \doi{10.1016/j.cell.2019.05.031} #' #' @export #' @concept integration #' @md #' @examples #' \dontrun{ #' # to install the SeuratData package see https://github.com/satijalab/seurat-data #' library(SeuratData) #' data("panc8") #' #' # panc8 is a merged Seurat object containing 8 separate pancreas datasets #' # split the object by dataset #' pancreas.list <- SplitObject(panc8, split.by = "tech") #' #' # perform standard preprocessing on each object #' for (i in 1:length(pancreas.list)) { #' pancreas.list[[i]] <- NormalizeData(pancreas.list[[i]], verbose = FALSE) #' pancreas.list[[i]] <- FindVariableFeatures( #' pancreas.list[[i]], selection.method = "vst", #' nfeatures = 2000, verbose = FALSE #' ) #' } #' #' # find anchors #' anchors <- FindIntegrationAnchors(object.list = pancreas.list) #' #' # integrate data #' integrated <- IntegrateData(anchorset = anchors) #' } #' IntegrateData <- function( anchorset, new.assay.name = "integrated", normalization.method = c("LogNormalize", "SCT"), features = NULL, features.to.integrate = NULL, dims = 1:30, k.weight = 100, weight.reduction = NULL, sd.weight = 1, sample.tree = NULL, preserve.order = FALSE, eps = 0, verbose = TRUE ) { normalization.method <- match.arg(arg = normalization.method) reference.datasets <- slot(object = anchorset, name = 'reference.objects') object.list <- slot(object = anchorset, name = 'object.list') anchors <- slot(object = anchorset, name = 'anchors') ref <- object.list[reference.datasets] features <- features %||% slot(object = anchorset, name = "anchor.features") unintegrated <- suppressWarnings(expr = merge( x = object.list[[1]], y = object.list[2:length(x = object.list)] )) if (!is.null(x = features.to.integrate)) { features.to.integrate <- intersect( x = features.to.integrate, y = Reduce( f = intersect, x = lapply( X = object.list, FUN = rownames ) ) ) } if (normalization.method == "SCT") { model.list <- list() for (i in 1:length(x = object.list)) { assay <- DefaultAssay(object = object.list[[i]]) if (length(x = setdiff(x = features.to.integrate, y = features)) != 0) { object.list[[i]] <- GetResidual( object = object.list[[i]], features = setdiff(x = features.to.integrate, y = features), verbose = verbose ) } model.list[[i]] <- slot(object = object.list[[i]][[assay]], name = "SCTModel.list") object.list[[i]][[assay]] <- suppressWarnings(expr = CreateSCTAssayObject( data = GetAssayData( object = object.list[[i]], assay = assay, slot = "scale.data") ) ) } model.list <- unlist(x = model.list) slot(object = anchorset, name = "object.list") <- object.list } # perform pairwise integration of reference objects reference.integrated <- PairwiseIntegrateReference( anchorset = anchorset, new.assay.name = new.assay.name, normalization.method = normalization.method, features = features, features.to.integrate = features.to.integrate, dims = dims, k.weight = k.weight, weight.reduction = weight.reduction, sd.weight = sd.weight, sample.tree = sample.tree, preserve.order = preserve.order, eps = eps, verbose = verbose ) # set SCT model if (normalization.method == "SCT") { if (is.null(x = Tool(object = reference.integrated, slot = "Integration"))) { reference.sample <- slot(object = anchorset, name = "reference.objects") } else { reference.sample <- SampleIntegrationOrder( tree = slot( object = reference.integrated, name = "tools" )$Integration@sample.tree )[1] } reference.cells <- Cells(x = object.list[[reference.sample]]) reference.model <- NULL if (length(x = model.list) > 0) { reference.model <- sapply(X = 
model.list, FUN = function(model) { reference.check <- FALSE model.cells <- Cells(x = model) if (length(x = model.cells) > 0 & length(x = setdiff(x = model.cells, y = reference.cells)) == 0) { reference.check <- TRUE } return(reference.check) } ) reference.model <- model.list[[which(reference.model)]] } } if (length(x = reference.datasets) == length(x = object.list)) { if (normalization.method == "SCT") { reference.integrated[[new.assay.name]] <- CreateSCTAssayObject( data = GetAssayData(object = reference.integrated, assay = new.assay.name, slot = "data"), scale.data = ScaleData( object = GetAssayData(object = reference.integrated, assay = new.assay.name, slot = "scale.data"), do.scale = FALSE, do.center = TRUE, verbose = FALSE), SCTModel.list = reference.model ) levels(x = reference.integrated[[new.assay.name]]) <- "refmodel" reference.integrated[[assay]] <- unintegrated[[assay]] } DefaultAssay(object = reference.integrated) <- new.assay.name VariableFeatures(object = reference.integrated) <- features reference.integrated[["FindIntegrationAnchors"]] <- slot(object = anchorset, name = "command") reference.integrated <- suppressWarnings(LogSeuratCommand(object = reference.integrated)) return(reference.integrated) } else { active.assay <- DefaultAssay(object = ref[[1]]) reference.integrated[[active.assay]] <- NULL # TODO: restore once check.matrix is in SeuratObject # reference.integrated[[active.assay]] <- CreateAssayObject( # data = GetAssayData( # object = reference.integrated[[new.assay.name]], # slot = 'data' # ), # check.matrix = FALSE # ) reference.integrated[[active.assay]] <- CreateAssayObject( data = GetAssayData( object = reference.integrated[[new.assay.name]], slot = 'data' ) ) DefaultAssay(object = reference.integrated) <- active.assay reference.integrated[[new.assay.name]] <- NULL VariableFeatures(object = reference.integrated) <- features # Extract the query objects (if any) and map to reference integrated.data <- MapQueryData( anchorset = anchorset, reference = reference.integrated, new.assay.name = new.assay.name, normalization.method = normalization.method, features = features, features.to.integrate = features.to.integrate, dims = dims, k.weight = k.weight, weight.reduction = weight.reduction, sd.weight = sd.weight, preserve.order = preserve.order, eps = eps, verbose = verbose ) # Construct final assay object # TODO: restore once check.matrix is in SeuratObject # integrated.assay <- CreateAssayObject( # data = integrated.data, # check.matrix = FALSE # ) integrated.assay <- CreateAssayObject( data = integrated.data ) if (normalization.method == "SCT") { integrated.assay <- CreateSCTAssayObject( data = integrated.data, scale.data = ScaleData( object = integrated.data, do.scale = FALSE, do.center = TRUE, verbose = FALSE), SCTModel.list = reference.model ) levels(x = integrated.assay) <- "refmodel" } unintegrated[[new.assay.name]] <- integrated.assay unintegrated <- SetIntegrationData( object = unintegrated, integration.name = "Integration", slot = "anchors", new.data = anchors ) if (!is.null(x = Tool(object = reference.integrated, slot = "Integration"))) { sample.tree <- GetIntegrationData( object = reference.integrated, integration.name = "Integration", slot = "sample.tree" ) } unintegrated <- SetIntegrationData( object = unintegrated, integration.name = "Integration", slot = "sample.tree", new.data = sample.tree ) DefaultAssay(object = unintegrated) <- new.assay.name VariableFeatures(object = unintegrated) <- features unintegrated[["FindIntegrationAnchors"]] <- slot(object 
= anchorset, name = "command") unintegrated <- suppressWarnings(LogSeuratCommand(object = unintegrated)) return(unintegrated) } } #' @inheritParams IntegrateData #' #' @rdname IntegrateEmbeddings #' @concept integration #' @export #' @method IntegrateEmbeddings IntegrationAnchorSet #' IntegrateEmbeddings.IntegrationAnchorSet <- function( anchorset, new.reduction.name = "integrated_dr", reductions = NULL, dims.to.integrate = NULL, k.weight = 100, weight.reduction = NULL, sd.weight = 1, sample.tree = NULL, preserve.order = FALSE, verbose = TRUE, ... ) { CheckDots(...) reference.datasets <- slot(object = anchorset, name = 'reference.objects') object.list <- slot(object = anchorset, name = 'object.list') anchors <- slot(object = anchorset, name = 'anchors') ValidateParams_IntegrateEmbeddings_IntegrationAnchors( anchorset = anchorset, object.list = object.list, reductions = reductions, dims.to.integrate = dims.to.integrate, k.weight = k.weight, weight.reduction = weight.reduction, sample.tree = sample.tree ) unintegrated <- merge( x = object.list[[1]], y = object.list[2:length(x = object.list)] ) # make DimReducs into Assays temporarily intdr.assay <- DefaultAssay(object = reductions) int.assay <- DefaultAssay(object = object.list[[1]]) dims.names <- paste0("drtointegrate-", dims.to.integrate) cell.names.map <- Cells(x = unintegrated) names(x = cell.names.map) <- make.unique(names = unname(obj = do.call( what = c, args = lapply(X = object.list, FUN = Cells))) ) for (i in 1:length(x = object.list)) { embeddings <- t(x = Embeddings(object = reductions)[cell.names.map[Cells(x = object.list[[i]])], dims.to.integrate]) rownames(x = embeddings) <- dims.names fake.assay <- suppressWarnings( # TODO: restore once check.matrix is in SeuratObject # expr = CreateAssayObject( # data = embeddings, # check.matrix = FALSE # ) expr = CreateAssayObject( data = embeddings ) ) object.list[[i]][['drtointegrate']] <- fake.assay DefaultAssay(object = object.list[[i]]) <- "drtointegrate" } slot(object = anchorset, name = "object.list") <- object.list new.reduction.name.safe <- gsub(pattern = "_", replacement = "", x = new.reduction.name) reference.integrated <- PairwiseIntegrateReference( anchorset = anchorset, new.assay.name = new.reduction.name.safe, normalization.method = "LogNormalize", features = dims.names, features.to.integrate = dims.names, dims = NULL, k.weight = k.weight, weight.reduction = weight.reduction, sd.weight = sd.weight, sample.tree = sample.tree, preserve.order = preserve.order, verbose = verbose ) if (length(x = reference.datasets) == length(x = object.list)) { reference.dr <- CreateDimReducObject( embeddings = as.matrix(x = t(GetAssayData(reference.integrated[[new.reduction.name.safe]]))), assay = intdr.assay, key = paste0(new.reduction.name.safe, "_") ) DefaultAssay(object = reference.integrated) <- int.assay reference.integrated[["drtointegrate"]] <- NULL reference.integrated[[new.reduction.name.safe]] <- NULL reference.integrated[[new.reduction.name]] <- reference.dr return(reference.integrated) } active.assay <- DefaultAssay(object = object.list[reference.datasets][[1]]) reference.integrated[[active.assay]] <- NULL # TODO: restore once check.matrix is in SeuratObject # reference.integrated[[active.assay]] <- CreateAssayObject( # data = GetAssayData( # object = reference.integrated[[new.reduction.name.safe]], # slot = 'data', # check.matrix = FALSE # ) # ) reference.integrated[[active.assay]] <- CreateAssayObject( data = GetAssayData( object = reference.integrated[[new.reduction.name.safe]], 
slot = 'data' ) ) DefaultAssay(object = reference.integrated) <- active.assay reference.integrated[[new.reduction.name.safe]] <- NULL VariableFeatures(object = reference.integrated) <- dims.names # Extract the query objects (if any) and map to reference integrated.data <- MapQueryData( anchorset = anchorset, reference = reference.integrated, new.assay.name = new.reduction.name.safe, normalization.method = "LogNormalize", features = dims.names, features.to.integrate = dims.names, dims = NULL, k.weight = k.weight, weight.reduction = weight.reduction, sd.weight = sd.weight, preserve.order = preserve.order, verbose = verbose ) unintegrated[[new.reduction.name]] <- CreateDimReducObject( embeddings = as.matrix(x = t(x = integrated.data)), assay = intdr.assay, key = paste0(new.reduction.name.safe, "_") ) unintegrated <- SetIntegrationData( object = unintegrated, integration.name = "Integration", slot = "anchors", new.data = anchors ) unintegrated[["FindIntegrationAnchors"]] <- slot(object = anchorset, name = "command") suppressWarnings(unintegrated <- LogSeuratCommand(object = unintegrated)) return(unintegrated) } #' @param reference Reference object used in anchorset construction #' @param query Query object used in anchorset construction #' @param reuse.weights.matrix Can be used in conjunction with the store.weights #' parameter in TransferData to reuse a precomputed weights matrix. #' #' @rdname IntegrateEmbeddings #' @concept integration #' @export #' @method IntegrateEmbeddings TransferAnchorSet #' IntegrateEmbeddings.TransferAnchorSet <- function( anchorset, reference, query, new.reduction.name = "integrated_dr", reductions = 'pcaproject', dims.to.integrate = NULL, k.weight = 100, weight.reduction = NULL, reuse.weights.matrix = TRUE, sd.weight = 1, preserve.order = FALSE, verbose = TRUE, ... ) { CheckDots(...) 
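  # Overview of the steps below: the reference and query reductions named in
  # 'reductions' are temporarily repackaged as a "drtointegrate" assay (one row
  # per dimension in 'dims.to.integrate') so that the anchor-weighting and
  # correction machinery used for expression data (MapQueryData) can be applied
  # directly to the embeddings; the corrected values are then unpacked into a
  # new DimReduc stored on the query under 'new.reduction.name'.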
combined.object <- slot(object = anchorset, name = 'object.list')[[1]] anchors <- slot(object = anchorset, name = 'anchors') weights.matrix <- NULL ValidateParams_IntegrateEmbeddings_TransferAnchors( anchorset = anchorset, combined.object = combined.object, reference = reference, query = query, reductions = reductions, dims.to.integrate = dims.to.integrate, k.weight = k.weight, weight.reduction = weight.reduction, reuse.weights.matrix = reuse.weights.matrix ) object.list <- list(reference, query) # make DimReducs into Assays temporarily intdr.assay <- DefaultAssay(object = object.list[[1]][[reductions[[1]]]]) int.assay <- DefaultAssay(object = object.list[[1]]) dims.names <- paste0("drtointegrate-", dims.to.integrate) for (i in 1:length(x = object.list)) { embeddings <- t(x = Embeddings( object = object.list[[i]], reduction = reductions[[i]] )[ , dims.to.integrate]) rownames(x = embeddings) <- dims.names fake.assay <- suppressWarnings( # TODO restore once check.matrix is in SeuratObject # expr = CreateAssayObject( # data = embeddings, # check.matrix = FALSE # ) expr = CreateAssayObject( data = embeddings ) ) object.list[[i]][['drtointegrate']] <- fake.assay DefaultAssay(object = object.list[[i]]) <- "drtointegrate" object.list[[i]] <- DietSeurat(object = object.list[[i]], assays = "drtointegrate") } slot(object = anchorset, name = "object.list") <- object.list new.reduction.name.safe <- gsub(pattern = "_", replacement = "", x = new.reduction.name) new.reduction.name.safe <- gsub(pattern = "[.]", replacement = "", x = new.reduction.name) slot(object = anchorset, name = "reference.objects") <- 1 anchors <- as.data.frame(x = anchors) anchors$dataset1 <- 1 anchors$dataset2 <- 2 slot(object = anchorset, name = "anchors") <- anchors integrated.embeddings <- MapQueryData( anchorset = anchorset, reference = object.list[[1]], new.assay.name = new.reduction.name.safe, normalization.method = "LogNormalize", features = dims.names, features.to.integrate = dims.names, dims = NULL, k.weight = k.weight, weight.reduction = weight.reduction, weights.matrix = weights.matrix, no.offset = TRUE, sd.weight = sd.weight, preserve.order = preserve.order, verbose = verbose ) integrated.embeddings <- as.matrix(x = integrated.embeddings) query[[new.reduction.name]] <- CreateDimReducObject( embeddings = t(x = integrated.embeddings[, Cells(x = query)]), assay = DefaultAssay(object = query[[reductions[1]]]), key = paste0(new.reduction.name.safe, "_") ) query <- RenameCells( object = query, new.names = gsub(pattern = "_query$", replacement = "", x = Cells(x = query)) ) query[[reductions[[1]]]] <- NULL return(query) } #' Calculate the local structure preservation metric #' #' Calculates a metric that describes how well the local structure of each group #' prior to integration is preserved after integration. This procedure works as #' follows: For each group, compute a PCA, compute the top num.neighbors in pca #' space, compute the top num.neighbors in corrected pca space, compute the #' size of the intersection of those two sets of neighbors. #' Return the average over all groups. 
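#'
#' As a rough sketch (illustrative variable names), the per-cell score within
#' one group is the neighborhood overlap
#' \preformatted{
#' score <- length(intersect(nn.orig[i, ], nn.corrected[i, ])) / neighbors
#' }
#' where \code{nn.orig} contains neighbors found in the group's own PCA space
#' and \code{nn.corrected} contains neighbors found in the corrected
#' (integrated) space.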
#' #' @param object Seurat object #' @param grouping.var Grouping variable #' @param idents Optionally specify a set of idents to compute metric for #' @param neighbors Number of neighbors to compute in pca/corrected pca space #' @param reduction Dimensional reduction to use for corrected space #' @param reduced.dims Number of reduced dimensions to use #' @param orig.dims Number of PCs to use in original space #' @param verbose Display progress bar #' #' @return Returns the average preservation metric #' #' @importFrom RANN nn2 #' @importFrom utils txtProgressBar setTxtProgressBar #' #' @export #' @concept integration #' LocalStruct <- function( object, grouping.var, idents = NULL, neighbors = 100, reduction = "pca", reduced.dims = 1:10, orig.dims = 1:10, verbose = TRUE ) { if (is.null(x = idents)) { cells.use <- colnames(x = object) } else { cells.use <- WhichCells(object = object, idents = idents) } Idents(object = object) <- grouping.var local.struct <- list() ob.list <- SplitObject(object = object, split.by = grouping.var) if (verbose) { pb <- txtProgressBar( min = 1, max = length(x = ob.list), style = 3, file = stderr() ) } embeddings <- Embeddings(object = object[[reduction]])[, reduced.dims] for (i in 1:length(x = ob.list)) { ob <- ob.list[[i]] ob <- FindVariableFeatures( object = ob, verbose = FALSE, selection.method = "dispersion", nfeatures = 2000 ) ob <- ScaleData( object = ob, features = VariableFeatures(object = ob), verbose = FALSE ) ob <- RunPCA( object = ob, features = VariableFeatures(object = ob), verbose = FALSE, npcs = max(orig.dims) ) ob.cells <- intersect(x = cells.use, y = colnames(x = ob)) if (length(x = ob.cells) == 0) next nn.corrected <- nn2( data = embeddings[colnames(x = ob), ], query = embeddings[ob.cells, ], k = neighbors )$nn.idx nn.orig <- nn2( data = Embeddings(object = ob[["pca"]])[, orig.dims], query = Embeddings(object = ob[["pca"]])[ob.cells, orig.dims], k = neighbors )$nn.idx local.struct[[i]] <- sapply(X = 1:nrow(x = nn.orig), FUN = function(x) { length(x = intersect(x = nn.orig[x, ], y = nn.corrected[x, ])) / neighbors }) if (verbose) { setTxtProgressBar(pb = pb, value = i) } } names(x = local.struct) <- names(x = ob.list) return(local.struct) } #' Map query cells to a reference #' #' This is a convenience wrapper function around the following three functions #' that are often run together when mapping query data to a reference: #' \code{\link{TransferData}}, \code{\link{IntegrateEmbeddings}}, #' \code{\link{ProjectUMAP}}. Note that by default, the \code{weight.reduction} #' parameter for all functions will be set to the dimension reduction method #' used in the \code{\link{FindTransferAnchors}} function call used to construct #' the anchor object, and the \code{dims} parameter will be the same dimensions #' used to find anchors. 
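#'
#' A minimal usage sketch is shown below; the object, reduction and metadata
#' column names are illustrative only, and the reference UMAP must have been
#' computed with \code{return.model = TRUE} for \code{reduction.model} to work:
#' \preformatted{
#' anchors <- FindTransferAnchors(reference = ref, query = query,
#'                                reference.reduction = "pca")
#' query <- MapQuery(anchorset = anchors, reference = ref, query = query,
#'                   refdata = list(celltype = "celltype"),
#'                   reference.reduction = "pca", reduction.model = "umap")
#' }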
#' #' @inheritParams IntegrateEmbeddings #' @inheritParams TransferData #' @inheritParams ProjectUMAP #' @param transferdata.args A named list of additional arguments to #' \code{\link{TransferData}} #' @param integrateembeddings.args A named list of additional arguments to #' \code{\link{IntegrateEmbeddings}} #' @param projectumap.args A named list of additional arguments to #' \code{\link{ProjectUMAP}} #' @return Returns a modified query Seurat object containing: #' #' \itemize{ #' \item{New Assays corresponding to the features transferred and/or their #' corresponding prediction scores from \code{\link{TransferData}}} #' \item{An integrated reduction from \code{\link{IntegrateEmbeddings}}} #' \item{A projected UMAP reduction of the query cells projected into the #' reference UMAP using \code{\link{ProjectUMAP}}} #' } #' #' @importFrom rlang invoke #' #' @export #' @concept integration #' MapQuery <- function( anchorset, query, reference, refdata = NULL, new.reduction.name = NULL, reference.reduction = NULL, reference.dims = NULL, query.dims = NULL, reduction.model = NULL, transferdata.args = list(), integrateembeddings.args = list(), projectumap.args = list(), verbose = TRUE ) { # determine anchor type if (grepl(pattern = "pca", x = slot(object = anchorset, name = "command")$reduction)) { anchor.reduction <- "pcaproject" # check if the anchorset can be used for mapping if (is.null(x = slot(object = anchorset, name = "command")$reference.reduction)) { stop('The reference.reduction parameter was not set when running ', 'FindTransferAnchors, so the resulting AnchorSet object cannot be used ', 'in the MapQuery function.') } } else if (grepl(pattern = "cca", x = slot(object = anchorset, name = "command")$reduction)) { anchor.reduction <- "cca" ref.cca.embedding <- Embeddings( slot(object = anchorset, name = "object.list")[[1]][["cca"]] )[slot(object = anchorset, name = "reference.cells"), ] rownames(x = ref.cca.embedding) <- gsub( pattern = "_reference", replacement = "", x = rownames(x = ref.cca.embedding) ) query.cca.embedding <- Embeddings( slot(object = anchorset, name = "object.list")[[1]][["cca"]] )[slot(object = anchorset, name = "query.cells"), ] rownames(x = query.cca.embedding) <- gsub( pattern = "_query", replacement = "", x = rownames(x = query.cca.embedding) ) reference[["cca"]] <- CreateDimReducObject( embeddings = ref.cca.embedding, key = "CCA_", assay = DefaultAssay(reference) ) query[["cca"]] <- CreateDimReducObject( embeddings = query.cca.embedding, key = "CCA_", assay = DefaultAssay(query) ) reference.reduction <- new.reduction.name <- "cca" reference.dims <- query.dims <- 1:ncol(x = ref.cca.embedding) } else if (grepl(pattern = "lsi", x = slot(object = anchorset, name = "command")$reduction)) { anchor.reduction <- "lsiproject" } else { stop("unknown type of anchors") } reference.reduction <- reference.reduction %||% slot(object = anchorset, name = "command")$reference.reduction %||% anchor.reduction new.reduction.name <- new.reduction.name %||% paste0("ref.", reference.reduction) # checking TransferData parameters td.badargs <- names(x = transferdata.args)[!names(x = transferdata.args) %in% names(x = formals(fun = TransferData))] if (length(x = td.badargs) > 0) { warning("The following arguments in transferdata.args are not valid: ", paste(td.badargs, collapse = ", "), immediate. = TRUE, call. 
= FALSE) } transferdata.args <- transferdata.args[names(x = transferdata.args) %in% names(x = formals(fun = TransferData))] transferdata.args$weight.reduction <- transferdata.args$weight.reduction %||% anchor.reduction # checking IntegrateEmbeddings parameters ie.badargs <- names(x = integrateembeddings.args)[!names(x = integrateembeddings.args) %in% names(x = formals(fun = IntegrateEmbeddings.TransferAnchorSet))] if (length(x = ie.badargs) > 0) { warning("The following arguments in integrateembeddings.args are not valid: ", paste(ie.badargs, collapse = ", "), immediate. = TRUE, call. = FALSE) } integrateembeddings.args <- integrateembeddings.args[names(x = integrateembeddings.args) %in% names(x = formals(fun = IntegrateEmbeddings.TransferAnchorSet))] integrateembeddings.args$reductions <- integrateembeddings.args$reductions %||% anchor.reduction integrateembeddings.args$weight.reduction <- integrateembeddings.args$weight.reduction %||% anchor.reduction slot(object = query, name = "tools")$TransferData <- NULL reuse.weights.matrix <- FALSE if (!is.null(x = refdata)) { query <- invoke( .fn = TransferData, .args = c(list( anchorset = anchorset, reference = reference, query = query, refdata = refdata, store.weights = TRUE, verbose = verbose ), transferdata.args ) ) if (transferdata.args$weight.reduction == integrateembeddings.args$weight.reduction) { reuse.weights.matrix <- TRUE } } if (anchor.reduction != "cca"){ query <- invoke( .fn = IntegrateEmbeddings, .args = c(list( anchorset = anchorset, reference = reference, query = query, new.reduction.name = new.reduction.name, reuse.weights.matrix = reuse.weights.matrix, verbose = verbose ), integrateembeddings.args ) ) } slot(object = query, name = "tools")$TransferData <- NULL if (!is.null(x = reduction.model)) { reference.dims <- reference.dims %||% slot(object = anchorset, name = "command")$dims query.dims <- query.dims %||% 1:ncol(x = query[[new.reduction.name]]) if (length(x = query.dims) != length(x = reference.dims)) { message("Query and reference dimensions are not equal, proceeding with reference dimensions.") query.dims <- reference.dims } ref_nn.num <- Misc(object = reference[[reduction.model]], slot = "model")$n_neighbors query <- invoke( .fn = ProjectUMAP, .args = c(list( query = query, query.reduction = new.reduction.name, query.dims = query.dims, reference = reference, reference.dims = reference.dims, reference.reduction = reference.reduction, reduction.model = reduction.model, k.param = ref_nn.num ), projectumap.args ) ) } return(query) } #' @param anchors AnchorSet object or just anchor matrix from the #' Anchorset object returned from FindTransferAnchors #' @param combined.object Combined object (ref + query) from the #' Anchorset object returned #' @param query.neighbors Neighbors object computed on query cells #' @param ref.embeddings Reference embeddings matrix #' @param query.embeddings Query embeddings matrix #' @param kanchors Number of anchors to use in projection steps when computing #' weights #' @param ndim Number of dimensions to use when working with low dimensional #' projections of the data #' @param ksmooth Number of cells to average over when computing transition #' probabilities #' @param ksnn Number of cells to average over when determining the kernel #' bandwidth from the SNN graph #' @param snn.prune Amount of pruning to apply to edges in SNN graph #' @param subtract.first.nn Option to the scoring function when computing #' distances to subtract the distance to the first nearest neighbor #' @param nn.method 
Nearest neighbor method to use (annoy or RANN) #' @param n.trees More trees gives higher precision when using annoy approximate #' nearest neighbor search #' @param query.weights Query weights matrix for reuse #' @param verbose Display messages/progress #' @param ... Reserved for internal use #' #' @return Returns a vector of cell scores #' #' @importClassesFrom SeuratObject Neighbor #' #' @rdname MappingScore #' @concept integration #' @export #' MappingScore.default <- function( anchors, combined.object, query.neighbors, ref.embeddings, query.embeddings, kanchors = 50, ndim = 50, ksmooth = 100, ksnn = 20, snn.prune = 0, subtract.first.nn = TRUE, nn.method = "annoy", n.trees = 50, query.weights = NULL, verbose = TRUE, ... ) { CheckDots(...) # Input checks start.time <- Sys.time() if (is.null(x = query.neighbors) || ncol(x = query.neighbors) < ksmooth) { message("Recomputing query neighborhoods.\nSetting mapping.score.k in ", "FindTransferAnchors to the ksmooth \nvalue here (", ksmooth, "), can bypass this calculation in future runs.") query.neighbors <- FindNeighbors( object = query.embeddings, k.param = ksmooth, nn.method = nn.method, n.trees = n.trees, cache.index = TRUE, return.neighbor = TRUE, verbose = FALSE ) } ref.cells <- rownames(x = ref.embeddings) query.cells <- rownames(query.embeddings) # Project reference values onto query if (verbose) { message("Projecting reference PCA onto query") } ## Need to set up an IntegrationData object to use FindWeights here int.mat <- matrix(data = NA, nrow = nrow(x = anchors), ncol = 0) rownames(x = int.mat) <- query.cells[anchors[, "cell2"]] slot(object = combined.object, name = 'tools')[["IT1"]] <- new( Class = "IntegrationData", anchors = anchors, neighbors = list(cells1 = ref.cells, cells2 = query.cells), integration.matrix = int.mat ) ## Finding weights of anchors in query pca space ref.pca.orig <- ref.embeddings[, 1:ndim] query.pca.orig <- query.embeddings[, 1:ndim] dr.weights <- suppressWarnings(expr = CreateDimReducObject( embeddings = rbind(query.pca.orig, ref.pca.orig) )) if (!is.null(x = query.weights)) { weights.matrix <- query.weights } else { combined.object <- FindWeights( object = combined.object, integration.name = "IT1", reduction = dr.weights, dims = 1:ncol(x = dr.weights), k = kanchors, sd.weight = 1, eps = 0, nn.method = nn.method, n.trees = n.trees, verbose = verbose ) weights.matrix <- GetIntegrationData( object = combined.object, integration.name = "IT1", slot = "weights" ) } ## Perform projection of ref pca values using weights matrix ref.pca <- ref.embeddings[ref.cells[anchors[, 1]], 1:ndim] rownames(x = ref.pca) <- paste0(rownames(x = ref.pca), "_reference") query.cells.projected <- Matrix::crossprod( x = as(object = ref.pca, Class = "dgCMatrix"), y = weights.matrix ) colnames(x = query.cells.projected) <- query.cells rownames(x = query.cells.projected) <- colnames(x = ref.pca) # Re-project the query cells back onto query if (verbose) { message("Projecting back the query cells into original PCA space") } ## Compute new weights dr.weights <- suppressWarnings(CreateDimReducObject( embeddings = rbind( t(x = as.matrix(x = query.cells.projected)), ref.pca.orig[ref.cells, ] ), )) combined.object <- FindWeights( object = combined.object, integration.name = "IT1", reduction = dr.weights, dims = 1:ndim, k = kanchors, sd.weight = 1, eps = 0, nn.method = nn.method, n.trees = n.trees, reverse = TRUE, verbose = verbose ) weights.matrix <- GetIntegrationData( object = combined.object, integration.name = "IT1", slot = "weights" ) ## 
Project back onto query orig.pca <- query.embeddings[query.cells[anchors[, 2]], ] query.cells.back.corrected <- Matrix::t( x = Matrix::crossprod( x = as(object = orig.pca, Class = "dgCMatrix"), y = weights.matrix)[1:ndim, ] ) query.cells.back.corrected <- as.matrix(x = query.cells.back.corrected) rownames(x = query.cells.back.corrected) <- query.cells query.cells.pca <- query.embeddings[query.cells, 1:ndim] if (verbose) { message("Computing scores:") message(" Finding neighbors of original query cells") } ## Compute original neighborhood of query cells if (is.null(x = query.neighbors)) { query.neighbors <- NNHelper( data = query.cells.pca, query = query.cells.pca, k = max(ksmooth, ksnn), method = nn.method, n.trees = n.trees, cache.index = TRUE ) } if (verbose) { message(" Finding neighbors of transformed query cells") } ## Compute new neighborhood of query cells after projections if (nn.method == "annoy") { if (is.null(x = Index(object = query.neighbors))) { corrected.neighbors <- NNHelper( data = query.cells.pca, query = query.cells.back.corrected, k = max(ksmooth, ksnn), method = nn.method, n.trees = n.trees, cache.index = TRUE ) } else { corrected.neighbors <- AnnoySearch( index = Index(object = query.neighbors), query = query.cells.back.corrected, k = max(ksmooth, ksnn) ) corrected.neighbors <- new( Class = 'Neighbor', nn.idx = corrected.neighbors$nn.idx, nn.dist = corrected.neighbors$nn.dists ) } } if (verbose) { message(" Computing query SNN") } snn <- ComputeSNN( nn_ranked = Indices(query.neighbors)[, 1:ksnn], prune = snn.prune ) query.cells.pca <- t(x = query.cells.pca) if (verbose) { message(" Determining bandwidth and computing transition probabilities") } scores <- ScoreHelper( snn = snn, query_pca = query.cells.pca, query_dists = Distances(object = query.neighbors), corrected_nns = Indices(object = corrected.neighbors), k_snn = ksnn, subtract_first_nn = subtract.first.nn, display_progress = verbose ) scores[scores > 1] <- 1 names(x = scores) <- query.cells end.time <- Sys.time() if (verbose) { message("Total elapsed time: ", end.time - start.time) } return(scores) } #' @rdname MappingScore #' @export #' @concept integration #' @method MappingScore AnchorSet #' MappingScore.AnchorSet <- function( anchors, kanchors = 50, ndim = 50, ksmooth = 100, ksnn = 20, snn.prune = 0, subtract.first.nn = TRUE, nn.method = "annoy", n.trees = 50, query.weights = NULL, verbose = TRUE, ... ) { CheckDots(...) 
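  # Overview of the steps below: pull the combined (reference + query) object
  # stored in the AnchorSet, strip the "_reference"/"_query" suffixes from the
  # cell names, recover the L2-normalized projected embeddings for the
  # reference and query cells, slim the combined object down to a minimal
  # footprint, and delegate to MappingScore.default with the stored anchors
  # and any precomputed query neighbors.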
combined.object <- slot(object = anchors, name = "object.list")[[1]] combined.object <- RenameCells( object = combined.object, new.names = unname(obj = sapply( X = Cells(x = combined.object), FUN = RemoveLastField )) ) query.cells <- sapply( X = slot(object = anchors, name = "query.cells"), FUN = RemoveLastField ) ref.cells <- sapply( X = slot(object = anchors, name = "reference.cells"), FUN = RemoveLastField ) query.embeddings <- Embeddings(object = subset( x = combined.object[["pcaproject.l2"]], cells = query.cells )) ref.embeddings <- Embeddings(object = subset( x = combined.object[["pcaproject.l2"]], cells = ref.cells )) query.neighbors <- slot(object = anchors, name = "neighbors")[["query.neighbors"]] # reduce size of anchorset combined object combined.object <- DietSeurat(object = combined.object) combined.object <- subset( x = combined.object, features = c(rownames(x = combined.object)[1]) ) for (i in colnames(x = combined.object[[]])) { combined.object[[i]] <- NULL } return(MappingScore( anchors = slot(object = anchors, name = "anchors"), combined.object = combined.object, query.neighbors = query.neighbors, ref.embeddings = ref.embeddings, query.embeddings = query.embeddings, kanchors = kanchors, ndim = ndim, ksmooth = ksmooth, ksnn = ksnn, snn.prune = snn.prune, subtract.first.nn = subtract.first.nn, nn.method = nn.method, n.trees = n.trees, query.weights = query.weights, verbose = verbose )) } #' Calculates a mixing metric #' #' Here we compute a measure of how well mixed a composite dataset is. To #' compute, we first examine the local neighborhood for each cell (looking at #' max.k neighbors) and determine for each group (could be the dataset after #' integration) the k nearest neighbor and what rank that neighbor was in the #' overall neighborhood. We then take the median across all groups as the mixing #' metric per cell. #' #' @param object Seurat object #' @param grouping.var Grouping variable for dataset #' @param reduction Which dimensionally reduced space to use #' @param dims Dimensions to use #' @param k Neighbor number to examine per group #' @param max.k Maximum size of local neighborhood to compute #' @param eps Error bound on the neighbor finding algorithm (from RANN) #' @param verbose Displays progress bar #' #' @return Returns a vector of values of the mixing metric for each cell #' #' @importFrom RANN nn2 #' @importFrom pbapply pbsapply #' @importFrom future.apply future_sapply #' @importFrom future nbrOfWorkers #' @export #' @concept integration #' MixingMetric <- function( object, grouping.var, reduction = "pca", dims = 1:2, k = 5, max.k = 300, eps = 0, verbose = TRUE ) { my.sapply <- ifelse( test = verbose && nbrOfWorkers() == 1, yes = pbsapply, no = future_sapply ) embeddings <- Embeddings(object = object[[reduction]])[, dims] nn <- nn2( data = embeddings, k = max.k, eps = eps ) group.info <- object[[grouping.var, drop = TRUE]] groups <- unique(x = group.info) mixing <- my.sapply( X = 1:ncol(x = object), FUN = function(x) { sapply(X = groups, FUN = function(y) { which(x = group.info[nn$nn.idx[x, ]] == y)[k] }) } ) mixing[is.na(x = mixing)] <- max.k mixing <- apply( X = mixing, MARGIN = 2, FUN = median ) return(mixing) } #' Prepare an object list normalized with sctransform for integration. 
#' #' This function takes in a list of objects that have been normalized with the #' \code{\link{SCTransform}} method and performs the following steps: #' \itemize{ #' \item{If anchor.features is a numeric value, calls \code{\link{SelectIntegrationFeatures}} #' to determine the features to use in the downstream integration procedure.} #' \item{Ensures that the sctransform residuals for the features specified #' to anchor.features are present in each object in the list. This is #' necessary because the default behavior of \code{\link{SCTransform}} is to #' only store the residuals for the features determined to be variable. #' Residuals are recomputed for missing features using the stored model #' parameters via the \code{\link{GetResidual}} function.} #' \item{Subsets the \code{scale.data} slot to only contain the residuals for #' anchor.features for efficiency in downstream processing. } #' } #' #' @param object.list A list of \code{\link{Seurat}} objects to prepare for integration #' @param assay The name of the \code{\link{Assay}} to use for integration. This can be a #' single name if all the assays to be integrated have the same name, or a character vector #' containing the name of each \code{\link{Assay}} in each object to be integrated. The #' specified assays must have been normalized using \code{\link{SCTransform}}. #' If NULL (default), the current default assay for each object is used. #' @param anchor.features Can be either: #' \itemize{ #' \item{A numeric value. This will call \code{\link{SelectIntegrationFeatures}} #' to select the provided number of features to be used in anchor finding} #' \item{A vector of features to be used as input to the anchor finding #' process} #' } #' @param sct.clip.range Numeric of length two specifying the min and max values #' the Pearson residual will be clipped to #' @param verbose Display output/messages #' #' @return A list of \code{\link{Seurat}} objects with the appropriate \code{scale.data} slots #' containing only the required \code{anchor.features}. 
#' #' @importFrom pbapply pblapply #' @importFrom methods slot slot<- #' @importFrom future nbrOfWorkers #' @importFrom future.apply future_lapply #' #' @export #' @concept integration #' @examples #' \dontrun{ #' # to install the SeuratData package see https://github.com/satijalab/seurat-data #' library(SeuratData) #' data("panc8") #' #' # panc8 is a merged Seurat object containing 8 separate pancreas datasets #' # split the object by dataset and take the first 2 to integrate #' pancreas.list <- SplitObject(panc8, split.by = "tech")[1:2] #' #' # perform SCTransform normalization #' pancreas.list <- lapply(X = pancreas.list, FUN = SCTransform) #' #' # select integration features and prep step #' features <- SelectIntegrationFeatures(pancreas.list) #' pancreas.list <- PrepSCTIntegration( #' pancreas.list, #' anchor.features = features #' ) #' #' # downstream integration steps #' anchors <- FindIntegrationAnchors( #' pancreas.list, #' normalization.method = "SCT", #' anchor.features = features #' ) #' pancreas.integrated <- IntegrateData(anchors) #' } #' PrepSCTIntegration <- function( object.list, assay = NULL, anchor.features = 2000, sct.clip.range = NULL, verbose = TRUE ) { my.lapply <- ifelse( test = verbose && nbrOfWorkers() == 1, yes = pblapply, no = future_lapply ) assay <- assay %||% sapply(X = object.list, FUN = DefaultAssay) assay <- rep_len(x = assay, length.out = length(x = object.list)) objects.names <- names(x = object.list) object.list <- lapply( X = 1:length(x = object.list), FUN = function(i) { DefaultAssay(object = object.list[[i]]) <- assay[i] object.list[[i]][[assay[i]]] <- as(object = object.list[[i]][[assay[i]]], Class = "SCTAssay") return(object.list[[i]]) } ) sct.check <- vapply( X = 1:length(x = object.list), FUN = function(i) { sct.check <- IsSCT(assay = object.list[[i]][[assay[i]]]) if (!sct.check) { if ("FindIntegrationAnchors" %in% Command(object = object.list[[i]]) && Command(object = object.list[[i]], command = "FindIntegrationAnchors", value = "normalization.method") == "SCT") { sct.check <- TRUE } } return(sct.check) }, FUN.VALUE = logical(length = 1L), USE.NAMES = FALSE ) if (!all(sct.check)) { stop( "The following assays have not been processed with SCTransform:\n", paste( ' object:', which(x = !sct.check, useNames = FALSE), '- assay:', assay[!sct.check], collapse = '\n' ), call. = FALSE ) } if (is.numeric(x = anchor.features)) { anchor.features <- SelectIntegrationFeatures( object.list = object.list, nfeatures = anchor.features, verbose = verbose ) } object.list <- my.lapply( X = 1:length(x = object.list), FUN = function(i) { obj <- GetResidual( object = object.list[[i]], assay = assay[i], features = anchor.features, replace.value = ifelse(test = is.null(x = sct.clip.range), yes = FALSE, no = TRUE), clip.range = sct.clip.range, verbose = FALSE ) scale.data <- GetAssayData( object = obj, assay = assay[i], slot = 'scale.data' ) obj <- SetAssayData( object = obj, slot = 'scale.data', new.data = scale.data[anchor.features, ], assay = assay[i] ) return(obj) } ) assays.used <- assay for (i in 1:length(x = object.list)) { assay <- as.character(x = assays.used[i]) object.list[[i]] <- LogSeuratCommand(object = object.list[[i]]) } names(x = object.list) <- objects.names return(object.list) } #' Select integration features #' #' Choose the features to use when integrating multiple datasets. This function #' ranks features by the number of datasets they are deemed variable in, #' breaking ties by the median variable feature rank across datasets. 
#' It returns the top scoring features by this ranking.
#'
#' If \code{\link{FindVariableFeatures}} hasn't been run for an assay in the
#' list, this method will try to run it using the \code{fvf.nfeatures}
#' parameter and any additional ones specified through the \dots.
#'
#' @param object.list List of Seurat objects
#' @param nfeatures Number of features to return
#' @param assay Name or vector of assay names (one for each object) from which
#' to pull the variable features.
#' @param verbose Print messages
#' @param fvf.nfeatures nfeatures for \code{\link{FindVariableFeatures}}. Used
#' if \code{VariableFeatures} have not been set for any object in
#' \code{object.list}.
#' @param ... Additional parameters to \code{\link{FindVariableFeatures}}
#'
#' @return A vector of selected features
#'
#' @importFrom utils head
#'
#' @export
#' @concept integration
#'
#' @examples
#' \dontrun{
#' # to install the SeuratData package see https://github.com/satijalab/seurat-data
#' library(SeuratData)
#' data("panc8")
#'
#' # panc8 is a merged Seurat object containing 8 separate pancreas datasets
#' # split the object by dataset and take the first 2
#' pancreas.list <- SplitObject(panc8, split.by = "tech")[1:2]
#'
#' # perform SCTransform normalization
#' pancreas.list <- lapply(X = pancreas.list, FUN = SCTransform)
#'
#' # select integration features
#' features <- SelectIntegrationFeatures(pancreas.list)
#' }
#'
SelectIntegrationFeatures <- function(
  object.list,
  nfeatures = 2000,
  assay = NULL,
  verbose = TRUE,
  fvf.nfeatures = 2000,
  ...
) {
  if (!is.null(x = assay)) {
    if (length(x = assay) != length(x = object.list)) {
      stop("If specifying the assay, please specify one assay per object in the object.list")
    }
    for (ii in 1:length(x = object.list)) {
      DefaultAssay(object = object.list[[ii]]) <- assay[ii]
    }
  } else {
    assay <- sapply(X = object.list, FUN = DefaultAssay)
  }
  for (ii in 1:length(x = object.list)) {
    if (length(x = VariableFeatures(object = object.list[[ii]])) == 0) {
      if (verbose) {
        message(paste0("No variable features found for object ", ii,
                       " in the object.list. Running FindVariableFeatures ..."))
      }
      object.list[[ii]] <- FindVariableFeatures(
        object = object.list[[ii]],
        nfeatures = fvf.nfeatures,
        verbose = verbose,
        ...
      )
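      # Sketch of the selection scheme (comments only, not executed; feature
      # names are hypothetical): once every object has variable features, the
      # code below this loop counts how many datasets report each feature as
      # variable and breaks ties by the median per-dataset rank, e.g.
      #   vf.example <- list(c("GeneA", "GeneB"), c("GeneB", "GeneC"))
      #   sort(table(unlist(vf.example)), decreasing = TRUE)  # GeneB (count 2) ranks first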
    }
  }
  var.features <- unname(obj = unlist(x = lapply(
    X = 1:length(x = object.list),
    FUN = function(x) VariableFeatures(object = object.list[[x]], assay = assay[x]))
  ))
  var.features <- sort(x = table(var.features), decreasing = TRUE)
  for (i in 1:length(x = object.list)) {
    var.features <- var.features[names(x = var.features) %in% rownames(x = object.list[[i]][[assay[i]]])]
  }
  tie.val <- var.features[min(nfeatures, length(x = var.features))]
  features <- names(x = var.features[which(x = var.features > tie.val)])
  vf.list <- lapply(X = object.list, FUN = VariableFeatures)
  if (length(x = features) > 0) {
    feature.ranks <- sapply(X = features, FUN = function(x) {
      ranks <- sapply(X = vf.list, FUN = function(vf) {
        if (x %in% vf) {
          return(which(x = x == vf))
        }
        return(NULL)
      })
      median(x = unlist(x = ranks))
    })
    features <- names(x = sort(x = feature.ranks))
  }
  features.tie <- var.features[which(x = var.features == tie.val)]
  tie.ranks <- sapply(X = names(x = features.tie), FUN = function(x) {
    ranks <- sapply(X = vf.list, FUN = function(vf) {
      if (x %in% vf) {
        return(which(x = x == vf))
      }
      return(NULL)
    })
    median(x = unlist(x = ranks))
  })
  features <- c(
    features,
    names(x = head(x = sort(x = tie.ranks), nfeatures - length(x = features)))
  )
  return(features)
}

#' Transfer data
#'
#' Transfer categorical or continuous data across single-cell datasets. For
#' transferring categorical information, pass a vector from the reference
#' dataset (e.g. \code{refdata = reference$celltype}). For transferring
#' continuous information, pass a matrix from the reference dataset (e.g.
#' \code{refdata = GetAssayData(reference[['RNA']])}).
#'
#' The main steps of this procedure are outlined below. For a more detailed
#' description of the methodology, please see Stuart, Butler, et al Cell 2019.
#' \doi{10.1016/j.cell.2019.05.031}; \doi{10.1101/460147}
#'
#' For both transferring discrete labels and feature imputation, we first
#' compute the weights matrix.
#'
#' \itemize{
#' \item{Construct a weights matrix that defines the association between each
#' query cell and each anchor. These weights are computed as 1 - the distance
#' between the query cell and the anchor divided by the distance of the query
#' cell to the \code{k.weight}th anchor multiplied by the anchor score
#' computed in \code{\link{FindIntegrationAnchors}}. We then apply a Gaussian
#' kernel with a bandwidth defined by \code{sd.weight} and normalize across
#' all \code{k.weight} anchors.}
#' }
#'
#' The main difference between label transfer (classification) and feature
#' imputation is what gets multiplied by the weights matrix. For label transfer,
#' we perform the following steps:
#'
#' \itemize{
#' \item{Create a binary classification matrix, the rows corresponding to each
#' possible class and the columns corresponding to the anchors. If the
#' reference cell in the anchor pair is a member of a certain class, that
#' matrix entry is filled with a 1, otherwise 0.}
#' \item{Multiply this classification matrix by the transpose of the weights
#' matrix to compute a prediction score for each class for each cell in the
#' query dataset.}
#' }
#'
#' For feature imputation, we perform the following step:
#' \itemize{
#' \item{Multiply the expression matrix for the reference anchor cells by the
#' weights matrix.
This returns a predicted expression matrix for the #' specified features for each cell in the query dataset.} #' } #' #' #' @param anchorset An \code{\link{AnchorSet}} object generated by #' \code{\link{FindTransferAnchors}} #' @param refdata Data to transfer. This can be specified in one of two ways: #' \itemize{ #' \item{The reference data itself as either a vector where the names #' correspond to the reference cells, or a matrix, where the column names #' correspond to the reference cells.} #' \item{The name of the metadata field or assay from the reference object #' provided. This requires the reference parameter to be specified. If pulling #' assay data in this manner, it will pull the data from the data slot. To #' transfer data from other slots, please pull the data explicitly with #' \code{\link{GetAssayData}} and provide that matrix here.} #' } #' @param reference Reference object from which to pull data to transfer #' @param query Query object into which the data will be transferred. #' @param weight.reduction Dimensional reduction to use for the weighting #' anchors. Options are: #' \itemize{ #' \item{pcaproject: Use the projected PCA used for anchor building} #' \item{lsiproject: Use the projected LSI used for anchor building} #' \item{pca: Use an internal PCA on the query only} #' \item{cca: Use the CCA used for anchor building} #' \item{custom DimReduc: User provided \code{\link{DimReduc}} object #' computed on the query cells} #' } #' @param l2.norm Perform L2 normalization on the cell embeddings after #' dimensional reduction #' @param dims Set of dimensions to use in the anchor weighting procedure. If #' NULL, the same dimensions that were used to find anchors will be used for #' weighting. #' @param k.weight Number of neighbors to consider when weighting anchors #' @param sd.weight Controls the bandwidth of the Gaussian kernel for weighting #' @param eps Error bound on the neighbor finding algorithm (from #' \code{\link{RANN}}) #' @param n.trees More trees gives higher precision when using annoy approximate #' nearest neighbor search #' @param verbose Print progress bars and output #' @param slot Slot to store the imputed data. Must be either "data" (default) #' or "counts" #' @param prediction.assay Return an \code{Assay} object with the prediction #' scores for each class stored in the \code{data} slot. #' @param store.weights Optionally store the weights matrix used for predictions #' in the returned query object. #' #' @return #' If \code{query} is not provided, for the categorical data in \code{refdata}, #' returns a data.frame with label predictions. If \code{refdata} is a matrix, #' returns an Assay object where the imputed data has been stored in the #' provided slot. #' #' If \code{query} is provided, a modified query object is returned. For #' the categorical data in refdata, prediction scores are stored as Assays #' (prediction.score.NAME) and two additional metadata fields: predicted.NAME #' and predicted.NAME.score which contain the class prediction and the score for #' that predicted class. For continuous data, an Assay called NAME is returned. #' NAME here corresponds to the name of the element in the refdata list. #' #' #' @references Stuart T, Butler A, et al. Comprehensive Integration of #' Single-Cell Data. Cell. 
2019;177:1888-1902 \doi{10.1016/j.cell.2019.05.031} #' #' @export #' @concept integration #' @examples #' \dontrun{ #' # to install the SeuratData package see https://github.com/satijalab/seurat-data #' library(SeuratData) #' data("pbmc3k") #' #' # for demonstration, split the object into reference and query #' pbmc.reference <- pbmc3k[, 1:1350] #' pbmc.query <- pbmc3k[, 1351:2700] #' #' # perform standard preprocessing on each object #' pbmc.reference <- NormalizeData(pbmc.reference) #' pbmc.reference <- FindVariableFeatures(pbmc.reference) #' pbmc.reference <- ScaleData(pbmc.reference) #' #' pbmc.query <- NormalizeData(pbmc.query) #' pbmc.query <- FindVariableFeatures(pbmc.query) #' pbmc.query <- ScaleData(pbmc.query) #' #' # find anchors #' anchors <- FindTransferAnchors(reference = pbmc.reference, query = pbmc.query) #' #' # transfer labels #' predictions <- TransferData(anchorset = anchors, refdata = pbmc.reference$seurat_annotations) #' pbmc.query <- AddMetaData(object = pbmc.query, metadata = predictions) #' } #' TransferData <- function( anchorset, refdata, reference = NULL, query = NULL, weight.reduction = 'pcaproject', l2.norm = FALSE, dims = NULL, k.weight = 50, sd.weight = 1, eps = 0, n.trees = 50, verbose = TRUE, slot = "data", prediction.assay = FALSE, store.weights = TRUE ) { combined.ob <- slot(object = anchorset, name = "object.list")[[1]] anchors <- slot(object = anchorset, name = "anchors") reference.cells <- slot(object = anchorset, name = "reference.cells") query.cells <- slot(object = anchorset, name = "query.cells") label.transfer <- list() ValidateParams_TransferData( anchorset = anchorset, combined.ob = combined.ob, anchors = anchors, reference.cells = reference.cells, query.cells = query.cells, refdata = refdata, reference = reference, query = query, weight.reduction = weight.reduction, l2.norm = l2.norm, dims = dims, k.weight = k.weight, sd.weight = sd.weight, eps = eps, n.trees = n.trees, verbose = verbose, slot = slot, prediction.assay = prediction.assay, label.transfer = label.transfer ) if (!inherits(x = weight.reduction, what = "DimReduc") && weight.reduction == 'pca') { if (verbose) { message("Running PCA on query dataset") } features <- slot(object = anchorset, name = "anchor.features") query.ob <- query query.ob <- ScaleData(object = query.ob, features = features, verbose = FALSE) query.ob <- RunPCA(object = query.ob, npcs = max(dims), features = features, verbose = FALSE) query.pca <- Embeddings(query.ob[['pca']]) rownames(x = query.pca) <- paste0(rownames(x = query.pca), "_query") #fill with 0s ref.pca <- matrix( data = 0, nrow = length(x = reference.cells), ncol = ncol(x = query.pca), dimnames = list(reference.cells, colnames(x = query.pca)) ) rm(query.ob) combined.pca.embeddings <- rbind(ref.pca, query.pca)[colnames(x = combined.ob), ] combined.pca <- CreateDimReducObject( embeddings = combined.pca.embeddings, key = "PC_", assay = DefaultAssay(object = combined.ob) ) combined.ob[["pca"]] <- combined.pca if (l2.norm) { combined.ob <- L2Dim(object = combined.ob, reduction = 'pca') } } if (!inherits(x = weight.reduction, what = "DimReduc") && weight.reduction == "lsi") { if (!("lsi" %in% Reductions(object = query))) { stop("Requested lsi for weight.reduction, but lsi not stored in query object.") } else { weight.reduction <- query[["lsi"]] } } if (inherits(x = weight.reduction, what = "DimReduc")) { weight.reduction <- RenameCells( object = weight.reduction, new.names = paste0(Cells(x = weight.reduction), "_query") ) } else { if (l2.norm) { 
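# A minimal sketch (comments only, not executed) of what the L2 step applied
# below amounts to: L2Dim stores a copy of the named reduction in which each
# cell's embedding vector is rescaled to unit length, roughly
#   emb <- Embeddings(object = combined.ob[[weight.reduction]])
#   emb.l2 <- emb / sqrt(rowSums(emb ^ 2))
# so distances used for anchor weighting behave like cosine distances.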
weight.reduction.l2 <- paste0(weight.reduction, ".l2") if (weight.reduction.l2 %in% Reductions(object = combined.ob)) { combined.ob <- L2Dim(object = combined.ob, reduction = weight.reduction) } weight.reduction <- weight.reduction.l2 } weight.reduction <- combined.ob[[weight.reduction]] } if (max(dims) > ncol(x = weight.reduction)) { stop("dims is larger than the number of available dimensions in ", "weight.reduction (", ncol(x = weight.reduction), ").", call. = FALSE) } combined.ob <- SetIntegrationData( object = combined.ob, integration.name = "integrated", slot = 'anchors', new.data = anchors ) combined.ob <- SetIntegrationData( object = combined.ob, integration.name = "integrated", slot = 'neighbors', new.data = list('cells1' = reference.cells, 'cells2' = query.cells) ) combined.ob <- FindIntegrationMatrix( object = combined.ob, verbose = verbose ) combined.ob <- FindWeights( object = combined.ob, reduction = weight.reduction, dims = dims, k = k.weight, sd.weight = sd.weight, eps = eps, n.trees = n.trees, verbose = verbose ) weights <- GetIntegrationData( object = combined.ob, integration.name = "integrated", slot = 'weights' ) anchors <- as.data.frame(x = anchors) query.cells <- unname(obj = sapply( X = query.cells, FUN = function(x) gsub(pattern = "_query", replacement = "", x = x) )) transfer.results <- list() for (rd in 1:length(x = refdata)) { if (isFALSE(x = refdata[[rd]])) { transfer.results[[rd]] <- NULL next } rd.name <- names(x = refdata)[rd] # case for projection if (label.transfer[[rd]]) { anchors$id1 <- refdata[[rd]][anchors[, "cell1"]] reference.ids <- factor(x = anchors$id1, levels = unique(x = refdata[[rd]])) possible.ids <- levels(x = reference.ids) prediction.mat <- matrix(nrow = nrow(x = anchors), ncol = length(x = possible.ids), data = 0) for(i in 1:length(x = possible.ids)) { prediction.mat[which(reference.ids == possible.ids[i]), i] = 1 } if (verbose) { message("Predicting cell labels") } prediction.scores <- t(x = weights) %*% prediction.mat colnames(x = prediction.scores) <- possible.ids rownames(x = prediction.scores) <- query.cells prediction.ids <- possible.ids[apply(X = prediction.scores, MARGIN = 1, FUN = which.max)] prediction.ids <- as.character(prediction.ids) prediction.max <- apply(X = prediction.scores, MARGIN = 1, FUN = max) if (is.null(x = query)){ prediction.scores <- cbind(prediction.scores, max = prediction.max) } predictions <- data.frame( predicted.id = prediction.ids, prediction.score = as.matrix(prediction.scores), row.names = query.cells, stringsAsFactors = FALSE ) if (prediction.assay || !is.null(x = query)) { # TODO: restore once check.matrix is in SeuratObject # predictions <- CreateAssayObject(data = t(x = as.matrix(x = prediction.scores)), check.matrix = FALSE) predictions <- CreateAssayObject(data = t(x = as.matrix(x = prediction.scores))) Key(object = predictions) <- paste0("predictionscore", rd.name, "_") } if (is.null(x = query)) { transfer.results[[rd]] <- predictions } else { query <- AddMetaData(object = query, metadata = prediction.max, col.name = paste0("predicted.", rd.name, ".score")) query <- AddMetaData(object = query, metadata = prediction.ids, col.name = paste0("predicted.", rd.name)) query[[paste0("prediction.score.", rd.name)]] <- predictions } } else { # case for transferring features reference.cell.indices <- reference.cells[anchors$cell1] refdata.anchors <- refdata[[rd]][, reference.cell.indices] nfeatures <- nrow(x = refdata[[rd]]) if (verbose) { message(paste0("Transfering ", nfeatures, " features onto reference 
data")) } new.data <- refdata.anchors %*% weights rownames(x = new.data) <- rownames(x = refdata[[rd]]) colnames(x = new.data) <- query.cells if (inherits(x = new.data, what = "Matrix")) { new.data <- as(object = new.data, Class = "dgCMatrix") } if (slot == "counts") { # TODO: restore once check.matrix is in SeuratObject # new.assay <- CreateAssayObject(counts = new.data, check.matrix = FALSE) new.assay <- CreateAssayObject(counts = new.data) } else if (slot == "data") { # TODO: restore once check.matrix is in SeuratObject # new.assay <- CreateAssayObject(data = new.data, check.matrix = FALSE) new.assay <- CreateAssayObject(data = new.data) } Key(object = new.assay) <- paste0(rd.name, "_") if (is.null(x = query)) { transfer.results[[rd]] <- new.assay } else { if (rd.name %in% Assays(object = query)) { message( rd.name, " already present in query. ", "Storing as ", paste0("predicted_", rd.name) ) rd.name <- paste0("predicted_", rd.name) } query[[rd.name]] <- new.assay } } } if (is.null(x = query)) { names(x = transfer.results) <- names(x = refdata) if (length(x = transfer.results) == 1) { transfer.results <- transfer.results[[1]] } return(transfer.results) } else { if (store.weights) { slot(object = query, name = "tools")[["TransferData"]] <- list(weights.matrix = weights) } return(query) } } #%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% # Methods for Seurat-defined generics #%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% #' @param object.list List of Seurat objects #' @rdname AnnotateAnchors #' @export #' @method AnnotateAnchors default #' @concept integration #' AnnotateAnchors.default <- function( anchors, vars = NULL, slot = NULL, object.list, assay = NULL, ... ) { # reorder columns anchors <- anchors[, c("cell1", "dataset1", "cell2", "dataset2", "score")] colnames(x = anchors)[5] <- "anchor.score" cell.names <- lapply(X = object.list, FUN = Cells) cell1.names <- character(length = nrow(x = anchors)) for (dataset in unique(x = anchors$dataset1)) { dataset.cells <- which(x = anchors$dataset1 == dataset) cell1.names[dataset.cells] <- cell.names[[dataset]][anchors[dataset.cells, "cell1"]] } anchors$cell1 <- cell1.names cell2.names <- character(length(x = nrow(x = anchors))) for (dataset in unique(x = anchors$dataset2)) { dataset.cells <- which(x = anchors$dataset2 == dataset) cell2.names[dataset.cells] <- cell.names[[dataset]][anchors[dataset.cells, "cell2"]] } anchors$cell2 <- cell2.names slot <- slot %||% "data" assay <- assay %||% sapply(X = object.list, FUN = DefaultAssay) if (length(x = assay) == 1) { assay <- rep(x = assay, times = length(x = object.list)) } if (length(x = assay) != length(x = object.list)) { stop("Number of assays provided should either be one or the length of object.list") } for (ob in 1:length(x = object.list)) { DefaultAssay(object = object.list[[ob]]) <- assay[ob] } if (length(x = slot) == 1) { slot <- rep(x = slot, times = length(x = vars)) } if (length(x = vars) > 0) { for(v in 1:length(x = vars)) { var <- vars[v] var.list <- lapply(X = object.list, FUN = function(x) { tryCatch( expr = FetchData(object = x, vars = var, slot = slot[v]), error = function(e) { data.fetched <- as.data.frame( x = rep(x = NA, times = ncol(x = x)), row.names = Cells(x = x), stringsAsFactors = FALSE ) colnames(x = data.fetched) <- var return(data.fetched) } ) }) if (all(unlist(x = lapply(X = var.list, FUN = isFALSE)))) { warning( var, " not found in all objects", call. = FALSE, immediate. 
= TRUE ) next } if (any(unlist(x = lapply(X = var.list, FUN = isFALSE)))) { warning( var, " not in all objects. Filling missing objects with NA", call. = FALSE, immediate. = TRUE ) } if (is.null(x = names(x = object.list))) { names(x = var.list) <- 1:length(x = object.list) } else { names(x = var.list) <- names(x = object.list) } for(i in c(1, 2)) { cell <- paste0("cell", i) if (is.factor(x = anchors[, cell])) { anchors[, cell] <- as.character(x = anchors[, cell]) } for (j in unique(x = anchors[, paste0("dataset", i)])) { var.df <- var.list[[j]] dataset.cells <- which(x = anchors[, paste0("dataset", i)] == j) anchors[dataset.cells, paste0(cell, ".", var)] <- var.df[anchors[, cell][dataset.cells], ] } } # column specifying whether the annotation matches across pair of datasets anchors[, paste0(var, ".match")] <- anchors[, paste0("cell1.", var)] == anchors[, paste0("cell2.", var)] } } return(anchors) } #' @rdname AnnotateAnchors #' @export #' @method AnnotateAnchors IntegrationAnchorSet #' AnnotateAnchors.IntegrationAnchorSet <- function( anchors, vars = NULL, slot = NULL, object.list = NULL, assay = NULL, ... ) { anchor.df <- slot(object = anchors, name = 'anchors') object.list <- object.list %||% slot(object = anchors, name = 'object.list') anchor.df <- as.data.frame(x = anchor.df) anchor.df <- AnnotateAnchors( anchors = anchor.df, vars = vars, slot = slot, object.list = object.list, assay = assay ) return(anchor.df) } #' @param reference Reference object used in \code{\link{FindTransferAnchors}} #' @param query Query object used in \code{\link{FindTransferAnchors}} #' @rdname AnnotateAnchors #' @export #' @method AnnotateAnchors TransferAnchorSet #' AnnotateAnchors.TransferAnchorSet <- function( anchors, vars = NULL, slot = NULL, reference = NULL, query = NULL, assay = NULL, ... 
) { anchor.df <- slot(object = anchors, name = 'anchors') if (class(x = reference) != class(x = query)) { stop("If setting reference/query, please set both parameters.") } if (is.null(x = reference)) { object.list <- slot(object = anchors, name = 'object.list')[[1]] reference.cells <- slot(object = anchors, name = "reference.cells") reference <- subset(x = object.list, cells = reference.cells, recompute = FALSE) reference <- RenameCells( object = reference, new.names = gsub(pattern = "_reference$", replacement = "", x = reference.cells) ) query.cells <- slot(object = anchors, name = "query.cells") query <- subset(x = object.list, cells = query.cells, recompute = FALSE) query <- RenameCells( object = query, new.names = gsub(pattern = "_query$", replacement = "", x = query.cells) ) } object.list <- list(reference = reference, query = query) anchor.df <- as.data.frame(x = anchor.df) anchor.df$dataset1 <- "reference" anchor.df$dataset2 <- "query" anchor.df <- AnnotateAnchors( anchors = anchor.df, vars = vars, slot = slot, object.list = object.list, assay = assay ) return(anchor.df) } #%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% # Internal #%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% # Add dataset number and remove cell offset # # Record which dataset number in the original list of Seurat objects # each anchor cell came from, and correct the cell index so it corresponds to # the position of the anchor cell in its own dataset # # @param anchor.df Dataframe of anchors # @param offsets size of each dataset in anchor dataframe # @param obj.length Vector of object lengths # # @return Anchor dataframe with additional columns corresponding to the dataset # of each cell AddDatasetID <- function( anchor.df, offsets, obj.lengths ) { ndataset <- length(x = offsets) total.cells <- sum(obj.lengths) offsets <- c(offsets, total.cells) row.offset <- rep.int(x = offsets[1:ndataset], times = obj.lengths) dataset <- rep.int(x = 1:ndataset, times = obj.lengths) anchor.df <- data.frame( 'cell1' = anchor.df[, 1] - row.offset[anchor.df[, 1]], 'cell2' = anchor.df[, 2] - row.offset[anchor.df[, 2]], 'score' = anchor.df[, 3], 'dataset1' = dataset[anchor.df[, 1]], 'dataset2' = dataset[anchor.df[, 2]] ) return(anchor.df) } # Adjust sample tree to only include given reference objects # # @param x A sample tree # @param reference.objects a sorted list of reference object IDs # AdjustSampleTree <- function(x, reference.objects) { for (i in 1:nrow(x = x)) { obj.id <- -(x[i, ]) if (obj.id[[1]] > 0) { x[i, 1] <- -(reference.objects[[obj.id[[1]]]]) } if (obj.id[[2]] > 0) { x[i, 2] <- -(reference.objects[[obj.id[[2]]]]) } } return(x) } # Build tree of datasets based on cell similarity # # @param similarity.matrix Dataset similarity matrix # # @return Returns a heirarchical clustering of datasets # #' @importFrom stats hclust # BuildSampleTree <- function(similarity.matrix) { dist.mat <- as.dist(m = 1 / similarity.matrix) clusters <- hclust(d = dist.mat) return(clusters$merge) } # Construct nearest neighbor matrix from nn.idx # # @param nn.idx Nearest neighbor index matrix (nn.idx from RANN) # @param offset1 Offsets for the first neighbor # @param offset2 Offsets for the second neighbor # # @return returns a sparse matrix representing the NN matrix # ConstructNNMat <- function(nn.idx, offset1, offset2, dims) { k <- ncol(x = nn.idx) j <- as.numeric(x = t(x = nn.idx)) + offset2 i <- ((1:length(x = j)) - 1) %/% k + 1 + offset1 nn.mat <- sparseMatrix(i = i, j = j, 
x = 1, dims = dims) return(nn.mat) } # Count anchors between all datasets # # Counts anchors between each dataset and scales based on total number of cells # in the datasets # # @param anchor.df Matrix of anchors # @param offsets Dataset sizes in anchor matrix. Used to identify boundaries of # each dataset in matrix, so that total pairwise anchors between all datasets # can be counted # # @return Returns a similarity matrix # CountAnchors <- function( anchor.df, offsets, obj.lengths ) { similarity.matrix <- matrix(data = 0, ncol = length(x = offsets), nrow = length(x = offsets)) similarity.matrix[upper.tri(x = similarity.matrix, diag = TRUE)] <- NA total.cells <- sum(obj.lengths) offsets <- c(offsets, total.cells) for (i in 1:nrow(x = similarity.matrix)){ for (j in 1:ncol(x = similarity.matrix)){ if (!is.na(x = similarity.matrix[i, j])){ relevant.rows <- anchor.df[(anchor.df$dataset1 %in% c(i, j)) & (anchor.df$dataset2 %in% c(i, j)), ] score <- nrow(x = relevant.rows) ncell <- min(obj.lengths[[i]], obj.lengths[[j]]) similarity.matrix[i, j] <- score / ncell } } } return(similarity.matrix) } FilterAnchors <- function( object, assay = NULL, slot = "data", integration.name = 'integrated', features = NULL, k.filter = 200, nn.method = "annoy", n.trees = 50, eps = 0, verbose = TRUE ) { if (verbose) { message("Filtering anchors") } assay <- assay %||% DefaultAssay(object = object) features <- features %||% VariableFeatures(object = object) if (length(x = features) == 0) { stop("No features provided and no VariableFeatures computed.") } features <- unique(x = features) neighbors <- GetIntegrationData(object = object, integration.name = integration.name, slot = 'neighbors') nn.cells1 <- neighbors$cells1 nn.cells2 <- neighbors$cells2 if (min(length(x = nn.cells1), length(x = nn.cells2)) < k.filter) { warning("Number of anchor cells is less than k.filter. 
Retaining all anchors.") k.filter <- min(length(x = nn.cells1), length(x = nn.cells2)) anchors <- GetIntegrationData(object = object, integration.name = integration.name, slot = "anchors") } else { cn.data1 <- L2Norm( mat = as.matrix(x = t(x = GetAssayData( object = object[[assay[1]]], slot = slot)[features, nn.cells1])), MARGIN = 1) cn.data2 <- L2Norm( mat = as.matrix(x = t(x = GetAssayData( object = object[[assay[2]]], slot = slot)[features, nn.cells2])), MARGIN = 1) nn <- NNHelper( data = cn.data2[nn.cells2, ], query = cn.data1[nn.cells1, ], k = k.filter, method = nn.method, n.trees = n.trees, eps = eps ) anchors <- GetIntegrationData(object = object, integration.name = integration.name, slot = "anchors") position <- sapply(X = 1:nrow(x = anchors), FUN = function(x) { which(x = anchors[x, "cell2"] == Indices(object = nn)[anchors[x, "cell1"], ])[1] }) anchors <- anchors[!is.na(x = position), ] if (verbose) { message("\tRetained ", nrow(x = anchors), " anchors") } } object <- SetIntegrationData( object = object, integration.name = integration.name, slot = "anchors", new.data = anchors ) return(object) } FindAnchors <- function( object.pair, assay, slot, cells1, cells2, internal.neighbors, reduction, reduction.2 = character(), nn.reduction = reduction, dims = 1:10, k.anchor = 5, k.filter = 200, k.score = 30, max.features = 200, nn.method = "annoy", n.trees = 50, nn.idx1 = NULL, nn.idx2 = NULL, eps = 0, projected = FALSE, verbose = TRUE ) { # compute local neighborhoods, use max of k.anchor and k.score if also scoring to avoid # recomputing neighborhoods k.neighbor <- k.anchor if (!is.na(x = k.score)) { k.neighbor <- max(k.anchor, k.score) } object.pair <- FindNN( object = object.pair, cells1 = cells1, cells2 = cells2, internal.neighbors = internal.neighbors, dims = dims, reduction = reduction, reduction.2 = reduction.2, nn.reduction = nn.reduction, k = k.neighbor, nn.method = nn.method, n.trees = n.trees, nn.idx1 = nn.idx1, nn.idx2 = nn.idx2, eps = eps, verbose = verbose ) object.pair <- FindAnchorPairs( object = object.pair, integration.name = "integrated", k.anchor = k.anchor, verbose = verbose ) if (!is.na(x = k.filter)) { top.features <- TopDimFeatures( object = object.pair, reduction = reduction, dims = dims, features.per.dim = 100, max.features = max.features, projected = projected ) if(length(top.features) == 2){ top.features <- intersect(top.features[[1]], top.features[[2]]) } else{ top.features <- as.vector(top.features) } top.features <- top.features[top.features %in% rownames(x = object.pair)] object.pair <- FilterAnchors( object = object.pair, assay = assay, slot = slot, integration.name = 'integrated', features = top.features, k.filter = k.filter, nn.method = nn.method, n.trees = n.trees, eps = eps, verbose = verbose ) } if (!is.na(x = k.score)) { object.pair = ScoreAnchors( object = object.pair, assay = DefaultAssay(object = object.pair), integration.name = "integrated", verbose = verbose, k.score = k.score ) } anchors <- GetIntegrationData( object = object.pair, integration.name = 'integrated', slot = 'anchors' ) return(anchors) } # Find Anchor pairs # FindAnchorPairs <- function( object, integration.name = 'integrated', k.anchor = 5, verbose = TRUE ) { neighbors <- GetIntegrationData(object = object, integration.name = integration.name, slot = 'neighbors') max.nn <- c(ncol(x = neighbors$nnab), ncol(x = neighbors$nnba)) if (any(k.anchor > max.nn)) { message(paste0('warning: requested k.anchor = ', k.anchor, ', only ', min(max.nn), ' in dataset')) k.anchor <- min(max.nn) } if 
(verbose) { message("Finding anchors") } # convert cell name to neighbor index nn.cells1 <- neighbors$cells1 nn.cells2 <- neighbors$cells2 cell1.index <- suppressWarnings(which(colnames(x = object) == nn.cells1, arr.ind = TRUE)) ncell <- 1:nrow(x = neighbors$nnab) ncell <- ncell[ncell %in% cell1.index] anchors <- list() # pre allocate vector anchors$cell1 <- rep(x = 0, length(x = ncell) * 5) anchors$cell2 <- anchors$cell1 anchors$score <- anchors$cell1 + 1 idx <- 0 indices.ab <- Indices(object = neighbors$nnab) indices.ba <- Indices(object = neighbors$nnba) for (cell in ncell) { neighbors.ab <- indices.ab[cell, 1:k.anchor] mutual.neighbors <- which( x = indices.ba[neighbors.ab, 1:k.anchor, drop = FALSE] == cell, arr.ind = TRUE )[, 1] for (i in neighbors.ab[mutual.neighbors]){ idx <- idx + 1 anchors$cell1[idx] <- cell anchors$cell2[idx] <- i anchors$score[idx] <- 1 } } anchors$cell1 <- anchors$cell1[1:idx] anchors$cell2 <- anchors$cell2[1:idx] anchors$score <- anchors$score[1:idx] anchors <- t(x = do.call(what = rbind, args = anchors)) anchors <- as.matrix(x = anchors) object <- SetIntegrationData( object = object, integration.name = integration.name, slot = 'anchors', new.data = anchors ) if (verbose) { message(paste0("\tFound ", nrow(x = anchors), " anchors")) } return(object) } FindIntegrationMatrix <- function( object, assay = NULL, integration.name = 'integrated', features.integrate = NULL, verbose = TRUE ) { assay <- assay %||% DefaultAssay(object = object) neighbors <- GetIntegrationData(object = object, integration.name = integration.name, slot = 'neighbors') nn.cells1 <- neighbors$cells1 nn.cells2 <- neighbors$cells2 anchors <- GetIntegrationData( object = object, integration.name = integration.name, slot = 'anchors' ) if (verbose) { message("Finding integration vectors") } features.integrate <- features.integrate %||% rownames( x = GetAssayData(object = object, assay = assay, slot = "data") ) data.use1 <- t(x = GetAssayData( object = object, assay = assay, slot = "data")[features.integrate, nn.cells1] ) data.use2 <- t(x = GetAssayData( object = object, assay = assay, slot = "data")[features.integrate, nn.cells2] ) anchors1 <- nn.cells1[anchors[, "cell1"]] anchors2 <- nn.cells2[anchors[, "cell2"]] data.use1 <- data.use1[anchors1, ] data.use2 <- data.use2[anchors2, ] integration.matrix <- data.use2 - data.use1 object <- SetIntegrationData( object = object, integration.name = integration.name, slot = 'integration.matrix', new.data = integration.matrix ) return(object) } # Find nearest neighbors # FindNN <- function( object, cells1 = NULL, cells2 = NULL, internal.neighbors, grouping.var = NULL, dims = 1:10, reduction = "cca.l2", reduction.2 = character(), nn.dims = dims, nn.reduction = reduction, k = 300, nn.method = "annoy", n.trees = 50, nn.idx1 = NULL, nn.idx2 = NULL, eps = 0, integration.name = 'integrated', verbose = TRUE ) { if (xor(x = is.null(x = cells1), y = is.null(x = cells2))) { stop("cells1 and cells2 must both be specified") } if (!is.null(x = cells1) && !is.null(x = cells2) && !is.null(x = grouping.var)) { stop("Specify EITHER grouping.var or cells1/2.") } if (is.null(x = cells1) && is.null(x = cells2) && is.null(x = grouping.var)) { stop("Please set either cells1/2 or grouping.var") } if (!is.null(x = grouping.var)) { if (nrow(x = unique(x = object[[grouping.var]])) != 2) { stop("Number of groups in grouping.var not equal to 2.") } groups <- names(x = sort(x = table(object[[grouping.var]]), decreasing = TRUE)) cells1 <- colnames(x = object)[object[[grouping.var]] == 
groups[[1]]] cells2 <- colnames(x = object)[object[[grouping.var]] == groups[[2]]] } if (verbose) { message("Finding neighborhoods") } dim.data.self <- Embeddings(object = object[[nn.reduction]])[, nn.dims] if (!is.null(x = internal.neighbors[[1]])) { nnaa <- internal.neighbors[[1]] } else { dims.cells1.self <- dim.data.self[cells1, ] nnaa <- NNHelper( data = dims.cells1.self, k = k + 1, method = nn.method, n.trees = n.trees, eps = eps, index = nn.idx1 ) } if (!is.null(x = internal.neighbors[[2]])) { nnbb <- internal.neighbors[[2]] } else { dims.cells2.self <- dim.data.self[cells2, ] nnbb <- NNHelper( data = dims.cells2.self, k = k + 1, method = nn.method, n.trees = n.trees, eps = eps, index = nn.idx1 ) } if (length(x = reduction.2) > 0) { nnab <- NNHelper( data = Embeddings(object = object[[reduction.2]])[cells2, nn.dims], query = Embeddings(object = object[[reduction.2]])[cells1, nn.dims], k = k, method = nn.method, n.trees = n.trees, eps = eps, index = nn.idx2 ) nnba <- NNHelper( data = Embeddings(object = object[[reduction]])[cells1, nn.dims], query = Embeddings(object = object[[reduction]])[cells2, nn.dims], k = k, method = nn.method, n.trees = n.trees, eps = eps, index = nn.idx1 ) } else { dim.data.opposite <- Embeddings(object = object[[reduction]])[ ,dims] dims.cells1.opposite <- dim.data.opposite[cells1, ] dims.cells2.opposite <- dim.data.opposite[cells2, ] nnab <- NNHelper( data = dims.cells2.opposite, query = dims.cells1.opposite, k = k, method = nn.method, n.trees = n.trees, eps = eps, index = nn.idx2 ) nnba <- NNHelper( data = dims.cells1.opposite, query = dims.cells2.opposite, k = k, method = nn.method, n.trees = n.trees, eps = eps, index = nn.idx1 ) } object <- SetIntegrationData( object = object, integration.name = integration.name, slot = 'neighbors', new.data = list('nnaa' = nnaa, 'nnab' = nnab, 'nnba' = nnba, 'nnbb' = nnbb, 'cells1' = cells1, 'cells2' = cells2) ) return(object) } # @param reduction a DimReduc object containing cells in the query object # @param reverse Compute weights matrix for reference anchors that are nearest # to query cells. Used in mapping metric to perform projection of query cells # back from reference space. 
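# What FindWeights computes, as a rough sketch (comments only; the exact
# arithmetic lives in the C++ helper FindWeightsC and follows the description
# in the TransferData docs): for one query cell with distances d to its k
# nearest anchors and anchor scores s,
#   w <- (1 - d / d[k]) * s                       # rescale by distance to k-th anchor
#   w <- 1 - exp(-w / (2 * (1 / sd.weight) ^ 2))  # Gaussian kernel, bandwidth sd.weight
#   w <- w / sum(w)                               # normalize across the k anchors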
FindWeights <- function( object, reduction = NULL, assay = NULL, integration.name = 'integrated', dims = 1:10, features = NULL, k = 300, sd.weight = 1, nn.method = "annoy", n.trees = 50, eps = 0, reverse = FALSE, verbose = TRUE ) { if (verbose) { message("Finding integration vector weights") } if (is.null(x = reduction) & is.null(x = features)) { stop("Need to specify either dimension reduction object or a set of features") } assay <- assay %||% DefaultAssay(object = object) neighbors <- GetIntegrationData(object = object, integration.name = integration.name, slot = 'neighbors') nn.cells1 <- neighbors$cells1 nn.cells2 <- neighbors$cells2 anchors <- GetIntegrationData( object = object, integration.name = integration.name, slot = 'anchors' ) if (reverse) { anchors.cells2 <- nn.cells2[anchors[, "cell2"]] anchors.cells1 <- nn.cells1[anchors[, "cell1"]] to.keep <- !duplicated(x = anchors.cells1) anchors.cells1 <- anchors.cells1[to.keep] anchors.cells2 <- anchors.cells2[to.keep] if (is.null(x = features)) { data.use <- Embeddings(object = reduction)[nn.cells1, dims] data.use.query <- Embeddings(object = reduction)[nn.cells2, dims] } else { data.use <- t(x = GetAssayData( object = object, slot = 'data', assay = assay)[features, nn.cells1] ) data.use.query <- t(x = GetAssayData( object = object, slot = 'data', assay = assay)[features, nn.cells2] ) } knn_2_2 <- NNHelper( data = data.use[anchors.cells1, ], query = data.use.query, k = k, method = nn.method, n.trees = n.trees, eps = eps ) } else { anchors.cells2 <- unique(x = nn.cells2[anchors[, "cell2"]]) if (is.null(x = features)) { data.use <- Embeddings(reduction)[nn.cells2, dims] } else { data.use <- t(x = GetAssayData(object = object, slot = 'data', assay = assay)[features, nn.cells2]) } knn_2_2 <- NNHelper( data = data.use[anchors.cells2, ], query = data.use, k = k, method = nn.method, n.trees = n.trees, eps = eps ) } distances <- Distances(object = knn_2_2) distances <- 1 - (distances / distances[, ncol(x = distances)]) cell.index <- Indices(object = knn_2_2) integration.matrix <- GetIntegrationData( object = object, integration.name = integration.name, slot = "integration.matrix" ) weights <- FindWeightsC( cells2 = 0:(length(x = nn.cells2) - 1), distances = as.matrix(x = distances), anchor_cells2 = anchors.cells2, integration_matrix_rownames = rownames(x = integration.matrix), cell_index = cell.index, anchor_score = anchors[, "score"], min_dist = 0, sd = sd.weight, display_progress = verbose ) object <- SetIntegrationData( object = object, integration.name = integration.name, slot = 'weights', new.data = weights ) return(object) } # Work out the anchor cell offsets for given set of cells in anchor list # # @param anchors A dataframe of anchors, from AnchorSet object # @param dataset Dataset number (1 or 2) # @param cell Cell number (1 or 2) # @param cellnames.list List of cell names in all objects # @param cellnames list of cell names for only the object in question # # @return Returns a list of offsets # GetCellOffsets <- function(anchors, dataset, cell, cellnames.list, cellnames) { cell.id <- sapply(X = 1:nrow(x = anchors), FUN = function(x) { cellnames.list[[anchors[, dataset+3][x]]][anchors[, cell][x]] }) cell.offset <- sapply( X = 1:length(x = cell.id), FUN = function(x) { return(which(x = cellnames == cell.id[x])) } ) return(cell.offset) } # Map queries to reference # # Map query objects onto assembled reference dataset # # @param anchorset Anchorset found by FindIntegrationAnchors # @param reference Pre-integrated reference dataset to 
map query datasets to # @param new.assay.name Name for the new assay containing the integrated data # @param normalization.method Name of normalization method used: LogNormalize # or SCT # @param features Vector of features to use when computing the PCA to determine the weights. Only set # if you want a different set from those used in the anchor finding process # @param features.to.integrate Vector of features to integrate. By default, will use the features # used in anchor finding. # @param dims Number of PCs to use in the weighting procedure # @param k.weight Number of neighbors to consider when weighting # @param weight.reduction Dimension reduction to use when calculating anchor weights. # This can be either: # \itemize{ # \item{A string, specifying the name of a dimension reduction present in all objects to be integrated} # \item{A vector of strings, specifying the name of a dimension reduction to use for each object to be integrated} # \item{NULL, in which case a new PCA will be calculated and used to calculate anchor weights} # } # Note that, if specified, the requested dimension reduction will only be used for calculating anchor weights in the # first merge between reference and query, as the merged object will subsequently contain more cells than was in # query, and weights will need to be calculated for all cells in the object. # @param sd.weight Controls the bandwidth of the Gaussian kernel for weighting # @param preserve.order Do not reorder objects based on size for each pairwise integration. # @param eps Error bound on the neighbor finding algorithm (from \code{\link{RANN}}) # @param verbose Print progress bars and output # # @return Returns an integrated matrix # MapQueryData <- function( anchorset, reference, new.assay.name = "integrated", normalization.method = c("LogNormalize", "SCT"), features = NULL, features.to.integrate = NULL, dims = 1:30, k.weight = 100, weight.reduction = NULL, weights.matrix = NULL, no.offset = FALSE, sd.weight = 1, preserve.order = FALSE, eps = 0, verbose = TRUE ) { normalization.method <- match.arg(arg = normalization.method) reference.datasets <- slot(object = anchorset, name = 'reference.objects') object.list <- slot(object = anchorset, name = 'object.list') anchors <- slot(object = anchorset, name = 'anchors') features <- features %||% slot(object = anchorset, name = "anchor.features") features.to.integrate <- features.to.integrate %||% features cellnames.list <- list() for (ii in 1:length(x = object.list)) { cellnames.list[[ii]] <- colnames(x = object.list[[ii]]) } if (length(x = reference.datasets) == length(x = object.list)) { query.datasets <- NULL } else { query.datasets <- setdiff(x = seq_along(along.with = object.list), y = reference.datasets) } my.lapply <- ifelse( test = verbose && nbrOfWorkers() == 1, yes = pblapply, no = future_lapply ) query.corrected <- my.lapply( X = query.datasets, FUN = function(dataset1) { if (verbose) { message("\nIntegrating dataset ", dataset1, " with reference dataset") } filtered.anchors <- anchors[anchors$dataset1 %in% reference.datasets & anchors$dataset2 == dataset1, ] integrated <- RunIntegration( filtered.anchors = filtered.anchors, reference = reference, query = object.list[[dataset1]], new.assay.name = new.assay.name, normalization.method = normalization.method, cellnames.list = cellnames.list, features.to.integrate = features.to.integrate, weight.reduction = weight.reduction, weights.matrix = weights.matrix, no.offset = no.offset, features = features, dims = dims, k.weight = k.weight, sd.weight 
= sd.weight, eps = eps, verbose = verbose ) return(integrated) } ) reference.integrated <- GetAssayData( object = reference, slot = 'data' )[features.to.integrate, ] query.corrected[[length(x = query.corrected) + 1]] <- reference.integrated all.integrated <- do.call(cbind, query.corrected) return(all.integrated) } # Convert nearest neighbor information to a sparse matrix # # @param idx Nearest neighbor index # @param distance Nearest neighbor distance # @param k Number of nearest neighbors # NNtoMatrix <- function(idx, distance, k) { nn <- list() x <- 1 for (i in 1:nrow(x = idx)) { for (j in 2:k) { nn.idx <- idx[i, j] nn.dist <- distance[i, j] nn[[x]] <- c('i' = i, 'j' = nn.idx, 'x' = 1/nn.dist) x <- x + 1 } } nn <- do.call(what = rbind, args = nn) nn.matrix <- new( Class = 'dgTMatrix', i = as.integer(x = nn[, 1] - 1), j = as.integer(x = nn[, 2] - 1), x = as.numeric(x = nn[, 3]), Dim = as.integer(x = c(nrow(idx), nrow(x = idx))) ) nn.matrix <- as(object = nn.matrix, Class = 'dgCMatrix') return(nn.matrix) } # Pairwise dataset integration # # Used for reference construction # # @param anchorset Results from FindIntegrationAnchors # @param new.assay.name Name for the new assay containing the integrated data # @param normalization.method Name of normalization method used: LogNormalize # or SCT # @param features Vector of features to use when computing the PCA to determine # the weights. Only set if you want a different set from those used in the # anchor finding process # @param features.to.integrate Vector of features to integrate. By default, # will use the features used in anchor finding. # @param dims Number of PCs to use in the weighting procedure # @param k.weight Number of neighbors to consider when weighting # @param weight.reduction Dimension reduction to use when calculating anchor # weights. This can be either: # \itemize{ # \item{A string, specifying the name of a dimension reduction present in # all objects to be integrated} # \item{A vector of strings, specifying the name of a dimension reduction to # use for each object to be integrated} # \item{NULL, in which case a new PCA will be calculated and used to # calculate anchor weights} # } # Note that, if specified, the requested dimension reduction will only be used # for calculating anchor weights in the first merge between reference and # query, as the merged object will subsequently contain more cells than was in # query, and weights will need to be calculated for all cells in the object. # @param sd.weight Controls the bandwidth of the Gaussian kernel for weighting # @param sample.tree Specify the order of integration. If NULL, will compute # automatically. # @param preserve.order Do not reorder objects based on size for each pairwise # integration. 
# @param eps Error bound on the neighbor finding algorithm (from # \code{\link{RANN}}) # @param verbose Print progress bars and output # # @return Returns a Seurat object with a new integrated Assay # PairwiseIntegrateReference <- function( anchorset, new.assay.name = "integrated", normalization.method = c("LogNormalize", "SCT"), features = NULL, features.to.integrate = NULL, dims = 1:30, k.weight = 100, weight.reduction = NULL, sd.weight = 1, sample.tree = NULL, preserve.order = FALSE, eps = 0, verbose = TRUE ) { object.list <- slot(object = anchorset, name = "object.list") reference.objects <- slot(object = anchorset, name = "reference.objects") features <- features %||% slot(object = anchorset, name = "anchor.features") features.to.integrate <- features.to.integrate %||% features if (length(x = reference.objects) == 1) { ref.obj <- object.list[[reference.objects]] # TODO: restore once check.matrix is in SeuratObject # ref.obj[[new.assay.name]] <- CreateAssayObject( # data = GetAssayData(ref.obj, slot = 'data')[features.to.integrate, ], # check.matrix = FALSE # ) ref.obj[[new.assay.name]] <- CreateAssayObject( data = GetAssayData(ref.obj, slot = 'data')[features.to.integrate, ] ) DefaultAssay(object = ref.obj) <- new.assay.name return(ref.obj) } anchors <- slot(object = anchorset, name = "anchors") offsets <- slot(object = anchorset, name = "offsets") objects.ncell <- sapply(X = object.list, FUN = ncol) if (!is.null(x = weight.reduction)) { if (length(x = weight.reduction) == 1 | inherits(x = weight.reduction, what = "DimReduc")) { if (length(x = object.list) == 2) { weight.reduction <- list(NULL, weight.reduction) } else if (inherits(x = weight.reduction, what = "character")) { weight.reduction <- as.list(x = rep(x = weight.reduction, times = length(x = object.list))) } else { stop("Invalid input for weight.reduction. 
Please specify either the names of the dimension", "reduction for each object in the list or provide DimReduc objects.") } } if (length(x = weight.reduction) != length(x = object.list)) { stop("Please specify a dimension reduction for each object, or one dimension reduction to be used for all objects") } if (inherits(x = weight.reduction, what = "character")) { weight.reduction <- as.list(x = weight.reduction) } available.reductions <- lapply(X = object.list, FUN = FilterObjects, classes.keep = 'DimReduc') for (ii in 1:length(x = weight.reduction)) { if (ii == 1 & is.null(x = weight.reduction[[ii]])) next if (!inherits(x = weight.reduction[[ii]], what = "DimReduc")) { if (!weight.reduction[[ii]] %in% available.reductions[[ii]]) { stop("Requested dimension reduction (", weight.reduction[[ii]], ") is not present in object ", ii) } weight.reduction[[ii]] <- object.list[[ii]][[weight.reduction[[ii]]]] } } } if (is.null(x = sample.tree)) { similarity.matrix <- CountAnchors( anchor.df = anchors, offsets = offsets, obj.lengths = objects.ncell ) similarity.matrix <- similarity.matrix[reference.objects, reference.objects] sample.tree <- BuildSampleTree(similarity.matrix = similarity.matrix) sample.tree <- AdjustSampleTree(x = sample.tree, reference.objects = reference.objects) } cellnames.list <- list() for (ii in 1:length(x = object.list)) { cellnames.list[[ii]] <- colnames(x = object.list[[ii]]) } unintegrated <- suppressWarnings(expr = merge( x = object.list[[reference.objects[[1]]]], y = object.list[reference.objects[2:length(x = reference.objects)]] )) names(x = object.list) <- as.character(-(1:length(x = object.list))) if (!is.null(x = weight.reduction)) { names(x = weight.reduction) <- names(x = object.list) } if (verbose & (length(x = reference.objects) != length(x = object.list))) { message("Building integrated reference") } for (ii in 1:nrow(x = sample.tree)) { merge.pair <- as.character(x = sample.tree[ii, ]) length1 <- ncol(x = object.list[[merge.pair[1]]]) length2 <- ncol(x = object.list[[merge.pair[2]]]) if (!(preserve.order) & (length2 > length1)) { merge.pair <- rev(x = merge.pair) sample.tree[ii, ] <- as.numeric(merge.pair) } if (!is.null(x = weight.reduction)) { # extract the correct dimreduc objects, in the correct order weight.pair <- weight.reduction[merge.pair] } else { weight.pair <- NULL } object.1 <- DietSeurat( object = object.list[[merge.pair[1]]], assays = DefaultAssay(object = object.list[[merge.pair[1]]]), counts = FALSE ) object.2 <- DietSeurat( object = object.list[[merge.pair[2]]], assays = DefaultAssay(object = object.list[[merge.pair[2]]]), counts = FALSE ) # suppress key duplication warning suppressWarnings(object.1[["ToIntegrate"]] <- object.1[[DefaultAssay(object = object.1)]]) DefaultAssay(object = object.1) <- "ToIntegrate" object.1 <- DietSeurat(object = object.1, assays = "ToIntegrate") suppressWarnings(object.2[["ToIntegrate"]] <- object.2[[DefaultAssay(object = object.2)]]) DefaultAssay(object = object.2) <- "ToIntegrate" object.2 <- DietSeurat(object = object.2, assays = "ToIntegrate") datasets <- ParseMergePair(sample.tree, ii) if (verbose) { message( "Merging dataset ", paste(datasets$object2, collapse = " "), " into ", paste(datasets$object1, collapse = " ") ) } merged.obj <- merge(x = object.1, y = object.2, merge.data = TRUE) if (verbose) { message("Extracting anchors for merged samples") } filtered.anchors <- anchors[anchors$dataset1 %in% datasets$object1 & anchors$dataset2 %in% datasets$object2, ] integrated.matrix <- RunIntegration( 
filtered.anchors = filtered.anchors, normalization.method = normalization.method, reference = object.1, query = object.2, cellnames.list = cellnames.list, new.assay.name = new.assay.name, features.to.integrate = features.to.integrate, features = features, dims = dims, weight.reduction = weight.reduction, k.weight = k.weight, sd.weight = sd.weight, eps = eps, verbose = verbose ) integrated.matrix <- cbind(integrated.matrix, GetAssayData(object = object.1, slot = 'data')[features.to.integrate, ]) # TODO: restore once check.matrix is in SeuratObject # merged.obj[[new.assay.name]] <- CreateAssayObject(data = integrated.matrix, check.matrix = FALSE) merged.obj[[new.assay.name]] <- CreateAssayObject(data = integrated.matrix) DefaultAssay(object = merged.obj) <- new.assay.name object.list[[as.character(x = ii)]] <- merged.obj object.list[[merge.pair[[1]]]] <- NULL object.list[[merge.pair[[2]]]] <- NULL invisible(x = CheckGC()) } integrated.data <- GetAssayData( object = object.list[[as.character(x = ii)]], assay = new.assay.name, slot = 'data' ) integrated.data <- integrated.data[, colnames(x = unintegrated)] new.assay <- new( Class = 'Assay', counts = new(Class = "dgCMatrix"), data = integrated.data, scale.data = matrix(), var.features = vector(), meta.features = data.frame(row.names = rownames(x = integrated.data)), misc = NULL ) unintegrated[[new.assay.name]] <- new.assay # "unintegrated" now contains the integrated assay DefaultAssay(object = unintegrated) <- new.assay.name VariableFeatures(object = unintegrated) <- features if (normalization.method == "SCT"){ unintegrated[[new.assay.name]] <- SetAssayData( object = unintegrated[[new.assay.name]], slot = "scale.data", new.data = as.matrix(x = GetAssayData(object = unintegrated[[new.assay.name]], slot = "data")) ) } unintegrated <- SetIntegrationData( object = unintegrated, integration.name = "Integration", slot = "anchors", new.data = anchors ) unintegrated <- SetIntegrationData( object = unintegrated, integration.name = "Integration", slot = "sample.tree", new.data = sample.tree ) unintegrated[["FindIntegrationAnchors"]] <- slot(object = anchorset, name = "command") suppressWarnings(expr = unintegrated <- LogSeuratCommand(object = unintegrated)) return(unintegrated) } # Parse merge information from dataset clustering # # @param clustering clustering dataframe from hclust ($merge). # Gives the order of merging datasets to get to the root of the tree. # @param i current row in clustering dataframe # ParseMergePair <- function(clustering, i){ # return 2-element list of datasets in first and second object datasets <- list('object1' = clustering[i, 1], 'object2' = clustering[i, 2]) if (datasets$object1 > 0) { datasets$object1 <- ParseRow(clustering, datasets$object1) } if (datasets$object2 > 0) { datasets$object2 <- ParseRow(clustering, datasets$object2) } datasets$object1 <- abs(x = datasets$object1) datasets$object2 <- abs(x = datasets$object2) return(datasets) } # Parse row of clustering order # # Used recursively to work out the dataset composition of a merged object # # @param clustering clustering dataframe from hclust ($merge). # Gives the order of merging datasets to get to the root of the tree. 
# @param i current row in clustering dataframe # ParseRow <- function(clustering, i){ # returns vector of datasets datasets <- as.list(x = clustering[i, ]) if (datasets[[1]] > 0) { datasets[[1]] <- ParseRow(clustering = clustering, i = datasets[[1]]) } if (datasets[[2]] > 0) { datasets[[2]] <- ParseRow(clustering = clustering, i = datasets[[2]]) } return(unlist(datasets)) } # Rescale query with mean and sd from reference, or known mean and SD # # @param reference A reference object # @param query A query object # @param features Features to scale # @param scale Scale data (divide by SD) # @return Returns a matrix containing the scaled query data RescaleQuery <- function( reference, query, reference.assay = NULL, query.assay = NULL, features = NULL, feature.mean = NULL, feature.sd = NULL, scale = TRUE ) { reference.assay <- reference.assay %||% DefaultAssay(object = reference) query.assay <- query.assay %||% DefaultAssay(object = query) features <- features %||% intersect( rownames(x = reference[[reference.assay]]), rownames(x = query[[query.assay]]) ) reference.data <- GetAssayData( object = reference, assay = reference.assay, slot = "data")[features, ] query.data <- GetAssayData( object = query, assay = query.assay, slot = "data")[features, ] if (is.null(x = feature.mean)) { feature.mean <- rowMeans(x = reference.data) if (scale) { feature.sd <- sqrt( x = SparseRowVar2( mat = as(object = reference.data, Class = "dgCMatrix"), mu = feature.mean, display_progress = FALSE ) ) feature.sd[is.na(x = feature.sd)] <- 1 } else { feature.sd <- rep(x = 1, nrow( reference.data)) } feature.mean[is.na(x = feature.mean)] <- 1 } proj.data <- GetAssayData( object = query, assay = query.assay, slot = "data" )[features, ] store.names <- dimnames(x = proj.data) if (is.numeric(x = feature.mean) && feature.mean[[1]] != "SCT") { proj.data <- FastSparseRowScaleWithKnownStats( mat = as(object = proj.data, Class = "dgCMatrix"), mu = feature.mean, sigma = feature.sd, display_progress = FALSE ) } dimnames(x = proj.data) <- store.names return(proj.data) } ProjectCellEmbeddings <- function( reference, query, reduction = "pca", reference.assay = NULL, query.assay = NULL, dims = 1:50, scale = TRUE, verbose = TRUE, feature.mean = NULL, feature.sd = NULL ) { if (verbose) { message("Projecting cell embeddings") } reference.assay <- reference.assay %||% DefaultAssay(object = reference) query.assay <- query.assay %||% DefaultAssay(object = query) features <- rownames(x = Loadings(object = reference[[reduction]])) features <- intersect(x = features, y = rownames(x = query[[query.assay]])) proj.data <- RescaleQuery( reference = reference, query = query, features = features, scale = scale, feature.mean = feature.mean, feature.sd = feature.sd ) ref.feature.loadings <- Loadings(object = reference[[reduction]])[features, dims] proj.pca <- t(crossprod(x = ref.feature.loadings, y = proj.data)) return(proj.pca) } # Project new data onto SVD (LSI or PCA) # # A = U∑V SVD # U' = VA'/∑ LSI projection # # Note that because in LSI we don't multiply by ∑ to get the embeddings (it's just U), # we need to divide by ∑ in the projection to get the equivalent. Therefore need # the singular values, which (in Signac RunLSI) we store in the DimReduc misc slot. # # @param reduction A \code{DimReduc} object containing the SVD dimension # reduction. Assumes original irlba output is stored in the misc slot of the dimreduc. # @param data A data matrix to project onto the SVD. Must contain the same # features used to construct the original SVD. 
# @param mode "pca" or "lsi". Determines if we divide projected values by singular values. # @param features Features to use. If NULL, use all common features between # the dimreduc and the data matrix. # @param do.center Center the projected cell embeddings (subtract mean across cells) # @param do.scale Scale the projected cell embeddings (divide by standard deviation across cells) # @param use.original.stats When standardizing the vectors, use the mean and standard deviation # of the original vectors from the SVD, rather than the mean and standard deviation of the # projected vectors. # @param dims A vector containing the dimensions to use in the projection. If NULL (default), # project to all dimensions in the input SVD. # @param verbose Display messages # # @return Returns a matrix #' @importFrom Matrix crossprod # @export ProjectSVD <- function( reduction, data, mode = "pca", features = NULL, do.center = FALSE, do.scale = FALSE, use.original.stats = FALSE, dims = NULL, verbose = TRUE ) { vt <- Loadings(object = reduction) dims <- dims %||% seq_len(length.out = ncol(x = vt)) features <- features %||% rownames(x = vt) features <- intersect(x = features, y = rownames(x = data)) vt <- vt[features, dims] data <- data[features, ] if (verbose) { message("Projecting new data onto SVD") } projected.u <- as.matrix(x = crossprod(x = vt, y = data)) if (mode == "lsi") { components <- slot(object = reduction, name = 'misc') sigma <- components$d projected.u <- projected.u / sigma[dims] } if (do.center) { if (use.original.stats) { components <- slot(object = reduction, name = 'misc') if ("u" %in% names(x = components)) { # preferentially use original irlba output stored in misc # signac scales and centers embeddings by default embed.mean <- apply(X = components$u, MARGIN = 2, FUN = mean) } else { # raw irlba output not stored, fall back to the reference embeddings ref.emb <- Embeddings(object = reduction) embed.mean <- apply(X = ref.emb, MARGIN = 2, FUN = mean) } } else { # projected.u is transposed so use MARGIN = 1 embed.mean <- apply(X = projected.u, MARGIN = 1, FUN = mean) } projected.u <- projected.u - embed.mean } if (do.scale) { if (use.original.stats) { components <- slot(object = reduction, name = 'misc') if ("u" %in% names(x = components)) { embed.sd <- apply(X = components$u, MARGIN = 2, FUN = sd) } else { ref.emb <- Embeddings(object = reduction) embed.sd <- apply(X = ref.emb, MARGIN = 2, FUN = sd) } } else { embed.sd <- apply(X = projected.u, MARGIN = 1, FUN = sd) } projected.u <- projected.u / embed.sd } return(t(x = projected.u)) } # Calculate position along a defined reference range for a given vector of # numerics. Will range from 0 to 1. 
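# Illustrative sketch of the quantile rescaling performed by ReferenceRange
# below (hypothetical input; wrapped in if (FALSE) so it is never evaluated
# when this file is sourced).
if (FALSE) {
  x <- c(1, 5, 10, 50, 100)
  lo <- quantile(x = x, probs = 0.025)
  hi <- quantile(x = x, probs = 0.975)
  scaled <- (x - lo) / (hi - lo)
  # Values at the lower and upper reference quantiles map to 0 and 1; the most
  # extreme observations fall slightly outside [0, 1] because no clamping is done.
}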
# # @param x Vector of numeric type # @param lower Lower end of reference range # @param upper Upper end of reference range # #' @importFrom stats quantile # # @return Returns a vector that describes the position of each element in # x along the defined reference range # ReferenceRange <- function(x, lower = 0.025, upper = 0.975) { return((x - quantile(x = x, probs = lower)) / (quantile(x = x, probs = upper) - quantile(x = x, probs = lower))) } # Run integration between a reference and query object # # Should only be called from within another function # # @param filtered.anchors A dataframe containing only anchors between reference and query # @param reference A reference object # @param query A query object # @param cellnames.list List of all cell names in all objects to be integrated # @param new.assay.name Name for the new assay containing the integrated data # @param features Vector of features to use when computing the PCA to determine the weights. Only set # if you want a different set from those used in the anchor finding process # @param features.to.integrate Vector of features to integrate. By default, will use the features # used in anchor finding. # @param dims Number of PCs to use in the weighting procedure # @param k.weight Number of neighbors to consider when weighting # @param weight.reduction Dimension reduction to use when calculating anchor weights. # This can be either: # \itemize{ # \item{A string, specifying the name of a dimension reduction present in all objects to be integrated} # \item{A vector of strings, specifying the name of a dimension reduction to use for each object to be integrated} # \item{NULL, in which case a new PCA will be calculated and used to calculate anchor weights} # } # Note that, if specified, the requested dimension reduction will only be used for calculating anchor weights in the # first merge between reference and query, as the merged object will subsequently contain more cells than was in # query, and weights will need to be calculated for all cells in the object. # @param sd.weight Controls the bandwidth of the Gaussian kernel for weighting # @param sample.tree Specify the order of integration. If NULL, will compute automatically. # @param eps Error bound on the neighbor finding algorithm (from \code{\link{RANN}}) # @param verbose Print progress bars and output # RunIntegration <- function( filtered.anchors, normalization.method, reference, query, cellnames.list, new.assay.name, features.to.integrate, weight.reduction, weights.matrix = NULL, no.offset = FALSE, features, dims, k.weight, sd.weight, eps, verbose ) { cells1 <- colnames(x = reference) cells2 <- colnames(x = query) if (nrow(x = filtered.anchors) < k.weight) { warning("Number of anchors is less than k.weight. 
Lowering k.weight for sample pair.") k.weight <- nrow(x = filtered.anchors) } merged.obj <- merge(x = reference, y = query, merge.data = TRUE) if (no.offset) { cell1.offset <- filtered.anchors[, 1] cell2.offset <- filtered.anchors[, 2] } else { cell1.offset <- GetCellOffsets( anchors = filtered.anchors, dataset = 1, cell = 1, cellnames.list = cellnames.list, cellnames = cells1 ) cell2.offset <- GetCellOffsets( anchors = filtered.anchors, dataset = 2, cell = 2, cellnames.list = cellnames.list, cellnames = cells2 ) } filtered.anchors[, 1] <- cell1.offset filtered.anchors[, 2] <- cell2.offset integration.name <- "integrated" merged.obj <- SetIntegrationData( object = merged.obj, integration.name = integration.name, slot = 'anchors', new.data = filtered.anchors ) merged.obj <- SetIntegrationData( object = merged.obj, integration.name = integration.name, slot = 'neighbors', new.data = list('cells1' = cells1, 'cells2' = cells2) ) merged.obj <- FindIntegrationMatrix( object = merged.obj, integration.name = integration.name, features.integrate = features.to.integrate, verbose = verbose ) assay <- DefaultAssay(object = merged.obj) if (is.null(x = weights.matrix)) { if (is.null(x = weight.reduction) && !is.null(x = dims)) { if (normalization.method == "SCT"){ # recenter residuals centered.resids <- ScaleData( object = GetAssayData(object = merged.obj, assay = assay, slot = "data"), do.scale = FALSE, do.center = TRUE, verbose = FALSE ) merged.obj[["pca"]] <- RunPCA( object = centered.resids[features, ], assay = assay, npcs = max(dims), verbose = FALSE, features = features ) } else { merged.obj <- ScaleData( object = merged.obj, features = features, verbose = FALSE ) merged.obj <- RunPCA( object = merged.obj, npcs = max(dims), verbose = FALSE, features = features ) } dr.weights <- merged.obj[['pca']] } else if(is.null(x = weight.reduction) && is.null(x = dims)) { dr.weights <- CreateDimReducObject( embeddings = as.matrix(x = t(x = GetAssayData(object = merged.obj))), key = "int_", assay = "ToIntegrate" ) dims <- 1:ncol(x = dr.weights) } else { # need to match order of objects dr <- weight.reduction[[2]] if (!all(cells2 %in% rownames(x = dr))) { stop("Query cells not present in supplied DimReduc object. 
Set weight.reduction to a DimReduc object containing the query cells.") } if (inherits(x = dr, what = "DimReduc")) { dr.weights <- dr } else { dr.weights <- query[[dr]] } dims <- 1:ncol(x = dr.weights) } merged.obj <- FindWeights( object = merged.obj, integration.name = integration.name, reduction = dr.weights, dims = dims, k = k.weight, sd.weight = sd.weight, eps = eps, verbose = verbose ) } else { merged.obj <- SetIntegrationData( object = merged.obj, integration.name = "integrated", slot = "weights", new.data = weights.matrix ) } merged.obj <- TransformDataMatrix( object = merged.obj, new.assay.name = new.assay.name, features.to.integrate = features.to.integrate, integration.name = integration.name, verbose = verbose ) integrated.matrix <- GetAssayData( object = merged.obj, assay = new.assay.name, slot = 'data' ) return(integrated.matrix[, cells2]) } # order samples based on sample tree # the first sample is reference sample SampleIntegrationOrder <- function(tree) { order <- tree[nrow(x = tree), ] while (sum(order > 0) != 0) { replace.idx <- which(x = order > 0)[1] replace <- tree[order[replace.idx], ] if (replace.idx == 1) { left <- vector() right <- order[(replace.idx + 1):length(x = order)] replace <- tree[order[replace.idx], ] order <- c(left, replace, right) } else if (replace.idx == length(x = order)) { left <- order[1:(replace.idx - 1)] right <- vector() } else { left <- order[1:(replace.idx - 1)] right <- order[(replace.idx + 1):length(x = order)] } order <- c(left, replace, right) } order <- order * (-1) return(order) } ScoreAnchors <- function( object, assay = NULL, integration.name = 'integrated', verbose = TRUE, k.score = 30 ) { assay <- assay %||% DefaultAssay(object = object) anchor.df <- as.data.frame(x = GetIntegrationData(object = object, integration.name = integration.name, slot = 'anchors')) neighbors <- GetIntegrationData(object = object, integration.name = integration.name, slot = "neighbors") offset <- length(x = neighbors$cells1) indices.aa <- Indices(object = neighbors$nnaa) indices.bb <- Indices(object = neighbors$nnbb) indices.ab <- Indices(object = neighbors$nnab) indices.ba <- Indices(object = neighbors$nnba) nbrsetA <- function(x) c(indices.aa[x, 1:k.score], indices.ab[x, 1:k.score] + offset) nbrsetB <- function(x) c(indices.ba[x, 1:k.score], indices.bb[x, 1:k.score] + offset) # score = number of shared neighbors anchor.new <- data.frame( 'cell1' = anchor.df[, 1], 'cell2' = anchor.df[, 2], 'score' = mapply( FUN = function(x, y) { length(x = intersect(x = nbrsetA(x = x), nbrsetB(x = y)))}, anchor.df[, 1], anchor.df[, 2] ) ) # normalize the score max.score <- quantile(anchor.new$score, 0.9) min.score <- quantile(anchor.new$score, 0.01) anchor.new$score <- anchor.new$score - min.score anchor.new$score <- anchor.new$score / (max.score - min.score) anchor.new$score[anchor.new$score > 1] <- 1 anchor.new$score[anchor.new$score < 0] <- 0 anchor.new <- as.matrix(x = anchor.new) object <- SetIntegrationData( object = object, integration.name = integration.name, slot = 'anchors', new.data = anchor.new ) return(object) } # Get top n features across given set of dimensions # # @param object Seurat object # @param reduction Which dimension reduction to use # @param dims Which dimensions to use # @param features.per.dim How many features to consider per dimension # @param max.features Number of features to return at most # @param projected Use projected loadings # TopDimFeatures <- function( object, reduction, dims = 1:10, features.per.dim = 100, max.features = 200, 
projected = FALSE ) { dim.reduction <- object[[reduction]] max.features <- max(length(x = dims) * 2, max.features) num.features <- sapply(X = 1:features.per.dim, FUN = function(y) { length(x = unique(x = as.vector(x = sapply(X = dims, FUN = function(x) { unlist(x = TopFeatures(object = dim.reduction, dim = x, nfeatures = y, balanced = TRUE, projected = projected)) })))) }) max.per.pc <- which.max(x = num.features[num.features < max.features]) features <- unique(x = as.vector(x = sapply(X = dims, FUN = function(x) { unlist(x = TopFeatures(object = dim.reduction, dim = x, nfeatures = max.per.pc, balanced = TRUE, projected = projected)) }))) features <- unique(x = features) return(features) } TransformDataMatrix <- function( object, assay = NULL, new.assay.name = 'integrated', integration.name = 'integrated', features.to.integrate = NULL, reduction = "cca", verbose = TRUE ) { if(verbose) { message("Integrating data") } assay <- assay %||% DefaultAssay(object = object) weights <- GetIntegrationData( object = object, integration.name = integration.name, slot = 'weights' ) integration.matrix <- GetIntegrationData( object = object, integration.name = integration.name, slot = 'integration.matrix' ) neighbors <- GetIntegrationData(object = object, integration.name = integration.name, slot = 'neighbors') nn.cells1 <- neighbors$cells1 nn.cells2 <- neighbors$cells2 data.use1 <- t(x = GetAssayData( object = object, assay = assay, slot = "data")[features.to.integrate, nn.cells1] ) data.use2 <- t(x = GetAssayData( object = object, assay = assay, slot = "data")[features.to.integrate, nn.cells2] ) integrated <- IntegrateDataC(integration_matrix = as(integration.matrix, "dgCMatrix"), weights = as(weights, "dgCMatrix"), expression_cells2 = as(data.use2, "dgCMatrix")) dimnames(integrated) <- dimnames(data.use2) new.expression <- t(rbind(data.use1, integrated)) new.expression <- new.expression[, colnames(object)] new.assay <- new( Class = 'Assay', counts = new(Class = "dgCMatrix"), data = new.expression, scale.data = matrix(), var.features = vector(), meta.features = data.frame(row.names = rownames(x = new.expression)), misc = NULL ) object[[new.assay.name]] <- new.assay return(object) } # Helper function to validate parameters for FindTransferAnchors # ValidateParams_FindTransferAnchors <- function( reference, query, normalization.method, recompute.residuals, reference.assay, reference.neighbors, query.assay, reduction, reference.reduction, project.query, features, scale, npcs, l2.norm, dims, k.anchor, k.filter, k.score, max.features, nn.method, n.trees, eps, approx.pca, mapping.score.k, verbose ) { reference.assay <- reference.assay %||% DefaultAssay(object = reference) ModifyParam(param = "reference.assay", value = reference.assay) query.assay <- query.assay %||% DefaultAssay(object = query) ModifyParam(param = "query.assay", value = query.assay) DefaultAssay(object = reference) <- reference.assay ModifyParam(param = "reference", value = reference) DefaultAssay(object = query) <- query.assay ModifyParam(param = "query", value = query) if (!is.logical(x = scale)) { stop("Scale should be TRUE or FALSE") } if (length(x = reference) > 1 | length(x = query) > 1) { stop("We currently only support transfer between a single query and reference", call. = FALSE) } if (!reduction %in% c("pcaproject", "cca", "lsiproject", "rpca")) { stop("Please select either pcaproject, rpca, cca, or lsiproject for the reduction parameter.", call. 
= FALSE) } if (reduction == "cca" && !is.null(x = reference.reduction)) { stop("Specifying a reference reduction is only compatible with reduction = 'pcaproject'", call. = FALSE) } if (!normalization.method %in% c("LogNormalize", "SCT")) { stop("Please select either LogNormalize or SCT, for the normalization.method parameter.", call. = FALSE) } if (normalization.method == "SCT") { ModifyParam(param = "k.filter", value = NA) } if (reduction == "lsiproject") { ModifyParam(param = "k.filter", value = NA) } if (!is.na(x = k.filter) && k.filter > ncol(x = query)) { warning("k.filter is larger than the number of cells present in the query.\n", "Continuing without anchor filtering.", immediate. = TRUE, call. = FALSE) ModifyParam(param = "k.filter", value = NA) } if ((k.anchor + 1) > min(ncol(x = query), ncol(x = reference))) { stop("Please set k.anchor to be smaller than the number of cells in query (", ncol(x = query), ") and reference (", ncol(x = reference), ") objects.", call. = FALSE) } if ((k.score + 1) > min(ncol(x = query), ncol(x = reference))) { stop("Please set k.score to be smaller than the number of cells in query (", ncol(x = query), ") and reference (", ncol(x = reference), ") objects.", call. = FALSE) } if (reduction == "cca" && isTRUE(x = project.query)) { stop("The project.query workflow is not compatible with reduction = 'cca'", call. = FALSE) } if (IsSCT(assay = query[[query.assay]]) && IsSCT(assay = reference[[reference.assay]]) && normalization.method != "SCT") { warning("Both reference and query assays have been processed with SCTransform.", "Setting normalization.method = 'SCT' and continuing.") normalization.method <- "SCT" ModifyParam(param = "normalization.method", value = "SCT") } if (IsSCT(assay = query[[query.assay]]) && normalization.method == "LogNormalize") { stop("An SCT assay (", query.assay, ") was provided for query.assay but ", "normalization.method was set as LogNormalize", call. = FALSE) } if (IsSCT(assay = query[[query.assay]]) && !inherits(x = query[[query.assay]], what = "SCTAssay")) { query[[query.assay]] <- as(object = query[[query.assay]], Class = "SCTAssay") ModifyParam(param = "query", value = query) } if (IsSCT(assay = reference[[reference.assay]]) && !inherits(x = reference[[reference.assay]], what = "SCTAssay")) { reference[[reference.assay]] <- as(object = reference[[reference.assay]], Class = "SCTAssay") ModifyParam(param = "reference", value = reference) } if (normalization.method != "SCT") { recompute.residuals <- FALSE ModifyParam(param = "recompute.residuals", value = recompute.residuals) } if (recompute.residuals) { reference.model.num <- length(x = slot(object = reference[[reference.assay]], name = "SCTModel.list")) if (reference.model.num > 1) { stop("Given reference assay (", reference.assay, ") has ", reference.model.num , " reference sct models. Please provide a reference assay with a ", " single reference sct model.", call. = FALSE) } else if (reference.model.num == 0) { if (IsSCT(query[[query.assay]])) { stop("Given reference assay (", reference.assay, ") doesn't contain a reference SCT model.\n", "Query assay is a SCTAssay. ", "You can set recompute.residuals to FALSE ", "to use Query residuals to continue the analysis", call. = FALSE) } stop("Given reference assay (", reference.assay, ") doesn't contain a reference SCT model. ", call. 
= FALSE) } else if (reference.model.num == 1) { new.sct.assay <- reference.assay if (verbose) { message("Normalizing query using reference SCT model") } } query.umi.assay <- query.assay if (IsSCT(assay = query[[query.assay]])) { query.sct.models <- slot(object = query[[query.assay]], name = "SCTModel.list") query.umi.assay <- unique(x = unname(obj = unlist(x = lapply(X = query.sct.models, FUN = slot, name = "umi.assay")))) if (length(x = query.umi.assay) > 1) { stop("Query assay provided is an SCTAssay with multiple different original umi assays", call = FALSE) } if (!query.umi.assay %in% Assays(object = query)) { stop("Query assay provided is an SCTAssay based on an orignal UMI assay", " that is no longer present in the query Seurat object. Unable to", " recompute residuals based on the reference SCT model.\n", "If you want to use Query SCTAssay residuals to continue the analysis, ", "you can set recompute.residuals to FALSE", call. = FALSE) } } query <- SCTransform( object = query, reference.SCT.model = slot(object = reference[[reference.assay]], name = "SCTModel.list")[[1]], residual.features = features, assay = query.umi.assay, new.assay.name = new.sct.assay, verbose = FALSE ) ModifyParam(param = "query.assay", value = new.sct.assay) ModifyParam(param = "query", value = query) ModifyParam(param = "reference", value = reference) } if (IsSCT(assay = reference[[reference.assay]]) && normalization.method == "LogNormalize") { stop("An SCT assay (", reference.assay, ") was provided for reference.assay but ", "normalization.method was set as LogNormalize.", call. = FALSE) } if (!IsSCT(assay = reference[[reference.assay]]) && normalization.method == "SCT") { stop("Given reference.assay (", reference.assay, ") has not been processed with ", "SCTransform. Please either run SCTransform or set normalization.method = 'LogNormalize'.", call. = FALSE) } # features must be in both reference and query feature.slot <- ifelse(test = normalization.method == "SCT", yes = "scale.data", no = "data") query.assay.check <- query.assay reference.assay.check <- reference.assay ref.features <- rownames(x = GetAssayData(object = reference[[reference.assay.check]], slot = feature.slot)) query.features <- rownames(x = GetAssayData(object = query[[query.assay.check]], slot = feature.slot)) if (normalization.method == "SCT") { query.model.features <- rownames(x = Misc(object = query[[query.assay]])$vst.out$gene_attr) query.features <- unique(c(query.features, query.model.features)) ref.model.features <- rownames(x = Misc(object = reference[[reference.assay]])$vst.out$gene_attr) ref.features <- unique(c(ref.features, ref.model.features)) } if (!is.null(x = features)) { if (project.query) { features.new <- intersect(x = features, y = ref.features) } else { features.new <- intersect(x = features, y = query.features) } if (length(x = features.new) != length(x = features)) { warning(length(x = features) - length(x = features.new), " features of ", "the features specified were not present in both the reference ", "query assays. \nContinuing with remaining ", length(x = features.new), " features.", immediate. = TRUE, call. = FALSE) features <- features.new } } else { if (project.query) { features <- intersect( x = VariableFeatures(object = query[[query.assay]]), y = ref.features ) } else { features <- intersect( x = VariableFeatures(object = reference[[reference.assay]]), y = query.features ) } } if (length(x = features) == 0) { stop("No features to use in finding transfer anchors. 
To troubleshoot, try ", "explicitly providing features to the features parameter and ensure that ", "they are present in both reference and query assays.", call. = FALSE) } ModifyParam(param = "features", value = features) if (!is.null(x = reference.reduction)) { if (project.query) { if (!reference.reduction %in% Reductions(object = query)){ stop("reference.reduction (", reference.reduction, ") is not present in ", "the provided query object (Note: project.query was set to TRUE).", call. = FALSE) } if (ncol(x = reference[[reference.reduction]]) < max(dims)) { stop("reference.reduction (", reference.reduction, ") does not contain ", "all the dimensions required by the dims parameter (Note: ", "project.query was set to TRUE).", call. = FALSE) } } else { if (!reference.reduction %in% Reductions(object = reference)){ stop("reference.reduction (", reference.reduction, ") is not present in ", "the provided reference object.", call. = FALSE) } if (ncol(x = reference[[reference.reduction]]) < max(dims)) { stop("reference.reduction (", reference.reduction, ") does not contain ", "all the dimensions required by the dims parameter.", call. = FALSE) } } } else { if (reduction == "lsiproject") { stop("Must supply a reference reduction if reduction='lsiproject'") } mdim <- max(dims) if (npcs < mdim) { warning("npcs is smaller than the largest value requested by the dims ", "parameter.\nSetting npcs to ", mdim, " and continuing.", immediate. = TRUE, call. = FALSE) ModifyParam(param = "npcs", value = mdim) if (mdim >= length(x = features)) { stop("npcs (", npcs, ") must be smaller than the number of features (", length(x = features), "). Please either lower the npcs and/or dims ", "parameter settings or increase the size of the feature set.", call. = FALSE) } } } if (!is.null(x = reference.neighbors)) { if (!reference.neighbors %in% Neighbors(object = reference)) { stop("Specified reference.neighbors (", reference.neighbors, ") is not ", "available in the provided reference object.", call. = FALSE) } k.nn <- max(k.score, k.anchor) if (ncol(x = Indices(reference[[reference.neighbors]])) < (k.nn + 1)){ stop("k.score or k.anchor is larger than the number of neighbors ", "contained in reference.nn. Recompute reference.nn using ", "FindNeighbors with k > k.score and k > k.anchor", call. = FALSE) } } } # Helper function to validate parameters for TransferData # ValidateParams_TransferData <- function( anchorset, combined.ob, anchors, reference.cells, query.cells, reference, query, refdata, weight.reduction, l2.norm, dims, k.weight, sd.weight, eps, n.trees, verbose, slot, prediction.assay, label.transfer ) { if (!inherits(x = refdata, what = "list")) { refdata <- list(id = refdata) } for (i in 1:length(x = refdata)) { if (inherits(x = refdata[[i]], what = c("character", "factor"))) { # check is it's in the reference object if (length(x = refdata[[i]]) == 1) { if (is.null(x = reference)) { warning("If providing a single string to refdata element number ", i, ", please provide the reference object. Skipping element ", i, ".", call. = FALSE, immediate. 
= TRUE) refdata[[i]] <- FALSE next } if (refdata[[i]] %in% Assays(object = reference)) { refdata[[i]] <- GetAssayData(object = reference, assay = refdata[[i]]) colnames(x = refdata[[i]]) <- paste0(colnames(x = refdata[[i]]), "_reference") label.transfer[[i]] <- FALSE next } else if (refdata[[i]] %in% colnames(x = reference[[]])) { refdata[[i]] <- reference[[refdata[[i]]]][, 1] } else { warning("Element number ", i, " provided to refdata does not exist in ", "the provided reference object.", call. = FALSE, immediate. = TRUE) refdata[[i]] <- FALSE next } } else if (length(x = refdata[[i]]) != length(x = reference.cells)) { warning("Please provide a vector that is the same length as the number ", "of reference cells used in anchor finding.\n", "Length of vector provided: ", length(x = refdata[[i]]), "\n", "Length of vector required: ", length(x = reference.cells), "\nSkipping element ", i, ".", call. = FALSE, immediate. = TRUE) refdata[[i]] <- FALSE } label.transfer[[i]] <- TRUE } else if (inherits(x = refdata[[i]], what = c("dgCMatrix", "matrix"))) { if (ncol(x = refdata[[i]]) != length(x = reference.cells)) { warning("Please provide a matrix that has the same number of columns as ", "the number of reference cells used in anchor finding.\n", "Number of columns in provided matrix : ", ncol(x = refdata[[i]]), "\n", "Number of columns required : ", length(x = reference.cells), "\nSkipping element ", i, ".", call. = FALSE, immediate. = TRUE) refdata[[i]] <- FALSE } else { colnames(x = refdata[[i]]) <- paste0(colnames(x = refdata[[i]]), "_reference") if (any(!colnames(x = refdata[[i]]) == reference.cells)) { if (any(!colnames(x = refdata[[i]]) %in% reference.cells) || any(!reference.cells %in% colnames(x = refdata[[i]]))) { warning("Some (or all) of the column names of the provided refdata ", "don't match the reference cells used in anchor finding ", "\nSkipping element", i, ".", call. = FALSE, immediate. = TRUE) refdata[[i]] <- FALSE } else { refdata[[i]] <- refdata[[i]][, reference.cells] } } } if (!slot %in% c("counts", "data")) { stop("Please specify slot as either 'counts' or 'data'.") } label.transfer[[i]] <- FALSE } else { warning("Please provide either a vector (character or factor) for label ", "transfer or a matrix for feature transfer. \nType provided: ", class(x = refdata[[i]])) refdata[[i]] <- FALSE } if (names(x = refdata)[i] == "") { possible.names <- make.unique(names = c(names(x = refdata), paste0("e", i))) names(x = refdata)[i] <- possible.names[length(x = possible.names)] if (verbose) { message("refdata element ", i, " is not named. Setting name as ", names(x = refdata)[i]) } } } ModifyParam(param = "label.transfer", value = label.transfer) if (all(unlist(x = lapply(X = refdata, FUN = isFALSE)))) { stop("None of the provided refdata elements are valid.", call. = FALSE) } ModifyParam(param = "refdata", value = refdata) valid.weight.reduction <- c("pcaproject", "pca", "cca", "rpca.ref","lsiproject", "lsi") if (!inherits(x = weight.reduction, "DimReduc")) { if (!weight.reduction %in% valid.weight.reduction) { stop("Please provide one of ", paste(valid.weight.reduction, collapse = ", "), " or a custom DimReduc to ", "the weight.reduction parameter.", call. = FALSE) } if (weight.reduction %in% c("pcaproject", "cca", "rpca.ref", "lsiproject") && !weight.reduction %in% Reductions(object = combined.ob)) { stop("Specified weight.reduction (", weight.reduction, ") is not present ", "in the provided anchorset.", call. 
= FALSE) } if (weight.reduction %in% c("pca", "lsi") && is.null(x = query)) { stop("To use an internal PCA on the query only for weight.reduction, ", "please provide the query object.", call. = FALSE) } } if (inherits(x = weight.reduction, "DimReduc")) { if (is.null(x = dims)) { stop("Please specify dims", call. = FALSE) } if (max(dims) > ncol(x = weight.reduction)) { stop("The max of dims specified (", max(dims), ") is greater than the ", "number of dimensions in the given DimReduc (", ncol(x = weight.reduction), ").", call. = FALSE) } } else { if (is.null(x = dims)) { ModifyParam(param = "dims", value = 1:length(x = slot(object = anchorset, name = "command")$dims)) } } if (!is.null(x = query)) { if (!isTRUE(x = all.equal( target = gsub(pattern = "_query", replacement = "", x = query.cells), current = Cells(x = query), check.attributes = FALSE) )) { stop("Query object provided contains a different set of cells from the ", "query used to construct the AnchorSet provided.", call. = FALSE) } } if(k.weight > nrow(x = anchors)) { stop("Please set k.weight to be smaller than the number of anchors (", nrow(x = anchors), ").", call. = FALSE) } } # Internal function to validate the parameters for IntegrateEmbeddings run on # an IntegrationAnchorSet object # ValidateParams_IntegrateEmbeddings_IntegrationAnchors <- function( anchorset, object.list, reductions, dims.to.integrate, k.weight, weight.reduction, sample.tree ) { nobs <- length(x = object.list) if (is.null(x = reductions)) { stop("Must supply reductions to integrate") } if (!inherits(x = reductions, what = "DimReduc")) { stop("Please provide a single pre-computed DimReduc object to the ", "reductions parameter", call. = FALSE) } else { all.cells <- make.unique(names = unname(obj = do.call( what = c, args = lapply(X = object.list, FUN = Cells))) ) if (nrow(x = reductions) != length(x = all.cells)) { stop("The number of cells in the reduction provided (", nrow(x = reductions), ") doesn't match the number of cells in the objects used to build the ", "AnchorSet (", length(x = all.cells), ").", call. = FALSE) } if (!all(Cells(x = reductions) %in% all.cells)) { stop("The cell names in the reduction provided don't match the cell names ", "present in the objects used to build the AnchorSet", call. = FALSE) } dims.to.integrate <- dims.to.integrate %||% 1:ncol(x = reductions) if (max(dims.to.integrate) > ncol(x = reductions)) { warning("Max dims.to.integrate is larger than the number of dimensions in ", "the provided reduction. Setting dims.to.integrate to 1:", ncol(x = reductions), " and continuing.", immediate. = TRUE, call. = FALSE) } ModifyParam(param = 'dims.to.integrate', value = 1:ncol(x = reductions)) } if (!is.null(x = weight.reduction)) { if (inherits(x = weight.reduction, what = "character")) { if (length(x = weight.reduction) == 1) { weight.reduction <- as.list(x = rep(x = weight.reduction, times = nobs)) } ModifyParam(param = 'weight.reduction', value = weight.reduction) for (i in 1:nobs) { if (!weight.reduction[[i]] %in% Reductions(object = object.list[[i]])) { stop("weight.reduction (", weight.reduction[[i]], ") is not present ", "in object number ", i, ".", call. = FALSE) } } } if (inherits(x = weight.reduction[[1]], what = "DimReduc")) { if (length(x = weight.reduction) != nobs) { stop("Please provide one weight.reduction for each object. ", length(x = weight.reduction), " provided, ", nobs, " required.", call. 
= FALSE) } for (i in 1:nobs) { if (!isTRUE(all.equal( target = Cells(x = weight.reduction[[i]]), current = Cells(x = object.list[[i]]))) ) { stop("Cell names in the provided weight.reduction ", i, " don't ", "match with the cell names in object ", i, ".", call. = FALSE) } } } } min.object.size <- min(sapply(X = object.list, FUN = ncol)) if (k.weight > min.object.size) { stop("k.weight (", k.weight, ") is set larger than the number of cells in ", "the smallest object (", min.object.size, "). Please choose a smaller ", "k.weight.", call. = FALSE) } if (!is.null(x = sample.tree)) { if (ncol(x = sample.tree) != 2) { stop("Invalid sample tree. Please provide a two column matrix specifying the order of integration.") } if (min(sample.tree) < (-1 * nobs)) { stop("Invalid sample tree. Dataset index greater than the number of ", "objects was provided.") } } } # Internal function to validate the parameters for IntegrateEmbeddings run on # a TransferAnchorSet object # ValidateParams_IntegrateEmbeddings_TransferAnchors <- function( anchorset, combined.object , reference, query, reductions, dims.to.integrate, k.weight, weight.reduction, reuse.weights.matrix ) { if (missing(x = reference)) { stop("Please provide the reference object.", call. = FALSE) } if (missing(x = query)) { stop("Please provide the query object.", call. = FALSE) } reference.cells <- slot(object = anchorset, name = "reference.cells") reference.cells <- gsub(pattern = "_reference", replacement = "", x = reference.cells) if (!isTRUE(x = all.equal(target = reference.cells, current = Cells(x = reference)))) { stop("The set of cells used as a reference in the AnchorSet does not match ", "the set of cells provided in the reference object.") } query.cells <- slot(object = anchorset, name = "query.cells") query.cells <- gsub(pattern = "_query", replacement = "", x = query.cells) if (!isTRUE(x = all.equal(target = query.cells, current = Cells(x = query), check.attributes = FALSE))) { stop("The set of cells used as a query in the AnchorSet does not match ", "the set of cells provided in the query object.") } if (length(x = reductions) != 1) { stop("Please provide a single reduction name to reductions that is present ", "in the anchorset.", call. = FALSE) } if (!reductions %in% Reductions(object = combined.object)) { stop("Please specify a reduction that is present in the anchorset: ", paste(Reductions(object = combined.object), collapse = ", "), call. 
= FALSE) } reference <- RenameCells(object = reference, new.names = paste0(Cells(x = reference), "_reference")) reference.embeddings <- Embeddings(object = combined.object[[reductions]])[Cells(x = reference), ] reference[[reductions]] <- CreateDimReducObject(embeddings = reference.embeddings, assay = DefaultAssay(object = reference)) ModifyParam(param = "reference", value = reference) query <- RenameCells(object = query, new.names = paste0(Cells(x = query), "_query")) query.embeddings <- Embeddings(object = combined.object[[reductions]])[Cells(x = query), ] query[[reductions]] <- CreateDimReducObject(embeddings = query.embeddings, assay = DefaultAssay(object = query)) ModifyParam(param = "query", value = query) ModifyParam(param = "reductions", value = c(reductions, reductions)) min.ndim <- min(ncol(x = query[[reductions[2]]]), ncol(x = reference[[reductions[1]]])) if (is.null(x = dims.to.integrate)) { dims.to.integrate <- 1:min.ndim } else { if (max(dims.to.integrate) > min.ndim) { dims.to.integrate <- dims.to.integrate[dims.to.integrate <= min.ndim] warning("Max dims.to.integrate is larger than the max dims for at least ", "one of the reductions specified. Setting dims.to.integrate to ", paste(dims.to.integrate, collapse = ","), " and continuing.", immediate. = TRUE, call. = FALSE) } } ModifyParam(param = "dims.to.integrate", value = dims.to.integrate) if (isTRUE(x = reuse.weights.matrix)) { weights.matrix <- Tool(object = query, slot = "TransferData")$weights.matrix if (is.null(x = weights.matrix)) { message("Requested to reuse weights matrix, but no weights found. Computing new weights.") reuse.weights.matrix <- FALSE } else if (nrow(x = weights.matrix) != nrow(x = slot(object = anchorset, name = "anchors"))) { stop("The number of anchors in the weights matrix stored in the query (", nrow(x = weights.matrix), ") doesn't match the number of anchors ", "in the anchorset (", nrow(x = slot(object = anchorset, name = "anchors")), ").", call. = FALSE) } else { ModifyParam(param = 'weights.matrix', value = weights.matrix) } } # check T/F again due to possible modification in above if (isFALSE(x = reuse.weights.matrix)) { if (k.weight > ncol(x = query)) { stop("k.weight (", k.weight, ") is set larger than the number of cells in ", "the query object (", ncol(x = query), "). Please choose a smaller ", "k.weight.", call. = FALSE) } if (inherits(x = weight.reduction, what = "list")) { if (length(x = weight.reduction) > 2) { stop("Supplied too many dimension reduction objects for weight.reduction. ", "Should supply a single DimReduc object.") } if (length(x = weight.reduction) == 2) { # take the second element as the dimreduc to use for query weight.reduction <- weight.reduction[[2]] } } if (inherits(x = weight.reduction, what = "character")) { if (length(x = weight.reduction) > 2) { stop("Supplied too many dimension reduction names for weight.reduction. ", "Should supply the name of a single DimReduc present in the query.") } if (length(x = weight.reduction) == 2) { # take the second element as the dimreduc to use for query weight.reduction <- weight.reduction[[2]] } if (!weight.reduction %in% Reductions(object = query)) { stop("The weight.reduction ", weight.reduction, " is not present in the ", "query object.", call. 
= FALSE) } ModifyParam(param = 'weight.reduction', value = list(NULL, query[[weight.reduction]])) } if (inherits(x = weight.reduction, what = "DimReduc")) { weight.reduction <- RenameCells(object = weight.reduction, new.names = paste0(Cells(x = weight.reduction), "_query")) if (!isTRUE(all.equal( target = Cells(x = weight.reduction), current = Cells(x = query) ))) { stop("Cell names in the provided weight.reduction don't ", "match with the cell names in the query object.", call. = FALSE) } ModifyParam(param = 'weight.reduction', value = list(NULL, weight.reduction)) } } } Seurat/R/dimensional_reduction.R0000644000176200001440000023737114156670503016443 0ustar liggesusers#' @include generics.R #' NULL #%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% # Functions #%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% #' Determine statistical significance of PCA scores. #' #' Randomly permutes a subset of data, and calculates projected PCA scores for #' these 'random' genes. Then compares the PCA scores for the 'random' genes #' with the observed PCA scores to determine statistical signifance. End result #' is a p-value for each gene's association with each principal component. #' #' @param object Seurat object #' @param reduction DimReduc to use. ONLY PCA CURRENTLY SUPPORTED. #' @param assay Assay used to calculate reduction. #' @param dims Number of PCs to compute significance for #' @param num.replicate Number of replicate samplings to perform #' @param prop.freq Proportion of the data to randomly permute for each #' replicate #' @param verbose Print progress bar showing the number of replicates #' that have been processed. #' @param maxit maximum number of iterations to be performed by the irlba function of RunPCA #' #' @return Returns a Seurat object where JS(object = object[['pca']], slot = 'empirical') #' represents p-values for each gene in the PCA analysis. If ProjectPCA is #' subsequently run, JS(object = object[['pca']], slot = 'full') then #' represents p-values for all genes. #' #' @importFrom methods new #' @importFrom pbapply pblapply pbsapply #' @importFrom future.apply future_lapply future_sapply #' @importFrom future nbrOfWorkers #' #' @references Inspired by Chung et al, Bioinformatics (2014) #' @concept dimensional_reduction #' #' @export #' #' @examples #' \dontrun{ #' data("pbmc_small") #' pbmc_small = suppressWarnings(JackStraw(pbmc_small)) #' head(JS(object = pbmc_small[['pca']], slot = 'empirical')) #' } #' JackStraw <- function( object, reduction = "pca", assay = NULL, dims = 20, num.replicate = 100, prop.freq = 0.01, verbose = TRUE, maxit = 1000 ) { if (reduction != "pca") { stop("Only pca for reduction is currently supported") } if (verbose && nbrOfWorkers() == 1) { my.lapply <- pblapply my.sapply <- pbsapply } else { my.lapply <- future_lapply my.sapply <- future_sapply } assay <- assay %||% DefaultAssay(object = object) if (IsSCT(assay = object[[assay]])) { stop("JackStraw cannot be run on SCTransform-normalized data. Please supply a non-SCT assay.") } if (dims > length(x = object[[reduction]])) { dims <- length(x = object[[reduction]]) warning("Number of dimensions specified is greater than those available. Setting dims to ", dims, " and continuing", immediate. = TRUE) } if (dims > nrow(x = object)) { dims <- nrow(x = object) warning("Number of dimensions specified is greater than the number of cells. Setting dims to ", dims, " and continuing", immediate. 
= TRUE) } loadings <- Loadings(object = object[[reduction]], projected = FALSE) reduc.features <- rownames(x = loadings) if (length(x = reduc.features) < 3) { stop("Too few features") } if (length(x = reduc.features) * prop.freq < 3) { warning( "Number of variable genes given ", prop.freq, " as the prop.freq is low. Consider including more variable genes and/or increasing prop.freq. ", "Continuing with 3 genes in every random sampling." ) } data.use <- GetAssayData(object = object, assay = assay, slot = "scale.data")[reduc.features, ] rev.pca <- object[[paste0('RunPCA.', assay)]]$rev.pca weight.by.var <- object[[paste0('RunPCA.', assay)]]$weight.by.var fake.vals.raw <- my.lapply( X = 1:num.replicate, FUN = JackRandom, scaled.data = data.use, prop.use = prop.freq, r1.use = 1, r2.use = dims, rev.pca = rev.pca, weight.by.var = weight.by.var, maxit = maxit ) fake.vals <- sapply( X = 1:dims, FUN = function(x) { return(as.numeric(x = unlist(x = lapply( X = 1:num.replicate, FUN = function(y) { return(fake.vals.raw[[y]][, x]) } )))) } ) fake.vals <- as.matrix(x = fake.vals) jackStraw.empP <- as.matrix( my.sapply( X = 1:dims, FUN = function(x) { return(unlist(x = lapply( X = abs(loadings[, x]), FUN = EmpiricalP, nullval = abs(fake.vals[,x]) ))) } ) ) colnames(x = jackStraw.empP) <- paste0("PC", 1:ncol(x = jackStraw.empP)) jackstraw.obj <- new( Class = "JackStrawData", empirical.p.values = jackStraw.empP, fake.reduction.scores = fake.vals, empirical.p.values.full = matrix() ) JS(object = object[[reduction]]) <- jackstraw.obj object <- LogSeuratCommand(object = object) return(object) } #' L2-normalization #' #' Perform l2 normalization on given dimensional reduction #' #' @param object Seurat object #' @param reduction Dimensional reduction to normalize #' @param new.dr name of new dimensional reduction to store #' (default is olddr.l2) #' @param new.key name of key for new dimensional reduction #' #' @return Returns a \code{\link{Seurat}} object #' @concept dimensional_reduction #' #' @export #' L2Dim <- function(object, reduction, new.dr = NULL, new.key = NULL) { l2.norm <- L2Norm(mat = Embeddings(object[[reduction]])) if(is.null(new.dr)){ new.dr <- paste0(reduction, ".l2") } if(is.null(new.key)){ new.key <- paste0("L2", Key(object[[reduction]])) } colnames(x = l2.norm) <- paste0(new.key, 1:ncol(x = l2.norm)) l2.dr <- CreateDimReducObject( embeddings = l2.norm, loadings = Loadings(object = object[[reduction]], projected = FALSE), projected = Loadings(object = object[[reduction]], projected = TRUE), assay = DefaultAssay(object = object), stdev = slot(object = object[[reduction]], name = 'stdev'), key = new.key, jackstraw = slot(object = object[[reduction]], name = 'jackstraw'), misc = slot(object = object[[reduction]], name = 'misc') ) object[[new.dr]] <- l2.dr return(object) } #' L2-Normalize CCA #' #' Perform l2 normalization on CCs #' #' @param object Seurat object #' @param \dots Additional parameters to L2Dim. #' @concept dimensional_reduction #' #' @export #' L2CCA <- function(object, ...){ CheckDots(..., fxns = 'L2Dim') return(L2Dim(object = object, reduction = "cca", ...)) } #' Significant genes from a PCA #' #' Returns a set of genes, based on the JackStraw analysis, that have #' statistically significant associations with a set of PCs. #' #' @param object Seurat object #' @param pcs.use PCS to use. #' @param pval.cut P-value cutoff #' @param use.full Use the full list of genes (from the projected PCA). Assumes #' that \code{ProjectDim} has been run. Currently, must be set to FALSE. 
#' @param max.per.pc Maximum number of genes to return per PC. Used to avoid genes from one PC dominating the entire analysis. #' #' @return A vector of genes whose p-values are statistically significant for #' at least one of the given PCs. #' #' @export #' @concept dimensional_reduction #' #' @seealso \code{\link{ProjectDim}} \code{\link{JackStraw}} #' #' @examples #' data("pbmc_small") #' PCASigGenes(pbmc_small, pcs.use = 1:2) #' PCASigGenes <- function( object, pcs.use, pval.cut = 0.1, use.full = FALSE, max.per.pc = NULL ) { # pvals.use <- GetDimReduction(object,reduction.type = "pca",slot = "jackstraw")@empirical.p.values empirical.use <- ifelse(test = use.full, yes = 'full', no = 'empirical') pvals.use <- JS(object = object[['pca']], slot = empirical.use) if (length(x = pcs.use) == 1) { pvals.min <- pvals.use[, pcs.use] } if (length(x = pcs.use) > 1) { pvals.min <- apply(X = pvals.use[, pcs.use], MARGIN = 1, FUN = min) } names(x = pvals.min) <- rownames(x = pvals.use) features <- names(x = pvals.min)[pvals.min < pval.cut] if (!is.null(x = max.per.pc)) { top.features <- TopFeatures( object = object[['pca']], dim = pcs.use, nfeatures = max.per.pc, projected = use.full, balanced = FALSE ) features <- intersect(x = top.features, y = features) } return(features) } #' Project Dimensional reduction onto full dataset #' #' Takes a pre-computed dimensional reduction (typically calculated on a subset #' of genes) and projects this onto the entire dataset (all genes). Note that #' the cell loadings will remain unchanged, but now there are gene loadings for #' all genes. #' #' @param object Seurat object #' @param reduction Reduction to use #' @param assay Assay to use #' @param dims.print Number of dims to print features for #' @param nfeatures.print Number of features with highest/lowest loadings to print for #' each dimension #' @param overwrite Replace the existing data in feature.loadings #' @param do.center Center the dataset prior to projection (should be set to TRUE) #' @param verbose Print top genes associated with the projected dimensions #' #' @return Returns Seurat object with the projected values #' #' @export #' @concept dimensional_reduction #' #' @examples #' data("pbmc_small") #' pbmc_small #' pbmc_small <- ProjectDim(object = pbmc_small, reduction = "pca") #' # Vizualize top projected genes in heatmap #' DimHeatmap(object = pbmc_small, reduction = "pca", dims = 1, balanced = TRUE) #' ProjectDim <- function( object, reduction = "pca", assay = NULL, dims.print = 1:5, nfeatures.print = 20, overwrite = FALSE, do.center = FALSE, verbose = TRUE ) { redeuc <- object[[reduction]] assay <- assay %||% DefaultAssay(object = redeuc) data.use <- GetAssayData( object = object[[assay]], slot = "scale.data" ) if (do.center) { data.use <- scale(x = as.matrix(x = data.use), center = TRUE, scale = FALSE) } cell.embeddings <- Embeddings(object = redeuc) new.feature.loadings.full <- data.use %*% cell.embeddings rownames(x = new.feature.loadings.full) <- rownames(x = data.use) colnames(x = new.feature.loadings.full) <- colnames(x = cell.embeddings) Loadings(object = redeuc, projected = TRUE) <- new.feature.loadings.full if (overwrite) { Loadings(object = redeuc, projected = FALSE) <- new.feature.loadings.full } object[[reduction]] <- redeuc if (verbose) { print( x = redeuc, dims = dims.print, nfeatures = nfeatures.print, projected = TRUE ) } object <- LogSeuratCommand(object = object) return(object) } #' @param query.dims Dimensions (columns) to use from query #' @param reference.dims Dimensions 
(columns) to use from reference #' @param ... Additional parameters to \code{\link{RunUMAP}} #' #' @inheritParams FindNeighbors #' @inheritParams RunUMAP #' #' @rdname ProjectUMAP #' @concept dimensional_reduction #' @export #' ProjectUMAP.default <- function( query, query.dims = NULL, reference, reference.dims = NULL, k.param = 30, nn.method = "annoy", n.trees = 50, annoy.metric = "cosine", l2.norm = FALSE, cache.index = TRUE, index = NULL, neighbor.name = "query_ref.nn", reduction.model, ... ) { query.dims <- query.dims %||% 1:ncol(x = query) reference.dims <- reference.dims %||% query.dims if (length(x = reference.dims) != length(x = query.dims)) { stop("Length of Reference and Query number of dimensions are not equal") } if (any(reference.dims > ncol(x = reference))) { stop("Reference dims is larger than the number of dimensions present.", call. = FALSE) } if (any(query.dims > ncol(x = query))) { stop("Query dims is larger than the number of dimensions present.", call. = FALSE) } if (length(x = Misc(object = reduction.model, slot = 'model')) == 0) { stop( "The provided reduction.model does not have a model stored. Please try ", "running umot-learn on the object first", call. = FALSE ) } query.neighbor <- FindNeighbors( object = reference[, reference.dims], query = query[, query.dims], k.param = k.param, nn.method = nn.method, n.trees = n.trees, annoy.metric = annoy.metric, cache.index = cache.index, index = index, return.neighbor = TRUE, l2.norm = l2.norm ) proj.umap <- RunUMAP(object = query.neighbor, reduction.model = reduction.model, ...) return(list(proj.umap = proj.umap, query.neighbor = query.neighbor)) } #' @rdname ProjectUMAP #' @concept dimensional_reduction #' @export #' @method ProjectUMAP DimReduc #' ProjectUMAP.DimReduc <- function( query, query.dims = NULL, reference, reference.dims = NULL, k.param = 30, nn.method = "annoy", n.trees = 50, annoy.metric = "cosine", l2.norm = FALSE, cache.index = TRUE, index = NULL, neighbor.name = "query_ref.nn", reduction.model, ... ) { proj.umap <- ProjectUMAP( query = Embeddings(object = query), query.dims = query.dims, reference = Embeddings(object = reference), reference.dims = reference.dims, k.param = k.param, nn.method = nn.method, n.trees = 50, annoy.metric = annoy.metric, l2.norm = l2.norm, cache.index = cache.index, index = index, neighbor.name = neighbor.name, reduction.model = reduction.model, ... ) return(proj.umap) } #' @param reference Reference dataset #' @param query.reduction Name of reduction to use from the query for neighbor #' finding #' @param reference.reduction Name of reduction to use from the reference for #' neighbor finding #' @param neighbor.name Name to store neighbor information in the query #' @param reduction.name Name of projected UMAP to store in the query #' @param reduction.key Value for the projected UMAP key #' @rdname ProjectUMAP #' @concept dimensional_reduction #' @export #' @method ProjectUMAP Seurat #' ProjectUMAP.Seurat <- function( query, query.reduction, query.dims = NULL, reference, reference.reduction, reference.dims = NULL, k.param = 30, nn.method = "annoy", n.trees = 50, annoy.metric = "cosine", l2.norm = FALSE, cache.index = TRUE, index = NULL, neighbor.name = "query_ref.nn", reduction.model, reduction.name = "ref.umap", reduction.key = "refUMAP_", ... ) { if (!query.reduction %in% Reductions(object = query)) { stop("The query.reduction (", query.reduction, ") is not present in the ", "provided query", call. 
= FALSE) } if (!reference.reduction %in% Reductions(object = reference)) { stop("The reference.reduction (", reference.reduction, ") is not present in the ", "provided reference.", call. = FALSE) } if (!reduction.model %in% Reductions(object = reference)) { stop("The reduction.model (", reduction.model, ") is not present in the ", "provided reference.", call. = FALSE) } proj.umap <- ProjectUMAP( query = query[[query.reduction]], query.dims = query.dims, reference = reference[[reference.reduction]], reference.dims = reference.dims, k.param = k.param, nn.method = nn.method, n.trees = n.trees, annoy.metric = annoy.metric, l2.norm = l2.norm, cache.index = cache.index, index = index, neighbor.name = neighbor.name, reduction.model = reference[[reduction.model]], reduction.key = reduction.key, assay = DefaultAssay(query), ... ) query[[reduction.name]] <- proj.umap$proj.umap query[[neighbor.name]] <- proj.umap$query.neighbor return(query) } #%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% # Methods for Seurat-defined generics #%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% #' @param standardize Standardize matrices - scales columns to have unit variance #' and mean 0 #' @param num.cc Number of canonical vectors to calculate #' @param seed.use Random seed to set. If NULL, does not set a seed #' @param verbose Show progress messages #' #' @importFrom irlba irlba #' #' @rdname RunCCA #' @concept dimensional_reduction #' @export #' RunCCA.default <- function( object1, object2, standardize = TRUE, num.cc = 20, seed.use = 42, verbose = FALSE, ... ) { if (!is.null(x = seed.use)) { set.seed(seed = seed.use) } cells1 <- colnames(x = object1) cells2 <- colnames(x = object2) if (standardize) { object1 <- Standardize(mat = object1, display_progress = FALSE) object2 <- Standardize(mat = object2, display_progress = FALSE) } mat3 <- crossprod(x = object1, y = object2) cca.svd <- irlba(A = mat3, nv = num.cc) cca.data <- rbind(cca.svd$u, cca.svd$v) colnames(x = cca.data) <- paste0("CC", 1:num.cc) rownames(cca.data) <- c(cells1, cells2) cca.data <- apply( X = cca.data, MARGIN = 2, FUN = function(x) { if (sign(x[1]) == -1) { x <- x * -1 } return(x) } ) return(list(ccv = cca.data, d = cca.svd$d)) } #' @param assay1,assay2 Assays to pull from in the first and second objects, respectively #' @param features Set of genes to use in CCA. Default is the union of both #' the variable features sets present in both objects. #' @param renormalize Renormalize raw data after merging the objects. If FALSE, #' merge the data matrices also. #' @param rescale Rescale the datasets prior to CCA. If FALSE, uses existing data in the scale data slots. #' @param compute.gene.loadings Also compute the gene loadings. NOTE - this will #' scale every gene in the dataset which may impose a high memory cost. #' @param add.cell.id1,add.cell.id2 Add ... #' @param ... Extra parameters (passed onto MergeSeurat in case with two objects #' passed, passed onto ScaleData in case with single object and rescale.groups #' set to TRUE) #' #' @rdname RunCCA #' @concept dimensional_reduction #' @export #' @method RunCCA Seurat #' RunCCA.Seurat <- function( object1, object2, assay1 = NULL, assay2 = NULL, num.cc = 20, features = NULL, renormalize = FALSE, rescale = FALSE, compute.gene.loadings = TRUE, add.cell.id1 = NULL, add.cell.id2 = NULL, verbose = TRUE, ... 
) { assay1 <- assay1 %||% DefaultAssay(object = object1) assay2 <- assay2 %||% DefaultAssay(object = object2) if (assay1 != assay2) { warning("Running CCA on different assays") } if (is.null(x = features)) { if (length(x = VariableFeatures(object = object1, assay = assay1)) == 0) { stop(paste0("VariableFeatures not computed for the ", assay1, " assay in object1")) } if (length(x = VariableFeatures(object = object2, assay = assay2)) == 0) { stop(paste0("VariableFeatures not computed for the ", assay2, " assay in object2")) } features <- union(x = VariableFeatures(object = object1), y = VariableFeatures(object = object2)) if (length(x = features) == 0) { stop("Zero features in the union of the VariableFeature sets ") } } nfeatures <- length(x = features) if (!(rescale)) { data.use1 <- GetAssayData(object = object1, assay = assay1, slot = "scale.data") data.use2 <- GetAssayData(object = object2, assay = assay2, slot = "scale.data") features <- CheckFeatures(data.use = data.use1, features = features, object.name = "object1", verbose = FALSE) features <- CheckFeatures(data.use = data.use2, features = features, object.name = "object2", verbose = FALSE) data1 <- data.use1[features, ] data2 <- data.use2[features, ] } if (rescale) { data.use1 <- GetAssayData(object = object1, assay = assay1, slot = "data") data.use2 <- GetAssayData(object = object2, assay = assay2, slot = "data") features <- CheckFeatures(data.use = data.use1, features = features, object.name = "object1", verbose = FALSE) features <- CheckFeatures(data.use = data.use2, features = features, object.name = "object2", verbose = FALSE) data1 <- data.use1[features,] data2 <- data.use2[features,] if (verbose) message("Rescaling groups") data1 <- FastRowScale(as.matrix(data1)) dimnames(data1) <- list(features, colnames(x = object1)) data2 <- FastRowScale(as.matrix(data2)) dimnames(data2) <- list(features, colnames(x = object2)) } if (length(x = features) / nfeatures < 0.1 & verbose) { warning("More than 10% of provided features filtered out. Please check that the given features are present in the scale.data slot for both the assays provided here and that they have non-zero variance.") } if (length(x = features) < 50) { warning("Fewer than 50 features used as input for CCA.") } if (verbose) { message("Running CCA") } cca.results <- RunCCA( object1 = data1, object2 = data2, standardize = TRUE, num.cc = num.cc, verbose = verbose, ) if (verbose) { message("Merging objects") } combined.object <- merge( x = object1, y = object2, merge.data = TRUE, ... 
) rownames(x = cca.results$ccv) <- Cells(x = combined.object) colnames(x = data1) <- Cells(x = combined.object)[1:ncol(x = data1)] colnames(x = data2) <- Cells(x = combined.object)[(ncol(x = data1) + 1):length(x = Cells(x = combined.object))] combined.object[['cca']] <- CreateDimReducObject( embeddings = cca.results$ccv[colnames(combined.object), ], assay = assay1, key = "CC_" ) combined.object[['cca']]@assay.used <- DefaultAssay(combined.object) if (ncol(combined.object) != (ncol(object1) + ncol(object2))) { warning("Some cells removed after object merge due to minimum feature count cutoff") } combined.scale <- cbind(data1,data2) combined.object <- SetAssayData(object = combined.object,new.data = combined.scale, slot = "scale.data") if (renormalize) { combined.object <- NormalizeData( object = combined.object, assay = assay1, normalization.method = object1[[paste0("NormalizeData.", assay1)]]$normalization.method, scale.factor = object1[[paste0("NormalizeData.", assay1)]]$scale.factor ) } if (compute.gene.loadings) { combined.object <- ProjectDim( object = combined.object, reduction = "cca", verbose = FALSE, overwrite = TRUE) } return(combined.object) } #' @param assay Name of Assay ICA is being run on #' @param nics Number of ICs to compute #' @param rev.ica By default, computes the dimensional reduction on the cell x #' feature matrix. Setting to true will compute it on the transpose (feature x cell #' matrix). #' @param ica.function ICA function from ica package to run (options: icafast, #' icaimax, icajade) #' @param verbose Print the top genes associated with high/low loadings for #' the ICs #' @param ndims.print ICs to print genes for #' @param nfeatures.print Number of genes to print for each IC #' @param reduction.key dimensional reduction key, specifies the string before #' the number for the dimension names. #' @param seed.use Set a random seed. Setting NULL will not set a seed. #' @param \dots Additional arguments to be passed to fastica #' #' @importFrom ica icafast icaimax icajade #' #' @rdname RunICA #' @concept dimensional_reduction #' @export #' @method RunICA default #' RunICA.default <- function( object, assay = NULL, nics = 50, rev.ica = FALSE, ica.function = "icafast", verbose = TRUE, ndims.print = 1:5, nfeatures.print = 30, reduction.name = "ica", reduction.key = "ica_", seed.use = 42, ... ) { CheckDots(..., fxns = ica.function) if (!is.null(x = seed.use)) { set.seed(seed = seed.use) } nics <- min(nics, ncol(x = object)) ica.fxn <- eval(expr = parse(text = ica.function)) if (rev.ica) { ica.results <- ica.fxn(object, nc = nics,...) cell.embeddings <- ica.results$M } else { ica.results <- ica.fxn(t(x = object), nc = nics,...) 
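    # for the standard orientation the input to ica.fxn is cells x features (the
    # transposed object), so the estimated source signals in $S are cells x nics and
    # are used directly as the cell embeddings below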
cell.embeddings <- ica.results$S } feature.loadings <- (as.matrix(x = object ) %*% as.matrix(x = cell.embeddings)) colnames(x = feature.loadings) <- paste0(reduction.key, 1:ncol(x = feature.loadings)) colnames(x = cell.embeddings) <- paste0(reduction.key, 1:ncol(x = cell.embeddings)) reduction.data <- CreateDimReducObject( embeddings = cell.embeddings, loadings = feature.loadings, assay = assay, key = reduction.key ) if (verbose) { print(x = reduction.data, dims = ndims.print, nfeatures = nfeatures.print) } return(reduction.data) } #' @param features Features to compute ICA on #' #' @rdname RunICA #' @concept dimensional_reduction #' @export #' @method RunICA Assay #' RunICA.Assay <- function( object, assay = NULL, features = NULL, nics = 50, rev.ica = FALSE, ica.function = "icafast", verbose = TRUE, ndims.print = 1:5, nfeatures.print = 30, reduction.name = "ica", reduction.key = "ica_", seed.use = 42, ... ) { data.use <- PrepDR( object = object, features = features, verbose = verbose ) reduction.data <- RunICA( object = data.use, assay = assay, nics = nics, rev.ica = rev.ica, ica.function = ica.function, verbose = verbose, ndims.print = ndims.print, nfeatures.print = nfeatures.print, reduction.key = reduction.key, seed.use = seed.use, ... ) return(reduction.data) } #' @param reduction.name dimensional reduction name #' #' @rdname RunICA #' @concept dimensional_reduction #' @method RunICA Seurat #' @export #' RunICA.Seurat <- function( object, assay = NULL, features = NULL, nics = 50, rev.ica = FALSE, ica.function = "icafast", verbose = TRUE, ndims.print = 1:5, nfeatures.print = 30, reduction.name = "ica", reduction.key = "IC_", seed.use = 42, ... ) { assay <- assay %||% DefaultAssay(object = object) assay.data <- GetAssay(object = object, assay = assay) reduction.data <- RunICA( object = assay.data, assay = assay, features = features, nics = nics, rev.ica = rev.ica, ica.function = ica.function, verbose = verbose, ndims.print = ndims.print, nfeatures.print = nfeatures.print, reduction.key = reduction.key, seed.use = seed.use, ... ) object[[reduction.name]] <- reduction.data object <- LogSeuratCommand(object = object) return(object) } #' @param assay Name of Assay PCA is being run on #' @param npcs Total Number of PCs to compute and store (50 by default) #' @param rev.pca By default computes the PCA on the cell x gene matrix. Setting #' to true will compute it on gene x cell matrix. #' @param weight.by.var Weight the cell embeddings by the variance of each PC #' (weights the gene loadings if rev.pca is TRUE) #' @param verbose Print the top genes associated with high/low loadings for #' the PCs #' @param ndims.print PCs to print genes for #' @param nfeatures.print Number of genes to print for each PC #' @param reduction.key dimensional reduction key, specifies the string before #' the number for the dimension names. PC by default #' @param seed.use Set a random seed. By default, sets the seed to 42. Setting #' NULL will not set a seed. #' @param approx Use truncated singular value decomposition to approximate PCA #' #' @importFrom irlba irlba #' @importFrom stats prcomp #' @importFrom utils capture.output #' #' @rdname RunPCA #' @concept dimensional_reduction #' @export #' RunPCA.default <- function( object, assay = NULL, npcs = 50, rev.pca = FALSE, weight.by.var = TRUE, verbose = TRUE, ndims.print = 1:5, nfeatures.print = 30, reduction.key = "PC_", seed.use = 42, approx = TRUE, ... 
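  # Illustrative matrix-level call (not run). 'scaled.mat' is a hypothetical
  # features x cells matrix, e.g. the scale.data slot restricted to variable features;
  # the return value is a DimReduc object with loadings, embeddings and stdev filled in.
  #   pca.red <- RunPCA(object = scaled.mat, npcs = 30, verbose = FALSE)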
) { if (!is.null(x = seed.use)) { set.seed(seed = seed.use) } if (rev.pca) { npcs <- min(npcs, ncol(x = object) - 1) pca.results <- irlba(A = object, nv = npcs, ...) total.variance <- sum(RowVar(x = t(x = object))) sdev <- pca.results$d/sqrt(max(1, nrow(x = object) - 1)) if (weight.by.var) { feature.loadings <- pca.results$u %*% diag(pca.results$d) } else{ feature.loadings <- pca.results$u } cell.embeddings <- pca.results$v } else { total.variance <- sum(RowVar(x = object)) if (approx) { npcs <- min(npcs, nrow(x = object) - 1) pca.results <- irlba(A = t(x = object), nv = npcs, ...) feature.loadings <- pca.results$v sdev <- pca.results$d/sqrt(max(1, ncol(object) - 1)) if (weight.by.var) { cell.embeddings <- pca.results$u %*% diag(pca.results$d) } else { cell.embeddings <- pca.results$u } } else { npcs <- min(npcs, nrow(x = object)) pca.results <- prcomp(x = t(object), rank. = npcs, ...) feature.loadings <- pca.results$rotation sdev <- pca.results$sdev if (weight.by.var) { cell.embeddings <- pca.results$x } else { cell.embeddings <- pca.results$x / (pca.results$sdev[1:npcs] * sqrt(x = ncol(x = object) - 1)) } } } rownames(x = feature.loadings) <- rownames(x = object) colnames(x = feature.loadings) <- paste0(reduction.key, 1:npcs) rownames(x = cell.embeddings) <- colnames(x = object) colnames(x = cell.embeddings) <- colnames(x = feature.loadings) reduction.data <- CreateDimReducObject( embeddings = cell.embeddings, loadings = feature.loadings, assay = assay, stdev = sdev, key = reduction.key, misc = list(total.variance = total.variance) ) if (verbose) { msg <- capture.output(print( x = reduction.data, dims = ndims.print, nfeatures = nfeatures.print )) message(paste(msg, collapse = '\n')) } return(reduction.data) } #' @param features Features to compute PCA on. If features=NULL, PCA will be run #' using the variable features for the Assay. Note that the features must be present #' in the scaled data. Any requested features that are not scaled or have 0 variance #' will be dropped, and the PCA will be run using the remaining features. #' #' @rdname RunPCA #' @concept dimensional_reduction #' @export #' @method RunPCA Assay #' RunPCA.Assay <- function( object, assay = NULL, features = NULL, npcs = 50, rev.pca = FALSE, weight.by.var = TRUE, verbose = TRUE, ndims.print = 1:5, nfeatures.print = 30, reduction.key = "PC_", seed.use = 42, ... ) { data.use <- PrepDR( object = object, features = features, verbose = verbose ) reduction.data <- RunPCA( object = data.use, assay = assay, npcs = npcs, rev.pca = rev.pca, weight.by.var = weight.by.var, verbose = verbose, ndims.print = ndims.print, nfeatures.print = nfeatures.print, reduction.key = reduction.key, seed.use = seed.use, ... ) return(reduction.data) } #' @param reduction.name dimensional reduction name, pca by default #' #' @rdname RunPCA #' @concept dimensional_reduction #' @export #' @method RunPCA Seurat #' RunPCA.Seurat <- function( object, assay = NULL, features = NULL, npcs = 50, rev.pca = FALSE, weight.by.var = TRUE, verbose = TRUE, ndims.print = 1:5, nfeatures.print = 30, reduction.name = "pca", reduction.key = "PC_", seed.use = 42, ... ) { assay <- assay %||% DefaultAssay(object = object) assay.data <- GetAssay(object = object, assay = assay) reduction.data <- RunPCA( object = assay.data, assay = assay, features = features, npcs = npcs, rev.pca = rev.pca, weight.by.var = weight.by.var, verbose = verbose, ndims.print = ndims.print, nfeatures.print = nfeatures.print, reduction.key = reduction.key, seed.use = seed.use, ... 
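    # dispatches to the Assay method above, which uses PrepDR to drop unscaled or
    # zero-variance features before the matrix-level PCA is computed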
) object[[reduction.name]] <- reduction.data object <- LogSeuratCommand(object = object) return(object) } #' @param assay Name of assay that that t-SNE is being run on #' @param seed.use Random seed for the t-SNE. If NULL, does not set the seed #' @param tsne.method Select the method to use to compute the tSNE. Available #' methods are: #' \itemize{ #' \item{Rtsne: }{Use the Rtsne package Barnes-Hut implementation of tSNE (default)} # \item{tsne: }{standard tsne - not recommended for large datasets} #' \item{FIt-SNE: }{Use the FFT-accelerated Interpolation-based t-SNE. Based on #' Kluger Lab code found here: https://github.com/KlugerLab/FIt-SNE} #' } #' @param dim.embed The dimensional space of the resulting tSNE embedding #' (default is 2). For example, set to 3 for a 3d tSNE #' @param reduction.key dimensional reduction key, specifies the string before the number for the dimension names. tSNE_ by default #' #' @importFrom Rtsne Rtsne #' #' @rdname RunTSNE #' @concept dimensional_reduction #' @export #' @method RunTSNE matrix #' RunTSNE.matrix <- function( object, assay = NULL, seed.use = 1, tsne.method = "Rtsne", dim.embed = 2, reduction.key = "tSNE_", ... ) { if (!is.null(x = seed.use)) { set.seed(seed = seed.use) } tsne.data <- switch( EXPR = tsne.method, 'Rtsne' = Rtsne( X = object, dims = dim.embed, pca = FALSE, ... # PCA/is_distance )$Y, 'FIt-SNE' = fftRtsne(X = object, dims = dim.embed, rand_seed = seed.use, ...), stop("Invalid tSNE method: please choose from 'Rtsne' or 'FIt-SNE'") ) colnames(x = tsne.data) <- paste0(reduction.key, 1:ncol(x = tsne.data)) rownames(x = tsne.data) <- rownames(x = object) tsne.reduction <- CreateDimReducObject( embeddings = tsne.data, key = reduction.key, assay = assay, global = TRUE ) return(tsne.reduction) } #' @param cells Which cells to analyze (default, all cells) #' @param dims Which dimensions to use as input features #' #' @rdname RunTSNE #' @concept dimensional_reduction #' @export #' @method RunTSNE DimReduc #' RunTSNE.DimReduc <- function( object, cells = NULL, dims = 1:5, seed.use = 1, tsne.method = "Rtsne", dim.embed = 2, reduction.key = "tSNE_", ... ) { args <- as.list(x = sys.frame(which = sys.nframe())) args <- c(args, list(...)) args$object <- args$object[[cells, args$dims]] args$dims <- NULL args$cells <- NULL args$assay <- DefaultAssay(object = object) return(do.call(what = 'RunTSNE', args = args)) } #' @rdname RunTSNE #' @concept dimensional_reduction #' @export #' @method RunTSNE dist #' RunTSNE.dist <- function( object, assay = NULL, seed.use = 1, tsne.method = "Rtsne", dim.embed = 2, reduction.key = "tSNE_", ... ) { args <- as.list(x = sys.frame(which = sys.nframe())) args <- c(args, list(...)) args$object <- as.matrix(x = args$object) args$is_distance <- TRUE return(do.call(what = 'RunTSNE', args = args)) } #' @param reduction Which dimensional reduction (e.g. PCA, ICA) to use for #' the tSNE. Default is PCA #' @param features If set, run the tSNE on this subset of features #' (instead of running on a set of reduced dimensions). Not set (NULL) by default; #' \code{dims} must be NULL to run on features #' @param distance.matrix If set, runs tSNE on the given distance matrix #' instead of data matrix (experimental) #' @param reduction.name dimensional reduction name, specifies the position in the object$dr list. 
tsne by default #' #' @rdname RunTSNE #' @concept dimensional_reduction #' @export #' @method RunTSNE Seurat #' RunTSNE.Seurat <- function( object, reduction = "pca", cells = NULL, dims = 1:5, features = NULL, seed.use = 1, tsne.method = "Rtsne", dim.embed = 2, distance.matrix = NULL, reduction.name = "tsne", reduction.key = "tSNE_", ... ) { cells <- cells %||% Cells(x = object) tsne.reduction <- if (!is.null(x = distance.matrix)) { RunTSNE( object = distance.matrix, assay = DefaultAssay(object = object), seed.use = seed.use, tsne.method = tsne.method, dim.embed = dim.embed, reduction.key = reduction.key, is_distance = TRUE, ... ) } else if (!is.null(x = dims)) { RunTSNE( object = object[[reduction]], cells = cells, dims = dims, seed.use = seed.use, tsne.method = tsne.method, dim.embed = dim.embed, reduction.key = reduction.key, ... ) } else if (!is.null(x = features)) { RunTSNE( object = t(x = as.matrix(x = GetAssayData(object = object)[features, cells])), assay = DefaultAssay(object = object), seed.use = seed.use, tsne.method = tsne.method, dim.embed = dim.embed, reduction.key = reduction.key, ... ) } else { stop("Unknown way of running tSNE") } object[[reduction.name]] <- tsne.reduction object <- LogSeuratCommand(object = object) return(object) } #' @importFrom reticulate py_module_available py_set_seed import #' @importFrom uwot umap umap_transform #' @importFrom future nbrOfWorkers #' #' @rdname RunUMAP #' @concept dimensional_reduction #' @method RunUMAP default #' @export #' RunUMAP.default <- function( object, reduction.key = 'UMAP_', assay = NULL, reduction.model = NULL, return.model = FALSE, umap.method = 'uwot', n.neighbors = 30L, n.components = 2L, metric = 'cosine', n.epochs = NULL, learning.rate = 1.0, min.dist = 0.3, spread = 1.0, set.op.mix.ratio = 1.0, local.connectivity = 1L, repulsion.strength = 1, negative.sample.rate = 5, a = NULL, b = NULL, uwot.sgd = FALSE, seed.use = 42, metric.kwds = NULL, angular.rp.forest = FALSE, densmap = FALSE, dens.lambda = 2, dens.frac = 0.3, dens.var.shift = 0.1, verbose = TRUE, ... ) { CheckDots(...) if (!is.null(x = seed.use)) { set.seed(seed = seed.use) } if (umap.method != 'umap-learn' && getOption('Seurat.warn.umap.uwot', TRUE)) { warning( "The default method for RunUMAP has changed from calling Python UMAP via reticulate to the R-native UWOT using the cosine metric", "\nTo use Python UMAP via reticulate, set umap.method to 'umap-learn' and metric to 'correlation'", "\nThis message will be shown once per session", call. = FALSE, immediate. = TRUE ) options(Seurat.warn.umap.uwot = FALSE) } if (umap.method == 'uwot-learn') { warning("'uwot-learn' is deprecated. Set umap.method = 'uwot' and return.model = TRUE") umap.method <- "uwot" return.model <- TRUE } if (densmap && umap.method != 'umap-learn'){ warning("densmap is only supported by umap-learn method. Method is changed to 'umap-learn'") umap.method <- 'umap-learn' } if (return.model) { if (verbose) { message("UMAP will return its model") } umap.method = "uwot" } if (inherits(x = object, what = "Neighbor")) { object <- list( idx = Indices(object), dist = Distances(object) ) } if (!is.null(x = reduction.model)) { if (verbose) { message("Running UMAP projection") } umap.method <- "uwot-predict" } umap.output <- switch( EXPR = umap.method, 'umap-learn' = { if (!py_module_available(module = 'umap')) { stop("Cannot find UMAP, please install through pip (e.g. 
pip install umap-learn).") } if (!py_module_available(module = 'sklearn')) { stop("Cannot find sklearn, please install through pip (e.g. pip install scikit-learn).") } if (!is.null(x = seed.use)) { py_set_seed(seed = seed.use) } if (typeof(x = n.epochs) == "double") { n.epochs <- as.integer(x = n.epochs) } umap_import <- import(module = "umap", delay_load = TRUE) sklearn <- import("sklearn", delay_load = TRUE) if (densmap && numeric_version(x = umap_import$pkg_resources$get_distribution("umap-learn")$version) < numeric_version(x = "0.5.0")) { stop("densmap is only supported by versions >= 0.5.0 of umap-learn. Upgrade umap-learn (e.g. pip install --upgrade umap-learn).") } random.state <- sklearn$utils$check_random_state(seed = as.integer(x = seed.use)) umap.args <- list( n_neighbors = as.integer(x = n.neighbors), n_components = as.integer(x = n.components), metric = metric, n_epochs = n.epochs, learning_rate = learning.rate, min_dist = min.dist, spread = spread, set_op_mix_ratio = set.op.mix.ratio, local_connectivity = local.connectivity, repulsion_strength = repulsion.strength, negative_sample_rate = negative.sample.rate, random_state = random.state, a = a, b = b, metric_kwds = metric.kwds, angular_rp_forest = angular.rp.forest, verbose = verbose ) if (numeric_version(x = umap_import$pkg_resources$get_distribution("umap-learn")$version) >= numeric_version(x = "0.5.0")) { umap.args <- c(umap.args, list( densmap = densmap, dens_lambda = dens.lambda, dens_frac = dens.frac, dens_var_shift = dens.var.shift, output_dens = FALSE )) } umap <- do.call(what = umap_import$UMAP, args = umap.args) umap$fit_transform(as.matrix(x = object)) }, 'uwot' = { if (is.list(x = object)) { umap( X = NULL, nn_method = object, n_threads = nbrOfWorkers(), n_components = as.integer(x = n.components), metric = metric, n_epochs = n.epochs, learning_rate = learning.rate, min_dist = min.dist, spread = spread, set_op_mix_ratio = set.op.mix.ratio, local_connectivity = local.connectivity, repulsion_strength = repulsion.strength, negative_sample_rate = negative.sample.rate, a = a, b = b, fast_sgd = uwot.sgd, verbose = verbose, ret_model = return.model ) } else { umap( X = object, n_threads = nbrOfWorkers(), n_neighbors = as.integer(x = n.neighbors), n_components = as.integer(x = n.components), metric = metric, n_epochs = n.epochs, learning_rate = learning.rate, min_dist = min.dist, spread = spread, set_op_mix_ratio = set.op.mix.ratio, local_connectivity = local.connectivity, repulsion_strength = repulsion.strength, negative_sample_rate = negative.sample.rate, a = a, b = b, fast_sgd = uwot.sgd, verbose = verbose, ret_model = return.model ) } }, 'uwot-predict' = { if (metric == 'correlation') { warning( "UWOT does not implement the correlation metric, using cosine instead", call. = FALSE, immediate. = TRUE ) metric <- 'cosine' } if (is.null(x = reduction.model) || !inherits(x = reduction.model, what = 'DimReduc')) { stop( "If running projection UMAP, please pass a DimReduc object with the model stored to reduction.model.", call. = FALSE ) } model <- Misc( object = reduction.model, slot = "model" ) if (length(x = model) == 0) { stop( "The provided reduction.model does not have a model stored. Please try running umot-learn on the object first", call. 
= FALSE ) } if (is.list(x = object)) { if (ncol(object$idx) != model$n_neighbors) { warning("Number of neighbors between query and reference ", "is not equal to the number of neighbros within reference") model$n_neighbors <- ncol(object$idx) } umap_transform( X = NULL, nn_method = object, model = model, n_threads = nbrOfWorkers(), n_epochs = n.epochs, verbose = verbose ) } else { umap_transform( X = object, model = model, n_threads = nbrOfWorkers(), n_epochs = n.epochs, verbose = verbose ) } }, stop("Unknown umap method: ", umap.method, call. = FALSE) ) if (return.model) { umap.output$nn_index <- NULL umap.model <- umap.output umap.output <- umap.output$embedding } colnames(x = umap.output) <- paste0(reduction.key, 1:ncol(x = umap.output)) if (inherits(x = object, what = 'dist')) { rownames(x = umap.output) <- attr(x = object, "Labels") } else if (is.list(x = object)) { rownames(x = umap.output) <- rownames(x = object$idx) } else { rownames(x = umap.output) <- rownames(x = object) } umap.reduction <- CreateDimReducObject( embeddings = umap.output, key = reduction.key, assay = assay, global = TRUE ) if (return.model) { Misc(umap.reduction, slot = "model") <- umap.model } return(umap.reduction) } #' @importFrom reticulate py_module_available import #' #' @rdname RunUMAP #' @concept dimensional_reduction #' @method RunUMAP Graph #' @export #' RunUMAP.Graph <- function( object, assay = NULL, umap.method = 'umap-learn', n.components = 2L, metric = 'correlation', n.epochs = 0L, learning.rate = 1, min.dist = 0.3, spread = 1, repulsion.strength = 1, negative.sample.rate = 5L, a = NULL, b = NULL, uwot.sgd = FALSE, seed.use = 42L, metric.kwds = NULL, densmap = FALSE, densmap.kwds = NULL, verbose = TRUE, reduction.key = 'UMAP_', ... ) { #CheckDots(...) if (umap.method != 'umap-learn') { warning( "Running UMAP on Graph objects is only supported using the umap-learn method", call. = FALSE, immediate. = TRUE ) } if (!py_module_available(module = 'umap')) { stop("Cannot find UMAP, please install through pip (e.g. pip install umap-learn).") } if (!py_module_available(module = 'numpy')) { stop("Cannot find numpy, please install through pip (e.g. pip install numpy).") } if (!py_module_available(module = 'sklearn')) { stop("Cannot find sklearn, please install through pip (e.g. pip install scikit-learn).") } if (!py_module_available(module = 'scipy')) { stop("Cannot find scipy, please install through pip (e.g. 
pip install scipy).") } np <- import("numpy", delay_load = TRUE) sp <- import("scipy", delay_load = TRUE) sklearn <- import("sklearn", delay_load = TRUE) umap <- import("umap", delay_load = TRUE) diag(x = object) <- 0 data <- object object <- sp$sparse$coo_matrix(arg1 = object) ab.params <- umap$umap_$find_ab_params(spread = spread, min_dist = min.dist) a <- a %||% ab.params[[1]] b <- b %||% ab.params[[2]] n.epochs <- n.epochs %||% 0L random.state <- sklearn$utils$check_random_state(seed = as.integer(x = seed.use)) umap.args <- list( data = data, graph = object, n_components = n.components, initial_alpha = learning.rate, a = a, b = b, gamma = repulsion.strength, negative_sample_rate = negative.sample.rate, n_epochs = as.integer(x = n.epochs), random_state = random.state, init = "spectral", metric = metric, metric_kwds = metric.kwds, verbose = verbose ) if (numeric_version(x = umap$pkg_resources$get_distribution("umap-learn")$version) >= numeric_version(x = "0.5.0")) { umap.args <- c(umap.args, list( densmap = densmap, densmap_kwds = densmap.kwds, output_dens = FALSE )) } embeddings <- do.call(what = umap$umap_$simplicial_set_embedding, args = umap.args) if (length(x = embeddings) == 2) { embeddings <- embeddings[[1]] } rownames(x = embeddings) <- colnames(x = data) colnames(x = embeddings) <- paste0("UMAP_", 1:n.components) # center the embeddings on zero embeddings <- scale(x = embeddings, scale = FALSE) umap <- CreateDimReducObject( embeddings = embeddings, key = reduction.key, assay = assay, global = TRUE ) return(umap) } #' @rdname RunUMAP #' @concept dimensional_reduction #' @method RunUMAP Neighbor #' @export #' RunUMAP.Neighbor <- function( object, reduction.model, ... ) { neighborlist <- list("idx" = Indices(object), "dist" = Distances(object)) RunUMAP( object = neighborlist, reduction.model = reduction.model, ... ) } #' @param reduction.model \code{DimReduc} object that contains the umap model #' @param dims Which dimensions to use as input features, used only if #' \code{features} is NULL #' @param reduction Which dimensional reduction (PCA or ICA) to use for the #' UMAP input. Default is PCA #' @param features If set, run UMAP on this subset of features (instead of running on a #' set of reduced dimensions). Not set (NULL) by default; \code{dims} must be NULL to run #' on features #' @param graph Name of graph on which to run UMAP #' @param assay Assay to pull data for when using \code{features}, or assay used to construct Graph #' if running UMAP on a Graph #' @param nn.name Name of knn output on which to run UMAP #' @param slot The slot used to pull data for when using \code{features}. data slot is by default. #' @param umap.method UMAP implementation to run. Can be #' \describe{ #' \item{\code{uwot}:}{Runs umap via the uwot R package} #' \item{\code{uwot-learn}:}{Runs umap via the uwot R package and return the learned umap model} #' \item{\code{umap-learn}:}{Run the Seurat wrapper of the python umap-learn package} #' } #' @param n.neighbors This determines the number of neighboring points used in #' local approximations of manifold structure. Larger values will result in more #' global structure being preserved at the loss of detailed local structure. In #' general this parameter should often be in the range 5 to 50. #' @param n.components The dimension of the space to embed into. #' @param metric metric: This determines the choice of metric used to measure #' distance in the input space. 
A wide variety of metrics are already coded, and #' a user defined function can be passed as long as it has been JITd by numba. #' @param n.epochs The number of training epochs to be used in optimizing the low dimensional #' embedding. Larger values result in more accurate embeddings. If NULL is specified, a value will #' be selected based on the size of the input dataset (200 for large datasets, 500 for small). #' @param learning.rate The initial learning rate for the embedding optimization. #' @param min.dist This controls how tightly the embedding is allowed to compress points together. #' Larger values ensure embedded points are more evenly distributed, while smaller values allow the #' algorithm to optimise more accurately with regard to local structure. Sensible values are in #' the range 0.001 to 0.5. #' @param spread The effective scale of embedded points. In combination with min.dist this #' determines how clustered/clumped the embedded points are. #' @param set.op.mix.ratio Interpolate between (fuzzy) union and intersection as the set operation #' used to combine local fuzzy simplicial sets to obtain a global fuzzy simplicial set. Both fuzzy #' set operations use the product t-norm. The value of this parameter should be between 0.0 and #' 1.0; a value of 1.0 will use a pure fuzzy union, while 0.0 will use a pure fuzzy intersection. #' @param local.connectivity The local connectivity required - i.e. the number of nearest neighbors #' that should be assumed to be connected at a local level. The higher this value the more connected #' the manifold becomes locally. In practice this should be not more than the local intrinsic #' dimension of the manifold. #' @param repulsion.strength Weighting applied to negative samples in low dimensional embedding #' optimization. Values higher than one will result in greater weight being given to negative #' samples. #' @param negative.sample.rate The number of negative samples to select per positive sample in the #' optimization process. Increasing this value will result in greater repulsive force being applied, #' greater optimization cost, but slightly more accuracy. #' @param a More specific parameters controlling the embedding. If NULL, these values are set #' automatically as determined by min.dist and spread. Parameter of differentiable approximation of #' right adjoint functor. #' @param b More specific parameters controlling the embedding. If NULL, these values are set #' automatically as determined by min.dist and spread. Parameter of differentiable approximation of #' right adjoint functor. #' @param uwot.sgd Set \code{uwot::umap(fast_sgd = TRUE)}; see \code{\link[uwot]{umap}} for more details #' @param metric.kwds A dictionary of arguments to pass on to the metric, such as the p value for #' Minkowski distance. If NULL then no arguments are passed on. #' @param angular.rp.forest Whether to use an angular random projection forest to initialise the #' approximate nearest neighbor search. This can be faster, but is mostly only useful for metrics that #' use an angular style distance such as cosine, correlation etc. In the case of those metrics #' angular forests will be chosen automatically. #' @param densmap Whether to use the density-augmented objective of densMAP. #' Turning on this option generates an embedding where the local densities #' are encouraged to be correlated with those in the original space. #' Parameters below with the prefix 'dens' further control the behavior #' of this extension. Default is FALSE. 
Only compatible with 'umap-learn' method #' and version of umap-learn >= 0.5.0 #' @param densmap.kwds A dictionary of arguments to pass on to the densMAP optimization. #' @param dens.lambda Specific parameter which controls the regularization weight #' of the density correlation term in densMAP. Higher values prioritize density #' preservation over the UMAP objective, and vice versa for values closer to zero. #' Setting this parameter to zero is equivalent to running the original UMAP algorithm. #' Default value is 2. #' @param dens.frac Specific parameter which controls the fraction of epochs #' (between 0 and 1) where the density-augmented objective is used in densMAP. #' The first (1 - dens_frac) fraction of epochs optimize the original UMAP #' objective before introducing the density correlation term. Default is 0.3. #' @param dens.var.shift Specific parameter which specifies a small constant #' added to the variance of local radii in the embedding when calculating #' the density correlation objective to prevent numerical instability from #' dividing by a small number. Default is 0.1. #' @param reduction.name Name to store dimensional reduction under in the Seurat object #' @param reduction.key dimensional reduction key, specifies the string before #' the number for the dimension names. UMAP by default #' @param return.model whether UMAP will return the uwot model #' @param seed.use Set a random seed. By default, sets the seed to 42. Setting #' NULL will not set a seed #' @param verbose Controls verbosity #' #' @rdname RunUMAP #' @concept dimensional_reduction #' @export #' @method RunUMAP Seurat #' RunUMAP.Seurat <- function( object, dims = NULL, reduction = 'pca', features = NULL, graph = NULL, assay = DefaultAssay(object = object), nn.name = NULL, slot = 'data', umap.method = 'uwot', reduction.model = NULL, return.model = FALSE, n.neighbors = 30L, n.components = 2L, metric = 'cosine', n.epochs = NULL, learning.rate = 1, min.dist = 0.3, spread = 1, set.op.mix.ratio = 1, local.connectivity = 1L, repulsion.strength = 1, negative.sample.rate = 5L, a = NULL, b = NULL, uwot.sgd = FALSE, seed.use = 42L, metric.kwds = NULL, angular.rp.forest = FALSE, densmap = FALSE, dens.lambda = 2, dens.frac = 0.3, dens.var.shift = 0.1, verbose = TRUE, reduction.name = 'umap', reduction.key = 'UMAP_', ... ) { CheckDots(...) if (sum(c(is.null(x = dims), is.null(x = features), is.null(x = graph))) < 2) { stop("Please specify only one of the following arguments: dims, features, or graph") } if (!is.null(x = features)) { data.use <- as.matrix(x = t(x = GetAssayData(object = object, slot = slot, assay = assay)[features, , drop = FALSE])) if (ncol(x = data.use) < n.components) { stop( "Please provide as many or more features than n.components: ", length(x = features), " features provided, ", n.components, " UMAP components requested", call. = FALSE ) } } else if (!is.null(x = dims)) { data.use <- Embeddings(object[[reduction]])[, dims] assay <- DefaultAssay(object = object[[reduction]]) if (length(x = dims) < n.components) { stop( "Please provide as many or more dims than n.components: ", length(x = dims), " dims provided, ", n.components, " UMAP components requested", call. = FALSE ) } } else if (!is.null(x = nn.name)) { if (!inherits(x = object[[nn.name]], what = "Neighbor")) { stop( "Please specify a Neighbor object name, ", "instead of the name of a ", class(object[[nn.name]]), " object", call. 
= FALSE ) } data.use <- object[[nn.name]] } else if (!is.null(x = graph)) { if (!inherits(x = object[[graph]], what = "Graph")) { stop( "Please specify a Graph object name, ", "instead of the name of a ", class(object[[graph]]), " object", call. = FALSE ) } data.use <- object[[graph]] } else { stop("Please specify one of dims, features, or graph") } object[[reduction.name]] <- RunUMAP( object = data.use, reduction.model = reduction.model, return.model = return.model, assay = assay, umap.method = umap.method, n.neighbors = n.neighbors, n.components = n.components, metric = metric, n.epochs = n.epochs, learning.rate = learning.rate, min.dist = min.dist, spread = spread, set.op.mix.ratio = set.op.mix.ratio, local.connectivity = local.connectivity, repulsion.strength = repulsion.strength, negative.sample.rate = negative.sample.rate, a = a, b = b, uwot.sgd = uwot.sgd, seed.use = seed.use, metric.kwds = metric.kwds, angular.rp.forest = angular.rp.forest, densmap = densmap, dens.lambda = dens.lambda, dens.frac = dens.frac, dens.var.shift = dens.var.shift, reduction.key = reduction.key, verbose = verbose ) object <- LogSeuratCommand(object = object) return(object) } #' @param dims Which dimensions to examine #' @param score.thresh Threshold to use for the proportion test of PC #' significance (see Details) #' #' @importFrom stats prop.test #' #' @rdname ScoreJackStraw #' @concept dimensional_reduction #' @export #' @method ScoreJackStraw JackStrawData #' ScoreJackStraw.JackStrawData <- function( object, dims = 1:5, score.thresh = 1e-5, ... ) { CheckDots(...) pAll <- JS(object = object, slot = "empirical.p.values") pAll <- pAll[, dims, drop = FALSE] pAll <- as.data.frame(pAll) pAll$Contig <- rownames(x = pAll) score.df <- NULL for (i in dims) { pc.score <- suppressWarnings(prop.test( x = c( length(x = which(x = pAll[, i] <= score.thresh)), floor(x = nrow(x = pAll) * score.thresh) ), n = c(nrow(pAll), nrow(pAll)) )$p.val) if (length(x = which(x = pAll[, i] <= score.thresh)) == 0) { pc.score <- 1 } if (is.null(x = score.df)) { score.df <- data.frame(PC = paste0("PC", i), Score = pc.score) } else { score.df <- rbind(score.df, data.frame(PC = paste0("PC", i), Score = pc.score)) } } score.df$PC <- dims score.df <- as.matrix(score.df) JS(object = object, slot = 'overall') <- score.df return(object) } #' @rdname ScoreJackStraw #' @concept dimensional_reduction #' @export #' @method ScoreJackStraw DimReduc #' ScoreJackStraw.DimReduc <- function(object, dims = 1:5, score.thresh = 1e-5, ...) { JS(object = object) <- ScoreJackStraw( object = JS(object = object), dims = dims, score.thresh = score.thresh, ... ) return(object) } #' @param reduction Reduction associated with JackStraw to score #' @param do.plot Show plot. To return ggplot object, use \code{JackStrawPlot} after #' running ScoreJackStraw. #' #' @seealso \code{\link{JackStrawPlot}} #' #' @rdname ScoreJackStraw #' @concept dimensional_reduction #' @export #' @method ScoreJackStraw Seurat #' ScoreJackStraw.Seurat <- function( object, reduction = "pca", dims = 1:5, score.thresh = 1e-5, do.plot = FALSE, ... ) { object[[reduction]] <- ScoreJackStraw( object = object[[reduction]], dims = dims, score.thresh = score.thresh, ... ) if (do.plot) { CheckDots(..., fxns = 'JackStrawPlot') suppressWarnings(expr = print(JackStrawPlot( object = object, reduction = reduction, dims = dims, ... 
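      # arguments passed through ... that are meant for JackStrawPlot were validated
      # above by CheckDots(..., fxns = 'JackStrawPlot') and are forwarded here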
))) } object <- LogSeuratCommand(object = object) return(object) } #%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% # Methods for R-defined generics #%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% #%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% # Internal #%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% # Check that features are present and have non-zero variance # # @param data.use Feature matrix (features are rows) # @param features Features to check # @param object.name Name of object for message printing # @param verbose Print warnings # # @return Returns a vector of features that is the subset of features # that have non-zero variance # CheckFeatures <- function( data.use, features, object.name, verbose = TRUE ) { if (any(!features %in% rownames(x = data.use))) { missing.features <- features[!features %in% rownames(x = data.use)] features <- setdiff(x = features, y = missing.features) if (verbose){ warning( paste0( "The following ", length(x = missing.features), " features are not scaled in ", object.name, ": ", paste0(missing.features, collapse = ", ") )) } } if (inherits(x = data.use, what = 'dgCMatrix')) { features.var <- SparseRowVar(mat = data.use[features, ], display_progress = F) } else { features.var <- RowVar(x = data.use[features, ]) } no.var.features <- features[features.var == 0] if (length(x = no.var.features) > 0 && verbose) { warning( paste0( "The following features have zero variance in ", object.name, ": ", paste0(no.var.features, collapse = ", ") )) } features <- setdiff(x = features, y = no.var.features) features <- features[!is.na(x = features)] return(features) } #internal EmpiricalP <- function(x, nullval) { return(sum(nullval > x) / length(x = nullval)) } # FIt-SNE helper function for calling fast_tsne from R # # Based on Kluger Lab FIt-SNE v1.2.1 code on https://github.com/KlugerLab/FIt-SNE/blob/master/fast_tsne.R # commit 601608ed42e4be2765970910927da20f0b0bf9b9 on June 25, 2020 # #' @importFrom utils file_test # fftRtsne <- function(X, dims = 2, perplexity = 30, theta = 0.5, max_iter = 750, fft_not_bh = TRUE, ann_not_vptree = TRUE, stop_early_exag_iter = 250, exaggeration_factor = 12.0, no_momentum_during_exag = FALSE, start_late_exag_iter = -1, late_exag_coeff = 1.0, mom_switch_iter = 250, momentum = 0.5, final_momentum = 0.8, learning_rate = 'auto', n_trees = 50, search_k = -1, rand_seed = -1, nterms = 3, intervals_per_integer = 1, min_num_intervals = 50, K = -1, sigma = -30, initialization = 'pca', max_step_norm = 5, data_path = NULL, result_path = NULL, load_affinities = NULL, fast_tsne_path = NULL, nthreads = getOption('mc.cores', default = 1), perplexity_list = NULL, get_costs = FALSE, df = 1.0, ... ) { CheckDots(...) 
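  # Overall flow of this wrapper: write the input matrix and all tuning parameters to a
  # temporary binary file, invoke the external fast_tsne executable on it, then read the
  # resulting embedding (and optionally the per-iteration costs) back from a result file.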
if (is.null(x = data_path)) { data_path <- tempfile(pattern = 'fftRtsne_data_', fileext = '.dat') } if (is.null(x = result_path)) { result_path <- tempfile(pattern = 'fftRtsne_result_', fileext = '.dat') } if (is.null(x = fast_tsne_path)) { # suppressWarnings(expr = fast_tsne_path <- system2(command = 'which', args = 'fast_tsne', stdout = TRUE)) fast_tsne_path <- SysExec(progs = ifelse( test = .Platform$OS.type == 'windows', yes = 'FItSNE.exe', no = 'fast_tsne' )) if (length(x = fast_tsne_path) == 0) { stop("no fast_tsne_path specified and fast_tsne binary is not in the search path") } } fast_tsne_path <- normalizePath(path = fast_tsne_path) if (!file_test(op = '-x', x = fast_tsne_path)) { stop("fast_tsne_path '", fast_tsne_path, "' does not exist or is not executable") } # check fast_tsne version ft.out <- suppressWarnings(expr = system2(command = fast_tsne_path, stdout = TRUE)) version_number <- regmatches(ft.out[1], regexpr('= t-SNE v[0-9.]+', ft.out[1])) if (is.null(version_number)){ message("First line of fast_tsne output is") message(ft.out[1]) stop("Our FIt-SNE wrapper requires FIt-SNE v1.0+, please install the appropriate version from github.com/KlugerLab/FIt-SNE and have fast_tsne_path point to it if it's not in your path") } else { version_number <- gsub('= t-SNE v', '', version_number) } is.wholenumber <- function(x, tol = .Machine$double.eps ^ 0.5) { return(abs(x = x - round(x = x)) < tol) } if (version_number == '1.0.0' && df != 1.0) { stop("This version of FIt-SNE does not support df!=1. Please install the appropriate version from github.com/KlugerLab/FIt-SNE") } if (!is.numeric(x = theta) || (theta < 0.0) || (theta > 1.0) ) { stop("Incorrect theta.") } if (nrow(x = X) - 1 < 3 * perplexity) { stop("Perplexity is too large.") } if (!is.matrix(x = X)) { stop("Input X is not a matrix") } if (!(max_iter > 0)) { stop("Incorrect number of iterations.") } if (!is.wholenumber(x = stop_early_exag_iter) || stop_early_exag_iter < 0) { stop("stop_early_exag_iter should be a positive integer") } if (!is.numeric(x = exaggeration_factor)) { stop("exaggeration_factor should be numeric") } if (!is.numeric(df)) { stop("df should be numeric") } if (!is.wholenumber(x = dims) || dims <= 0) { stop("Incorrect dimensionality.") } if (search_k == -1) { if (perplexity > 0) { search_k <- n_trees * perplexity * 3 } else if (perplexity == 0) { search_k <- n_trees * max(perplexity_list) * 3 } else { search_k <- n_trees * K } } if (is.character(learning_rate) && learning_rate =='auto') { learning_rate = max(200, nrow(X)/exaggeration_factor) } if (is.character(start_late_exag_iter) && start_late_exag_iter =='auto') { if (late_exag_coeff > 0) { start_late_exag_iter = stop_early_exag_iter } else { start_late_exag_iter = -1 } } if (is.character(initialization) && initialization == 'pca') { if (rand_seed != -1) { set.seed(rand_seed) } if (requireNamespace("rsvd", quietly = TRUE)) { message('Using rsvd() to compute the top PCs for initialization.') X_c <- scale(x = X, center = TRUE, scale = FALSE) rsvd_out <- rsvd::rsvd(A = X_c, k = dims) X_top_pcs <- rsvd_out$u %*% diag(x = rsvd_out$d, nrow = dims) } else if (requireNamespace("irlba", quietly = TRUE)) { message('Using irlba() to compute the top PCs for initialization.') X_colmeans <- colMeans(x = X) irlba_out <- irlba::irlba(A = X, nv = dims, center = X_colmeans) X_top_pcs <- irlba_out$u %*% diag(x = irlba_out$d, nrow = dims) } else { stop( "By default, FIt-SNE initializes the embedding with the top PCs. We use either rsvd or irlba for fast computation. 
To use this functionality, please install the rsvd package with install.packages('rsvd') or the irlba package with install.packages('ilrba'). Otherwise, set initialization to NULL for random initialization, or any N by dims matrix for custom initialization." ) } initialization <- 0.0001*(X_top_pcs/sd(X_top_pcs[,1])) } else if (is.character(x = initialization) && initialization == 'random') { message('Random initialization') initialization = NULL } nbody_algo <- ifelse(test = fft_not_bh, yes = 2, no = 1) if (is.null(load_affinities)) { load_affinities <- 0 } else { if (load_affinities == 'load') { load_affinities <- 1 } else if (load_affinities == 'save') { load_affinities <- 2 } else { load_affinities <- 0 } } knn_algo <- ifelse(test = ann_not_vptree, yes = 1, no = 2) tX <- as.numeric(t(X)) f <- file(description = data_path, open = "wb") n = nrow(x = X) D = ncol(x = X) writeBin(object = as.integer(x = n), con = f, size = 4) writeBin(object = as.integer(x = D), con = f, size = 4) writeBin(object = as.numeric(x = theta), con = f, size = 8) writeBin(object = as.numeric(x = perplexity), con = f, size = 8) if (perplexity == 0) { writeBin(object = as.integer(x = length(x = perplexity_list)), con = f, size = 4) writeBin(object = perplexity_list, con = f) } writeBin(object = as.integer(x = dims), con = f, size = 4) #theta writeBin(object = as.integer(x = max_iter), con = f, size = 4) writeBin(object = as.integer(x = stop_early_exag_iter), con = f, size = 4) writeBin(object = as.integer(x = mom_switch_iter), con = f, size = 4) writeBin(object = as.numeric(x = momentum), con = f, size = 8) writeBin(object = as.numeric(x = final_momentum), con = f, size = 8) writeBin(object = as.numeric(x = learning_rate), con = f, size = 8) if (!(version_number %in% c('1.1.0', '1.0.0'))) { writeBin(object = as.numeric(x = max_step_norm), f, size = 8) } writeBin(object = as.integer(x = K), con = f, size = 4) #K writeBin(object = as.numeric(x = sigma), con = f, size = 8) #sigma writeBin(object = as.integer(x = nbody_algo), con = f, size = 4) #not barnes hut writeBin(object = as.integer(x = knn_algo), con = f, size = 4) writeBin(object = as.numeric(x = exaggeration_factor), con = f, size = 8) #compexag writeBin(object = as.integer(x = no_momentum_during_exag), con = f, size = 4) writeBin(object = as.integer(x = n_trees), con = f, size = 4) writeBin(object = as.integer(x = search_k), con = f, size = 4) writeBin(object = as.integer(x = start_late_exag_iter), con = f, size = 4) writeBin(object = as.numeric(x = late_exag_coeff), con = f, size = 8) writeBin(object = as.integer(x = nterms), con = f, size = 4) writeBin(object = as.numeric(x = intervals_per_integer), con = f, size = 8) writeBin(object = as.integer(x = min_num_intervals), con = f, size = 4) writeBin(object = tX, con = f) writeBin(object = as.integer(x = rand_seed), con = f, size = 4) if (version_number != "1.0.0") { writeBin(object = as.numeric(x = df), con = f, size = 8) } writeBin(object = as.integer(x = load_affinities), con = f, size = 4) if (!is.null(x = initialization)) { writeBin(object = c(t(x = initialization)), con = f) } close(con = f) if (version_number == "1.0.0") { flag <- system2( command = fast_tsne_path, args = c(data_path, result_path, nthreads) ) } else { flag <- system2( command = fast_tsne_path, args = c(version_number, data_path, result_path, nthreads) ) } if (flag != 0) { stop('tsne call failed') } f <- file(description = result_path, open = "rb") n <- readBin(con = f, what = integer(), n = 1, size = 4) d <- readBin(con = f, what = 
integer(), n = 1, size = 4) Y <- readBin(con = f, what = numeric(), n = n * d) Y <- t(x = matrix(Y, nrow = d)) if (get_costs) { tmp <- readBin(con = f, what = integer(), n = 1, size = 4) costs <- readBin(con = f, what = numeric(), n = max_iter, size = 8) Yout <- list(Y = Y, costs = costs) } else { Yout <- Y } close(con = f) file.remove(data_path) file.remove(result_path) return(Yout) } #internal # JackRandom <- function( scaled.data, prop.use = 0.01, r1.use = 1, r2.use = 5, seed.use = 1, rev.pca = FALSE, weight.by.var = weight.by.var, maxit = 1000 ) { if (!is.null(x = seed.use)) { set.seed(seed = seed.use) } rand.genes <- sample( x = rownames(x = scaled.data), size = nrow(x = scaled.data) * prop.use ) # make sure that rand.genes is at least 3 if (length(x = rand.genes) < 3) { rand.genes <- sample(x = rownames(x = scaled.data), size = 3) } data.mod <- scaled.data data.mod[rand.genes, ] <- MatrixRowShuffle(x = scaled.data[rand.genes, ]) temp.object <- RunPCA( object = data.mod, assay = "temp", npcs = r2.use, features = rownames(x = data.mod), rev.pca = rev.pca, weight.by.var = weight.by.var, verbose = FALSE, maxit = maxit ) return(Loadings(temp.object)[rand.genes, r1.use:r2.use]) } # Calculates the l2-norm of a vector # # Modified from PMA package # @references Witten, Tibshirani, and Hastie, Biostatistics 2009 # @references \url{https://github.com/cran/PMA/blob/master/R/PMD.R} # # @param vec numeric vector # # @return returns the l2-norm. # L2Norm <- function(vec) { a <- sqrt(x = sum(vec ^ 2)) if (a == 0) { a <- .05 } return(a) } # Prep data for dimensional reduction # # Common checks and preparatory steps before running certain dimensional # reduction techniques # # @param object Assay object # @param features Features to use as input for the dimensional reduction technique. # Default is variable features # @ param verbose Print messages and warnings # # PrepDR <- function( object, features = NULL, slot = 'scale.data', verbose = TRUE ) { if (length(x = VariableFeatures(object = object)) == 0 && is.null(x = features)) { stop("Variable features haven't been set. Run FindVariableFeatures() or provide a vector of feature names.") } data.use <- GetAssayData(object = object, slot = slot) if (nrow(x = data.use ) == 0 && slot == "scale.data") { stop("Data has not been scaled. 
Please run ScaleData and retry") } features <- features %||% VariableFeatures(object = object) features.keep <- unique(x = features[features %in% rownames(x = data.use)]) if (length(x = features.keep) < length(x = features)) { features.exclude <- setdiff(x = features, y = features.keep) if (verbose) { warning(paste0("The following ", length(x = features.exclude), " features requested have not been scaled (running reduction without them): ", paste0(features.exclude, collapse = ", "))) } } features <- features.keep if (inherits(x = data.use, what = 'dgCMatrix')) { features.var <- RowVarSparse(mat = data.use[features, ]) } else { features.var <- RowVar(x = data.use[features, ]) } features.keep <- features[features.var > 0] if (length(x = features.keep) < length(x = features)) { features.exclude <- setdiff(x = features, y = features.keep) if (verbose) { warning(paste0("The following ", length(x = features.exclude), " features requested have zero variance (running reduction without them): ", paste0(features.exclude, collapse = ", "))) } } features <- features.keep features <- features[!is.na(x = features)] data.use <- data.use[features, ] return(data.use) } #' @param assay Name of Assay SPCA is being run on #' @param npcs Total Number of SPCs to compute and store (50 by default) #' @param verbose Print the top genes associated with high/low loadings for #' the SPCs #' @param reduction.key dimensional reduction key, specifies the string before #' the number for the dimension names. SPC by default #' @param graph Graph used supervised by SPCA #' @param seed.use Set a random seed. By default, sets the seed to 42. Setting #' NULL will not set a seed. #' #' @importFrom irlba irlba #' #' @concept dimensional_reduction #' @rdname RunSPCA #' @export RunSPCA.default <- function( object, assay = NULL, npcs = 50, reduction.key = "SPC_", graph = NULL, verbose = FALSE, seed.use = 42, ... ) { if (!is.null(x = seed.use)) { set.seed(seed = seed.use) } npcs <- min(npcs, nrow(x = object) - 1) if (verbose) { message("Computing sPCA transformation") } HSIC <- object %*% graph %*% t(x = object) pca.results <- irlba(A = HSIC, nv = npcs) feature.loadings <- pca.results$u rownames(x = feature.loadings) <- rownames(x = object) cell.embeddings <- t(object) %*% feature.loadings colnames(x = cell.embeddings) <- colnames(x = feature.loadings) <- paste0(reduction.key, 1:ncol(x = cell.embeddings)) sdev <- pca.results$d / sqrt(max(1, nrow(x = HSIC) - 1)) reduction.data <- CreateDimReducObject( embeddings = cell.embeddings, loadings = feature.loadings, assay = assay, stdev = sdev, key = reduction.key ) return(reduction.data) } #' @param features Features to compute SPCA on. If features=NULL, SPCA will be run #' using the variable features for the Assay. #' #' @rdname RunSPCA #' @concept dimensional_reduction #' @export #' @method RunSPCA Assay #' RunSPCA.Assay <- function( object, assay = NULL, features = NULL, npcs = 50, reduction.key = "SPC_", graph = NULL, verbose = TRUE, seed.use = 42, ... ) { data.use <- PrepDR( object = object, features = features, verbose = verbose ) reduction.data <- RunSPCA( object = data.use, assay = assay, npcs = npcs, reduction.key = reduction.key, graph = graph, verbose = verbose, seed.use = seed.use, ... 
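    # as with PCA, PrepDR first restricts the assay's scaled data to usable features;
    # the matrix-level method then builds the HSIC matrix from the supplied graph and
    # takes its truncated SVD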
) return(reduction.data) } #' @param reduction.name dimensional reduction name, spca by default #' @rdname RunSPCA #' @concept dimensional_reduction #' @export #' @method RunSPCA Seurat #' RunSPCA.Seurat <- function( object, assay = NULL, features = NULL, npcs = 50, reduction.name = "spca", reduction.key = "SPC_", graph = NULL, verbose = TRUE, seed.use = 42, ... ) { assay <- assay %||% DefaultAssay(object = object) assay.data <- GetAssay(object = object, assay = assay) if (is.null(x = graph)) { stop("Graph is not provided") } else if (is.character(x = graph)) { graph <- object[[graph]] } reduction.data <- RunSPCA( object = assay.data, assay = assay, features = features, npcs = npcs, reduction.name = reduction.name, reduction.key = reduction.key, graph = graph, verbose = verbose, seed.use = seed.use, ... ) object[[reduction.name]] <- reduction.data object <- LogSeuratCommand(object = object) return(object) } #' @param assay Name of Assay SLSI is being run on #' @param n Total Number of SLSI components to compute and store #' @param verbose Display messages #' @param reduction.key dimensional reduction key, specifies the string before #' the number for the dimension names #' @param graph Graph used supervised by SLSI #' @param seed.use Set a random seed. Setting NULL will not set a seed. #' #' @importFrom irlba irlba #' @importMethodsFrom Matrix t #' #' @concept dimensional_reduction #' @rdname RunSLSI #' @export RunSLSI.default <- function( object, assay = NULL, n = 50, reduction.key = "SLSI_", graph = NULL, verbose = TRUE, seed.use = 42, ... ) { if (!is.null(x = seed.use)) { set.seed(seed = seed.use) } n <- min(n, nrow(x = object) - 1) if (verbose) { message("Smoothing peaks matrix") } object.smooth <- t(x = graph) %*% (t(x = object) %*% object) %*% graph if (verbose) { message("Performing eigendecomposition") } svd.V <- irlba(A = object.smooth, nv = n, nu = n, ...) sigma <- sqrt(x = svd.V$d) feature.loadings <- object %*% (graph %*% svd.V$u) %*% diag(x = 1/sigma) feature.loadings <- as.matrix(x = feature.loadings) cell.embeddings <- t(x = object) %*% feature.loadings %*% diag(x = 1/sigma) cell.embeddings <- as.matrix(x = cell.embeddings) # construct svd list stored in misc for LSI projection svd.lsi <- svd.V svd.lsi$d <- sigma svd.lsi$u <- feature.loadings svd.lsi$v <- cell.embeddings colnames(x = cell.embeddings) <- paste0(reduction.key, 1:ncol(cell.embeddings)) reduction.data <- CreateDimReducObject( embeddings = cell.embeddings, loadings = feature.loadings, key = reduction.key, assay = assay, misc = svd.lsi ) return(reduction.data) } #' @param features Features to compute SLSI on. If NULL, SLSI will be run #' using the variable features for the Assay. #' #' @rdname RunSLSI #' @concept dimensional_reduction #' @export #' @method RunSLSI Assay #' RunSLSI.Assay <- function( object, assay = NULL, features = NULL, n = 50, reduction.key = "SLSI_", graph = NULL, verbose = TRUE, seed.use = 42, ... ) { data.use <- PrepDR( object = object, features = features, slot = "data", verbose = verbose ) reduction.data <- RunSLSI( object = data.use, assay = assay, n = n, reduction.key = reduction.key, graph = graph, verbose = verbose, seed.use = seed.use, ... ) return(reduction.data) } #' @param reduction.name dimensional reduction name #' @rdname RunSLSI #' @concept dimensional_reduction #' @export #' @method RunSLSI Seurat #' RunSLSI.Seurat <- function( object, assay = NULL, features = NULL, n = 50, reduction.name = "slsi", reduction.key = "SLSI_", graph = NULL, verbose = TRUE, seed.use = 42, ... 
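  # Illustrative call (not run). 'obj' is a hypothetical Seurat object with a peak assay,
  # and "snn.graph" names a hypothetical precomputed Graph stored in it; the graph
  # argument may be either a Graph object or the name of one, as handled just below.
  #   obj <- RunSLSI(obj, graph = "snn.graph", n = 50)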
) { assay <- assay %||% DefaultAssay(object = object) assay.data <- GetAssay(object = object, assay = assay) if (is.null(x = graph)) { stop("Graph is not provided") } else if (is.character(x = graph)) { graph <- object[[graph]] } reduction.data <- RunSLSI( object = assay.data, assay = assay, features = features, n = n, reduction.name = reduction.name, reduction.key = reduction.key, graph = graph, verbose = verbose, seed.use = seed.use, ... ) object[[reduction.name]] <- reduction.data object <- LogSeuratCommand(object = object) return(object) } Seurat/R/clustering.R0000644000176200001440000015664614024674706014254 0ustar liggesusers#' @include generics.R #' NULL #%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% # Functions #%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% #' Construct weighted nearest neighbor graph #' #' This function will construct a weighted nearest neighbor (WNN) graph. For #' each cell, we identify the nearest neighbors based on a weighted combination #' of two modalities. Takes as input two dimensional reductions, one computed #' for each modality.Other parameters are listed for debugging, but can be left #' as default values. #' #' @param object A Seurat object #' @param reduction.list A list of two dimensional reductions, one for each of #' the modalities to be integrated #' @param dims.list A list containing the dimensions for each reduction to use #' @param k.nn the number of multimodal neighbors to compute. 20 by default #' @param l2.norm Perform L2 normalization on the cell embeddings after #' dimensional reduction. TRUE by default. #' @param knn.graph.name Multimodal knn graph name #' @param snn.graph.name Multimodal snn graph name #' @param weighted.nn.name Multimodal neighbor object name #' @param modality.weight.name Variable name to store modality weight in object #' meta data #' @param knn.range The number of approximate neighbors to compute #' @param prune.SNN Cutoff not to discard edge in SNN graph #' @param sd.scale The scaling factor for kernel width. 1 by default #' @param cross.contant.list Constant used to avoid divide-by-zero errors. 1e-4 #' by default #' @param smooth Smoothing modality score across each individual modality #' neighbors. FALSE by default #' @param return.intermediate Store intermediate results in misc #' @param modality.weight A \code{\link{ModalityWeights}} object generated by #' \code{FindModalityWeights} #' @param verbose Print progress bars and output #' #' @return Seurat object containing a nearest-neighbor object, KNN graph, and #' SNN graph - each based on a weighted combination of modalities. 
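#'
#' @examples
#' \dontrun{
#' # Illustrative only: assumes 'obj' already carries an RNA-derived "pca" reduction and
#' # a second modality reduction named "apca"; the dimension choices are arbitrary.
#' obj <- FindMultiModalNeighbors(
#'   object = obj,
#'   reduction.list = list("pca", "apca"),
#'   dims.list = list(1:30, 1:18)
#' )
#' }
#'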
#' @concept clustering #' @export #' FindMultiModalNeighbors <- function( object, reduction.list, dims.list, k.nn = 20, l2.norm = TRUE, knn.graph.name = "wknn", snn.graph.name = "wsnn", weighted.nn.name = "weighted.nn", modality.weight.name = NULL, knn.range = 200, prune.SNN = 1/15, sd.scale = 1, cross.contant.list = NULL, smooth = FALSE, return.intermediate = FALSE, modality.weight = NULL, verbose = TRUE ) { cross.contant.list <- cross.contant.list %||% as.list(x = rep(x = 1e-4, times = length(x = reduction.list))) if (is.null(x = modality.weight)) { if (verbose) { message("Calculating cell-specific modality weights") } modality.weight <- FindModalityWeights( object = object, reduction.list = reduction.list, dims.list = dims.list, k.nn = k.nn, sd.scale = sd.scale, l2.norm = l2.norm, cross.contant.list = cross.contant.list, smooth = smooth, verbose = verbose ) } modality.weight.name <- modality.weight.name %||% paste0(modality.weight@modality.assay, ".weight") modality.assay <- slot(object = modality.weight, name = "modality.assay") if (length(modality.weight.name) != length(reduction.list)) { warning("The number of provided modality.weight.name is not equal to the number of modalities. ", paste(paste0(modality.assay, ".weight"), collapse = " "), " are used to store the modality weights" ) modality.weight.name <- paste0(modality.assay, ".weight") } first.assay <- modality.assay[1] weighted.nn <- MultiModalNN( object = object, k.nn = k.nn, modality.weight = modality.weight, knn.range = knn.range, verbose = verbose ) select_nn <- Indices(object = weighted.nn) select_nn_dist <- Distances(object = weighted.nn) # compute KNN graph if (verbose) { message("Constructing multimodal KNN graph") } j <- as.numeric(x = t(x = select_nn )) i <- ((1:length(x = j)) - 1) %/% k.nn + 1 nn.matrix <- sparseMatrix( i = i, j = j, x = 1, dims = c(ncol(x = object), ncol(x = object)) ) diag(x = nn.matrix) <- 1 rownames(x = nn.matrix) <- colnames(x = nn.matrix) <- colnames(x = object) nn.matrix <- nn.matrix + t(x = nn.matrix) - t(x = nn.matrix) * nn.matrix nn.matrix <- as.Graph(x = nn.matrix) slot(object = nn.matrix, name = "assay.used") <- first.assay object[[knn.graph.name]] <- nn.matrix # compute SNN graph if (verbose) { message("Constructing multimodal SNN graph") } snn.matrix <- ComputeSNN(nn_ranked = select_nn, prune = prune.SNN) rownames(x = snn.matrix) <- colnames(x = snn.matrix) <- Cells(x = object) snn.matrix <- as.Graph(x = snn.matrix ) slot(object = snn.matrix, name = "assay.used") <- first.assay object[[snn.graph.name]] <- snn.matrix # add neighbors and modality weights object[[weighted.nn.name]] <- weighted.nn for (m in 1:length(x = modality.weight.name)) { object[[modality.weight.name[[m]]]] <- slot( object = modality.weight, name = "modality.weight.list" )[[m]] } # add command log modality.weight.command <- slot(object = modality.weight, name = "command") slot(object = modality.weight.command, name = "assay.used") <- first.assay modality.weight.command.name <- slot(object = modality.weight.command, name = "name") object[[modality.weight.command.name]] <- modality.weight.command command <- LogSeuratCommand(object = object, return.command = TRUE) slot(object = command, name = "params")$modality.weight <- NULL slot(object = command, name = "assay.used") <- first.assay command.name <- slot(object = command, name = "name") object[[command.name]] <- command if (return.intermediate) { Misc(object = object, slot = "modality.weight") <- modality.weight } return (object) } #' Find subclusters under one cluster 
#' #' @inheritParams FindClusters #' @param cluster The cluster to be sub-clustered #' @param subcluster.name The name of the sub-cluster column added to the meta.data #' #' @return Returns an object with the sub-cluster labels stored in the subcluster.name variable #' @concept clustering #' @export #' FindSubCluster <- function( object, cluster, graph.name, subcluster.name = "sub.cluster", resolution = 0.5, algorithm = 1 ) { sub.cell <- WhichCells(object = object, idents = cluster) sub.graph <- as.Graph(x = object[[graph.name]][sub.cell, sub.cell]) sub.clusters <- FindClusters( object = sub.graph, resolution = resolution, algorithm = algorithm ) sub.clusters[, 1] <- paste(cluster, sub.clusters[, 1], sep = "_") object[[subcluster.name]] <- as.character(x = Idents(object = object)) object[[subcluster.name]][sub.cell, ] <- sub.clusters[, 1] return(object) } #' Predict value from nearest neighbors #' #' This function predicts expression values or cell embeddings from the k nearest #' neighbor indices. For each cell, it averages the values of its k neighbors to #' obtain the imputed value. It can average expression values in assays and cell #' embeddings from dimensional reductions. #' #' @param object The object used to calculate knn #' @param nn.idx k nearest neighbor indices; a cells x k matrix #' @param assay Assay used for prediction #' @param reduction Cell embedding of the reduction used for prediction #' @param dims Dimensions of the cell embedding to use for prediction #' @param return.assay Return an assay or a predicted matrix #' @param slot Slot used for prediction #' @param features Features used for prediction #' @param mean.function The function used to calculate the row means #' @param seed Random seed used when checking whether the nearest neighbor of each #' cell is the query cell itself #' @param verbose Print progress #' #' @return Returns an assay containing the predicted expression values in the data #' slot #' @concept integration #' @export #' PredictAssay <- function( object, nn.idx, assay, reduction = NULL, dims = NULL, return.assay = TRUE, slot = "scale.data", features = NULL, mean.function = rowMeans, seed = 4273, verbose = TRUE ){ if (!inherits(x = mean.function, what = 'function')) { stop("'mean.function' must be a function") } if (is.null(x = reduction)) { reference.data <- GetAssayData( object = object, assay = assay, slot = slot ) features <- features %||% VariableFeatures(object = object[[assay]]) if (length(x = features) == 0) { features <- rownames(x = reference.data) if (verbose) { message("VariableFeatures are empty in the ", assay, " assay, features in the ", slot, " slot will be used" ) } } reference.data <- reference.data[features, , drop = FALSE] } else { if (is.null(x = dims)) { stop("dims is empty") } reference.data <- t(x = Embeddings(object = object, reduction = reduction)[, dims]) } set.seed(seed = seed) nn.check <- sample(x = 1:nrow(x = nn.idx), size = min(50, nrow(x = nn.idx))) if (all(nn.idx[nn.check, 1] == nn.check)) { if(verbose){ message("The nearest neighbor is the query cell itself, and it will not be used for prediction") } nn.idx <- nn.idx[,-1] } predicted <- apply( X = nn.idx, MARGIN = 1, FUN = function(x) mean.function(reference.data[, x] ) ) colnames(x = predicted) <- Cells(x = object) if (return.assay) { # TODO: restore once check.matrix is implemented in SeuratObject # predicted.assay <- CreateAssayObject(data = predicted, check.matrix = FALSE) predicted.assay <- CreateAssayObject(data = predicted) return (predicted.assay) } else { return (predicted) } }
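# Illustrative sketch of how FindSubCluster() and PredictAssay() might be called.
# It is wrapped in `if (FALSE)` so it is never evaluated; it assumes a processed
# Seurat object 'obj' with a "pca" reduction, an "RNA_snn" graph, scaled data in
# the RNA assay, and a cluster identity named "3" - all assumptions made for the
# example, not package defaults.
if (FALSE) {
  # split cluster "3" into finer groups using the precomputed SNN graph
  obj <- FindSubCluster(obj, cluster = "3", graph.name = "RNA_snn", resolution = 0.5)
  # smooth scale.data by averaging each cell's nearest neighbors in PCA space
  nn <- FindNeighbors(
    object = Embeddings(obj, reduction = "pca")[, 1:30],
    k.param = 20,
    return.neighbor = TRUE
  )
  smoothed <- PredictAssay(
    object = obj,
    nn.idx = Indices(nn),
    assay = "RNA",
    return.assay = FALSE
  )
}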
#%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% # Methods for Seurat-defined generics #%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% #' @importFrom pbapply pblapply #' @importFrom future.apply future_lapply #' @importFrom future nbrOfWorkers #' #' @param modularity.fxn Modularity function (1 = standard; 2 = alternative). #' @param initial.membership,node.sizes Parameters to pass to the Python leidenalg function. #' @param resolution Value of the resolution parameter, use a value above #' (below) 1.0 if you want to obtain a larger (smaller) number of communities. #' @param algorithm Algorithm for modularity optimization (1 = original Louvain #' algorithm; 2 = Louvain algorithm with multilevel refinement; 3 = SLM #' algorithm; 4 = Leiden algorithm). Leiden requires the leidenalg python module. #' @param method Method for running Leiden (defaults to matrix, which is fast for small datasets). #' Enable method = "igraph" to avoid casting large data to a dense matrix. #' @param n.start Number of random starts. #' @param n.iter Maximal number of iterations per random start. #' @param random.seed Seed of the random number generator. #' @param group.singletons Group singletons into nearest cluster. If FALSE, assign all singletons to #' a "singleton" group #' @param temp.file.location Directory where intermediate files will be written. #' Specify the ABSOLUTE path. #' @param edge.file.name Edge file to use as input for modularity optimizer jar. #' @param verbose Print output #' #' @rdname FindClusters #' @concept clustering #' @export #' FindClusters.default <- function( object, modularity.fxn = 1, initial.membership = NULL, node.sizes = NULL, resolution = 0.8, method = "matrix", algorithm = 1, n.start = 10, n.iter = 10, random.seed = 0, group.singletons = TRUE, temp.file.location = NULL, edge.file.name = NULL, verbose = TRUE, ... ) { CheckDots(...)
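  # 'resolution' may be a vector of values; each resolution is clustered
  # independently below and returned as its own 'res.<value>' column, computed
  # in parallel with future_lapply() when a multi-worker future plan is registered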
if (is.null(x = object)) { stop("Please provide an SNN graph") } if (tolower(x = algorithm) == "louvain") { algorithm <- 1 } if (tolower(x = algorithm) == "leiden") { algorithm <- 4 } if (nbrOfWorkers() > 1) { clustering.results <- future_lapply( X = resolution, FUN = function(r) { if (algorithm %in% c(1:3)) { ids <- RunModularityClustering( SNN = object, modularity = modularity.fxn, resolution = r, algorithm = algorithm, n.start = n.start, n.iter = n.iter, random.seed = random.seed, print.output = verbose, temp.file.location = temp.file.location, edge.file.name = edge.file.name ) } else if (algorithm == 4) { ids <- RunLeiden( object = object, method = method, partition.type = "RBConfigurationVertexPartition", initial.membership = initial.membership, node.sizes = node.sizes, resolution.parameter = r, random.seed = random.seed, n.iter = n.iter ) } else { stop("algorithm not recognised, please specify as an integer or string") } names(x = ids) <- colnames(x = object) ids <- GroupSingletons(ids = ids, SNN = object, verbose = verbose) results <- list(factor(x = ids)) names(x = results) <- paste0('res.', r) return(results) } ) clustering.results <- as.data.frame(x = clustering.results) } else { clustering.results <- data.frame(row.names = colnames(x = object)) for (r in resolution) { if (algorithm %in% c(1:3)) { ids <- RunModularityClustering( SNN = object, modularity = modularity.fxn, resolution = r, algorithm = algorithm, n.start = n.start, n.iter = n.iter, random.seed = random.seed, print.output = verbose, temp.file.location = temp.file.location, edge.file.name = edge.file.name) } else if (algorithm == 4) { ids <- RunLeiden( object = object, method = method, partition.type = "RBConfigurationVertexPartition", initial.membership = initial.membership, node.sizes = node.sizes, resolution.parameter = r, random.seed = random.seed, n.iter = n.iter ) } else { stop("algorithm not recognised, please specify as an integer or string") } names(x = ids) <- colnames(x = object) ids <- GroupSingletons(ids = ids, SNN = object, group.singletons = group.singletons, verbose = verbose) clustering.results[, paste0("res.", r)] <- factor(x = ids) } } return(clustering.results) } #' @importFrom methods is #' #' @param graph.name Name of graph to use for the clustering algorithm #' #' @rdname FindClusters #' @export #' @concept clustering #' @method FindClusters Seurat #' FindClusters.Seurat <- function( object, graph.name = NULL, modularity.fxn = 1, initial.membership = NULL, node.sizes = NULL, resolution = 0.8, method = "matrix", algorithm = 1, n.start = 10, n.iter = 10, random.seed = 0, group.singletons = TRUE, temp.file.location = NULL, edge.file.name = NULL, verbose = TRUE, ... ) { CheckDots(...) graph.name <- graph.name %||% paste0(DefaultAssay(object = object), "_snn") if (!graph.name %in% names(x = object)) { stop("Provided graph.name not present in Seurat object") } if (!is(object = object[[graph.name]], class2 = "Graph")) { stop("Provided graph.name does not correspond to a graph object.") } clustering.results <- FindClusters( object = object[[graph.name]], modularity.fxn = modularity.fxn, initial.membership = initial.membership, node.sizes = node.sizes, resolution = resolution, method = method, algorithm = algorithm, n.start = n.start, n.iter = n.iter, random.seed = random.seed, group.singletons = group.singletons, temp.file.location = temp.file.location, edge.file.name = edge.file.name, verbose = verbose, ... 
) colnames(x = clustering.results) <- paste0(graph.name, "_", colnames(x = clustering.results)) object <- AddMetaData(object = object, metadata = clustering.results) Idents(object = object) <- colnames(x = clustering.results)[ncol(x = clustering.results)] levels <- levels(x = object) levels <- tryCatch( expr = as.numeric(x = levels), warning = function(...) { return(levels) }, error = function(...) { return(levels) } ) Idents(object = object) <- factor(x = Idents(object = object), levels = sort(x = levels)) object[['seurat_clusters']] <- Idents(object = object) cmd <- LogSeuratCommand(object = object, return.command = TRUE) slot(object = cmd, name = 'assay.used') <- DefaultAssay(object = object[[graph.name]]) object[[slot(object = cmd, name = 'name')]] <- cmd return(object) } #' @param query Matrix of data to query against object. If missing, defaults to #' object. #' @param distance.matrix Boolean value of whether the provided matrix is a #' distance matrix; note, for objects of class \code{dist}, this parameter will #' be set automatically #' @param k.param Defines k for the k-nearest neighbor algorithm #' @param return.neighbor Return result as \code{\link{Neighbor}} object. Not #' used with distance matrix input. #' @param compute.SNN also compute the shared nearest neighbor graph #' @param prune.SNN Sets the cutoff for acceptable Jaccard index when #' computing the neighborhood overlap for the SNN construction. Any edges with #' values less than or equal to this will be set to 0 and removed from the SNN #' graph. Essentially sets the stringency of pruning (0 --- no pruning, 1 --- #' prune everything). #' @param nn.method Method for nearest neighbor finding. Options include: rann, #' annoy #' @param annoy.metric Distance metric for annoy. Options include: euclidean, #' cosine, manhattan, and hamming #' @param n.trees More trees gives higher precision when using annoy approximate #' nearest neighbor search #' @param nn.eps Error bound when performing nearest neighbor seach using RANN; #' default of 0.0 implies exact nearest neighbor search #' @param verbose Whether or not to print output to the console #' @param force.recalc Force recalculation of (S)NN. #' @param l2.norm Take L2Norm of the data #' @param cache.index Include cached index in returned Neighbor object #' (only relevant if return.neighbor = TRUE) #' @param index Precomputed index. Useful if querying new data against existing #' index to avoid recomputing. #' #' @importFrom RANN nn2 #' @importFrom methods as #' #' @rdname FindNeighbors #' @export #' @concept clustering #' @method FindNeighbors default #' FindNeighbors.default <- function( object, query = NULL, distance.matrix = FALSE, k.param = 20, return.neighbor = FALSE, compute.SNN = !return.neighbor, prune.SNN = 1/15, nn.method = "annoy", n.trees = 50, annoy.metric = "euclidean", nn.eps = 0, verbose = TRUE, force.recalc = FALSE, l2.norm = FALSE, cache.index = FALSE, index = NULL, ... ) { CheckDots(...) if (is.null(x = dim(x = object))) { warning( "Object should have two dimensions, attempting to coerce to matrix", call. = FALSE ) object <- as.matrix(x = object) } if (is.null(rownames(x = object))) { stop("Please provide rownames (cell names) with the input object") } n.cells <- nrow(x = object) if (n.cells < k.param) { warning( "k.param set larger than number of cells. Setting k.param to number of cells - 1.", call. 
= FALSE ) k.param <- n.cells - 1 } if (l2.norm) { object <- L2Norm(mat = object) query <- query %iff% L2Norm(mat = query) } query <- query %||% object # find the k-nearest neighbors for each single cell if (!distance.matrix) { if (verbose) { if (return.neighbor) { message("Computing nearest neighbors") } else { message("Computing nearest neighbor graph") } } nn.ranked <- NNHelper( data = object, query = query, k = k.param, method = nn.method, n.trees = n.trees, searchtype = "standard", eps = nn.eps, metric = annoy.metric, cache.index = cache.index, index = index ) if (return.neighbor) { if (compute.SNN) { warning("The SNN graph is not computed if return.neighbor is TRUE.", call. = FALSE) } return(nn.ranked) } nn.ranked <- Indices(object = nn.ranked) } else { if (verbose) { message("Building SNN based on a provided distance matrix") } knn.mat <- matrix(data = 0, ncol = k.param, nrow = n.cells) knd.mat <- knn.mat for (i in 1:n.cells) { knn.mat[i, ] <- order(object[i, ])[1:k.param] knd.mat[i, ] <- object[i, knn.mat[i, ]] } nn.ranked <- knn.mat[, 1:k.param] } # convert nn.ranked into a Graph j <- as.numeric(x = t(x = nn.ranked)) i <- ((1:length(x = j)) - 1) %/% k.param + 1 nn.matrix <- as(object = sparseMatrix(i = i, j = j, x = 1, dims = c(nrow(x = object), nrow(x = object))), Class = "Graph") rownames(x = nn.matrix) <- rownames(x = object) colnames(x = nn.matrix) <- rownames(x = object) neighbor.graphs <- list(nn = nn.matrix) if (compute.SNN) { if (verbose) { message("Computing SNN") } snn.matrix <- ComputeSNN( nn_ranked = nn.ranked, prune = prune.SNN ) rownames(x = snn.matrix) <- rownames(x = object) colnames(x = snn.matrix) <- rownames(x = object) snn.matrix <- as.Graph(x = snn.matrix) neighbor.graphs[["snn"]] <- snn.matrix } return(neighbor.graphs) } #' @rdname FindNeighbors #' @export #' @concept clustering #' @method FindNeighbors Assay #' FindNeighbors.Assay <- function( object, features = NULL, k.param = 20, return.neighbor = FALSE, compute.SNN = !return.neighbor, prune.SNN = 1/15, nn.method = "annoy", n.trees = 50, annoy.metric = "euclidean", nn.eps = 0, verbose = TRUE, force.recalc = FALSE, l2.norm = FALSE, cache.index = FALSE, ... ) { CheckDots(...) features <- features %||% VariableFeatures(object = object) data.use <- t(x = GetAssayData(object = object, slot = "data")[features, ]) neighbor.graphs <- FindNeighbors( object = data.use, k.param = k.param, compute.SNN = compute.SNN, prune.SNN = prune.SNN, nn.method = nn.method, n.trees = n.trees, annoy.metric = annoy.metric, nn.eps = nn.eps, verbose = verbose, force.recalc = force.recalc, l2.norm = l2.norm, return.neighbor = return.neighbor, cache.index = cache.index, ... ) return(neighbor.graphs) } #' @rdname FindNeighbors #' @export #' @concept clustering #' @method FindNeighbors dist #' FindNeighbors.dist <- function( object, k.param = 20, return.neighbor = FALSE, compute.SNN = !return.neighbor, prune.SNN = 1/15, nn.method = "annoy", n.trees = 50, annoy.metric = "euclidean", nn.eps = 0, verbose = TRUE, force.recalc = FALSE, l2.norm = FALSE, cache.index = FALSE, ... ) { CheckDots(...) return(FindNeighbors( object = as.matrix(x = object), distance.matrix = TRUE, k.param = k.param, compute.SNN = compute.SNN, prune.SNN = prune.SNN, nn.eps = nn.eps, nn.method = nn.method, n.trees = n.trees, annoy.metric = annoy.metric, verbose = verbose, force.recalc = force.recalc, l2.norm = l2.norm, return.neighbor = return.neighbor, cache.index = cache.index, ... 
)) } #' @param assay Assay to use in construction of (S)NN; used only when \code{dims} #' is \code{NULL} #' @param features Features to use as input for building the (S)NN; used only when #' \code{dims} is \code{NULL} #' @param reduction Reduction to use as input for building the (S)NN #' @param dims Dimensions of reduction to use as input #' @param do.plot Plot SNN graph on tSNE coordinates #' @param graph.name Optional naming parameter for stored (S)NN graph #' (or Neighbor object, if return.neighbor = TRUE). Default is assay.name_(s)nn. #' To store both the neighbor graph and the shared nearest neighbor (SNN) graph, #' you must supply a vector containing two names to the \code{graph.name} #' parameter. The first element in the vector will be used to store the nearest #' neighbor (NN) graph, and the second element used to store the SNN graph. If #' only one name is supplied, only the NN graph is stored. #' #' @importFrom igraph graph.adjacency plot.igraph E #' #' @rdname FindNeighbors #' @export #' @concept clustering #' @method FindNeighbors Seurat #' FindNeighbors.Seurat <- function( object, reduction = "pca", dims = 1:10, assay = NULL, features = NULL, k.param = 20, return.neighbor = FALSE, compute.SNN = !return.neighbor, prune.SNN = 1/15, nn.method = "annoy", n.trees = 50, annoy.metric = "euclidean", nn.eps = 0, verbose = TRUE, force.recalc = FALSE, do.plot = FALSE, graph.name = NULL, l2.norm = FALSE, cache.index = FALSE, ... ) { CheckDots(...) if (!is.null(x = dims)) { assay <- DefaultAssay(object = object[[reduction]]) data.use <- Embeddings(object = object[[reduction]]) if (max(dims) > ncol(x = data.use)) { stop("More dimensions specified in dims than have been computed") } data.use <- data.use[, dims] neighbor.graphs <- FindNeighbors( object = data.use, k.param = k.param, compute.SNN = compute.SNN, prune.SNN = prune.SNN, nn.method = nn.method, n.trees = n.trees, annoy.metric = annoy.metric, nn.eps = nn.eps, verbose = verbose, force.recalc = force.recalc, l2.norm = l2.norm, return.neighbor = return.neighbor, cache.index = cache.index, ... ) } else { assay <- assay %||% DefaultAssay(object = object) data.use <- GetAssay(object = object, assay = assay) neighbor.graphs <- FindNeighbors( object = data.use, features = features, k.param = k.param, compute.SNN = compute.SNN, prune.SNN = prune.SNN, nn.method = nn.method, n.trees = n.trees, annoy.metric = annoy.metric, nn.eps = nn.eps, verbose = verbose, force.recalc = force.recalc, l2.norm = l2.norm, return.neighbor = return.neighbor, cache.index = cache.index, ... ) } if (length(x = neighbor.graphs) == 1) { neighbor.graphs <- list(nn = neighbor.graphs) } graph.name <- graph.name %||% if (return.neighbor) { paste0(assay, ".", names(x = neighbor.graphs)) } else { paste0(assay, "_", names(x = neighbor.graphs)) } if (length(x = graph.name) == 1) { message("Only one graph name supplied, storing nearest-neighbor graph only") } for (ii in 1:length(x = graph.name)) { if (inherits(x = neighbor.graphs[[ii]], what = "Graph")) { DefaultAssay(object = neighbor.graphs[[ii]]) <- assay } object[[graph.name[[ii]]]] <- neighbor.graphs[[ii]] } if (do.plot) { if (!"tsne" %in% names(x = object@reductions)) { warning("Please compute a tSNE for SNN visualization. See RunTSNE().") } else { if (nrow(x = Embeddings(object = object[["tsne"]])) != ncol(x = object)) { warning("Please compute a tSNE for SNN visualization. 
See RunTSNE().") } else { net <- graph.adjacency( adjmatrix = as.matrix(x = neighbor.graphs[[2]]), mode = "undirected", weighted = TRUE, diag = FALSE ) plot.igraph( x = net, layout = as.matrix(x = Embeddings(object = object[["tsne"]])), edge.width = E(graph = net)$weight, vertex.label = NA, vertex.size = 0 ) } } } object <- LogSeuratCommand(object = object) return(object) } #%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% # Internal #%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% # Run annoy # # @param data Data to build the index with # @param query A set of data to be queried against data # @param metric Distance metric; can be one of "euclidean", "cosine", "manhattan", # "hamming" # @param n.trees More trees gives higher precision when querying # @param k Number of neighbors # @param search.k During the query it will inspect up to search_k nodes which # gives you a run-time tradeoff between better accuracy and speed. # @param include.distance Include the corresponding distances # @param index optional index object, will be recomputed if not provided # AnnoyNN <- function(data, query = data, metric = "euclidean", n.trees = 50, k, search.k = -1, include.distance = TRUE, index = NULL ) { idx <- index %||% AnnoyBuildIndex( data = data, metric = metric, n.trees = n.trees) nn <- AnnoySearch( index = idx, query = query, k = k, search.k = search.k, include.distance = include.distance) nn$idx <- idx nn$alg.info <- list(metric = metric, ndim = ncol(x = data)) return(nn) } # Build the annoy index # # @param data Data to build the index with # @param metric Distance metric; can be one of "euclidean", "cosine", "manhattan", # "hamming" # @param n.trees More trees gives higher precision when querying # #' @importFrom RcppAnnoy AnnoyEuclidean AnnoyAngular AnnoyManhattan AnnoyHamming # AnnoyBuildIndex <- function(data, metric = "euclidean", n.trees = 50) { f <- ncol(x = data) a <- switch( EXPR = metric, "euclidean" = new(Class = RcppAnnoy::AnnoyEuclidean, f), "cosine" = new(Class = RcppAnnoy::AnnoyAngular, f), "manhattan" = new(Class = RcppAnnoy::AnnoyManhattan, f), "hamming" = new(Class = RcppAnnoy::AnnoyHamming, f), stop ("Invalid metric") ) for (ii in seq(nrow(x = data))) { a$addItem(ii - 1, data[ii, ]) } a$build(n.trees) return(a) } # Search an Annoy approximate nearest neighbor index # # @param Annoy index, built with AnnoyBuildIndex # @param query A set of data to be queried against the index # @param k Number of neighbors # @param search.k During the query it will inspect up to search_k nodes which # gives you a run-time tradeoff between better accuracy and speed. 
# @param include.distance Include the corresponding distances in the result # # @return A list with 'nn.idx' (for each element in 'query', the index of the # nearest k elements in the index) and 'nn.dists' (the distances of the nearest # k elements) # #' @importFrom future plan #' @importFrom future.apply future_lapply # AnnoySearch <- function(index, query, k, search.k = -1, include.distance = TRUE) { n <- nrow(x = query) idx <- matrix(nrow = n, ncol = k) dist <- matrix(nrow = n, ncol = k) convert <- methods::is(index, "Rcpp_AnnoyAngular") if (!inherits(x = plan(), what = "multicore")) { oplan <- plan(strategy = "sequential") on.exit(plan(oplan), add = TRUE) } res <- future_lapply(X = 1:n, FUN = function(x) { res <- index$getNNsByVectorList(query[x, ], k, search.k, include.distance) # Convert from Angular to Cosine distance if (convert) { res$dist <- 0.5 * (res$dist * res$dist) } list(res$item + 1, res$distance) }) for (i in 1:n) { idx[i, ] <- res[[i]][[1]] if (include.distance) { dist[i, ] <- res[[i]][[2]] } } return(list(nn.idx = idx, nn.dists = dist)) } # Calculate mean distance of the farthest neighbors from SNN graph # # This function will compute the average distance of the farthest k.nn # neighbors with the lowest nonzero SNN edge weight. First, for each cell it # finds the k.nn neighbors with the smallest edge weight. If there are multiple # cells with the same edge weight at the k.nn-th index, consider all of those # cells in the next step. Next, it computes the euclidean distance to all k.nn # cells in the space defined by the embeddings matrix and returns the average # distance to the farthest k.nn cells. # # @param snn.graph An SNN graph # @param embeddings The cell embeddings used to calculate neighbor distances # @param k.nn The number of neighbors to calculate # @param l2.norm Perform L2 normalization on the cell embeddings # @param nearest.dist The vector of distance to the nearest neighbors to # subtract off from distance calculations # # ComputeSNNwidth <- function( snn.graph, embeddings, k.nn, l2.norm = TRUE, nearest.dist = NULL ) { if (l2.norm) { embeddings <- L2Norm(mat = embeddings) } nearest.dist <- nearest.dist %||% rep(x = 0, times = ncol(x = snn.graph)) if (length(x = nearest.dist) != ncol(x = snn.graph)) { stop("Please provide a vector for nearest.dist that has as many elements as", " there are columns in the snn.graph (", ncol(x = snn.graph), ").") } snn.width <- SNN_SmallestNonzero_Dist( snn = snn.graph, mat = embeddings, n = k.nn, nearest_dist = nearest.dist ) return (snn.width) } # Create an Annoy index # # @note Function exists because it's not exported from \pkg{uwot} # # @param name Distance metric name # @param ndim Number of dimensions # # @return An nn index object # #' @importFrom methods new #' @importFrom RcppAnnoy AnnoyAngular AnnoyManhattan AnnoyEuclidean AnnoyHamming # CreateAnn <- function(name, ndim) { return(switch( EXPR = name, cosine = new(Class = AnnoyAngular, ndim), manhattan = new(Class = AnnoyManhattan, ndim), euclidean = new(Class = AnnoyEuclidean, ndim), hamming = new(Class = AnnoyHamming, ndim), stop("BUG: unknown Annoy metric '", name, "'") )) } # Calculate modality weights # # This function calculates cell-specific modality weights which are used to # in WNN analysis. 
#' @inheritParams FindMultiModalNeighbors # @param object A Seurat object # @param snn.far.nn Use SNN farthest neighbors to calculate the kernel width # @param s.nn How many SNN neighbors to use in kernel width # @param sigma.idx Neighbor index used to calculate kernel width if snn.far.nn = FALSE # @importFrom pbapply pblapply # @return Returns a \code{ModalityWeights} object that can be used as input to # \code{\link{FindMultiModalNeighbors}} # #' @importFrom pbapply pblapply # FindModalityWeights <- function( object, reduction.list, dims.list, k.nn = 20, snn.far.nn = TRUE, s.nn = k.nn, prune.SNN = 0, l2.norm = TRUE, sd.scale = 1, query = NULL, cross.contant.list = NULL, sigma.idx = k.nn, smooth = FALSE, verbose = TRUE ) { my.lapply <- ifelse( test = verbose, yes = pblapply, no = lapply ) cross.contant.list <- cross.contant.list %||% as.list(x = rep(x = 1e-4, times = length(x = reduction.list))) reduction.set <- unlist(x = reduction.list) names(x = reduction.list) <- names(x = dims.list) <- names(x = cross.contant.list) <- reduction.set embeddings.list <- lapply( X = reduction.list, FUN = function(r) Embeddings(object = object, reduction = r)[, dims.list[[r]]] ) if (l2.norm) { embeddings.list.norm <- lapply( X = embeddings.list, FUN = function(embeddings) L2Norm(mat = embeddings) ) } else { embeddings.list.norm <- embeddings.list } if (is.null(x = query)) { query.embeddings.list.norm <- embeddings.list.norm query <- object } else { if (snn.far.nn) { stop("query does not support using snn to find distant neighbors") } query.embeddings.list <- lapply( X = reduction.list, FUN = function(r) { Embeddings(object = query, reduction = r)[, dims.list[[r]]] } ) if (l2.norm) { query.embeddings.list <- lapply( X = query.embeddings.list, FUN = function(embeddings) L2Norm(mat = embeddings) ) } query.embeddings.list.norm <- query.embeddings.list } if (verbose) { message("Finding ", k.nn, " nearest neighbors for each modality.") } nn.list <- my.lapply( X = reduction.list, FUN = function(r) { nn.r <- NNHelper( data = embeddings.list.norm[[r]], query = query.embeddings.list.norm[[r]], k = max(k.nn, sigma.idx, s.nn), method = "annoy", metric = "euclidean" ) return(nn.r) } ) sigma.nn.list <- nn.list if (sigma.idx > k.nn || s.nn > k.nn) { nn.list <- lapply( X = nn.list, FUN = function(nn){ slot(object = nn, name = "nn.idx") <- Indices(object = nn)[, 1:k.nn] slot(object = nn, name = "nn.dists") <- Distances(object = nn)[, 1:k.nn] return(nn) } ) } nearest_dist <- lapply(X = reduction.list, FUN = function(r) Distances(object = nn.list[[r]])[, 2]) within_impute <- list() cross_impute <- list() # Calculating within and cross modality distance for (r in reduction.set) { reduction.norm <- paste0(r, ".norm") object[[ reduction.norm ]] <- CreateDimReducObject( embeddings = embeddings.list.norm[[r]], key = paste0("norm", Key(object = object[[r]])), assay = DefaultAssay(object = object[[r]]) ) within_impute[[r]] <- PredictAssay( object = object, nn.idx = Indices(object = nn.list[[r]]), reduction = reduction.norm, dims = 1:ncol(x = embeddings.list.norm[[r]]), verbose = FALSE, return.assay = FALSE ) cross.modality <- setdiff(x = reduction.set, y = r) cross_impute[[r]] <- lapply(X = cross.modality, FUN = function(r2) { PredictAssay( object = object, nn.idx = Indices(object = nn.list[[r2]]), reduction = reduction.norm, dims = 1:ncol(x = embeddings.list.norm[[r]]), verbose = FALSE, return.assay = FALSE ) } ) names(x = cross_impute[[r]]) <- cross.modality } within_impute_dist <- lapply( X = reduction.list, FUN = function(r) { 
r_dist <- impute_dist( x = query.embeddings.list.norm[[r]], y = t(x = within_impute[[r]]), nearest.dist = nearest_dist[[r]] ) return(r_dist) } ) cross_impute_dist <- lapply( X = reduction.list, FUN = function(r) { r_dist <- sapply(setdiff(x = reduction.set, y = r), FUN = function(r2) { r2_dist <- impute_dist( x = query.embeddings.list.norm[[r]], y = t(x = cross_impute[[r]][[r2]]), nearest.dist = nearest_dist[[r]] ) return( r2_dist) }) return(r_dist) } ) # calculate kernel width if (snn.far.nn) { if (verbose) { message("Calculating kernel bandwidths") } snn.graph.list <- lapply( X = sigma.nn.list, FUN = function(nn) { snn.matrix <- ComputeSNN( nn_ranked = Indices(object = nn)[, 1:s.nn], prune = prune.SNN ) colnames(x = snn.matrix) <- rownames(x = snn.matrix) <- Cells(x = object) return (snn.matrix) } ) farthest_nn_dist <- my.lapply( X = 1:length(x = snn.graph.list), FUN = function(s) { distant_nn <- ComputeSNNwidth( snn.graph = snn.graph.list[[s]], k.nn = k.nn, l2.norm = FALSE, embeddings = embeddings.list.norm[[s]], nearest.dist = nearest_dist[[s]] ) return (distant_nn) } ) names(x = farthest_nn_dist) <- unlist(x = reduction.list) modality_sd.list <- lapply( X = farthest_nn_dist, FUN = function(sd) sd * sd.scale ) } else { if (verbose) { message("Calculating sigma by ", sigma.idx, "th neighbor") } modality_sd.list <- lapply( X = reduction.list , FUN = function(r) { rdist <- Distances(object = sigma.nn.list[[r]])[, sigma.idx] - nearest_dist[[r]] rdist <- rdist * sd.scale return (rdist) } ) } # Calculating within and cross modality kernel, and modality weights within_impute_kernel <- lapply( X = reduction.list, FUN = function(r) { exp(-1 * (within_impute_dist[[r]] / modality_sd.list[[r]]) ) } ) cross_impute_kernel <- lapply( X = reduction.list, FUN = function(r) { exp(-1 * (cross_impute_dist[[r]] / modality_sd.list[[r]]) ) } ) params <- list( "reduction.list" = reduction.list, "dims.list" = dims.list, "l2.norm" = l2.norm, "k.nn" = k.nn, "sigma.idx" = sigma.idx, "snn.far.nn" = snn.far.nn , "sigma.list" = modality_sd.list, "nearest.dist" = nearest_dist ) modality_score <- lapply( X = reduction.list, FUN = function(r) { score.r <- sapply( X = setdiff(x = reduction.set, y = r), FUN = function(r2) { score <- within_impute_kernel[[r]] / (cross_impute_kernel[[r]][, r2] + cross.contant.list[[r]]) score <- MinMax(data = score, min = 0, max = 200) return(score) } ) return(score.r) } ) if (smooth) { modality_score <- lapply( X = reduction.list, FUN = function(r) { apply( X = Indices(object = nn.list[[r]]), MARGIN = 1, FUN = function(nn) mean(x = modality_score[[r]][nn[-1]]) ) } ) } all_modality_score <- rowSums(x = exp(x = Reduce(f = cbind, x = modality_score))) modality.weight <- lapply( X = modality_score, FUN = function(score_m) { rowSums(x = exp(x = score_m))/all_modality_score } ) score.mat <- list( within_impute_dist = within_impute_dist, cross_impute_dist = cross_impute_dist, within_impute_kernel = within_impute_kernel, cross_impute_kernel = cross_impute_kernel, modality_score = modality_score ) # unlist the input parameters command <- LogSeuratCommand(object = object, return.command = TRUE) command@params <- lapply(X = command@params, FUN = function(l) unlist(x = l)) modality.assay <- sapply( X = reduction.list , FUN = function (r) slot(object[[r]], name = "assay.used") ) modality.weights.obj <- new( Class = "ModalityWeights", modality.weight.list = modality.weight, modality.assay = modality.assay, params = params, score.matrix = score.mat, command = command ) return(modality.weights.obj) } # 
Group single cells that make up their own cluster in with the cluster they are # most connected to. # # @param ids Named vector of cluster ids # @param SNN SNN graph used in clustering # @param group.singletons Group singletons into nearest cluster. If FALSE, assign all singletons to # a "singleton" group # # @return Returns Seurat object with all singletons merged with most connected cluster # GroupSingletons <- function(ids, SNN, group.singletons = TRUE, verbose = TRUE) { # identify singletons singletons <- c() singletons <- names(x = which(x = table(ids) == 1)) singletons <- intersect(x = unique(x = ids), singletons) if (!group.singletons) { ids[which(ids %in% singletons)] <- "singleton" return(ids) } # calculate connectivity of singletons to other clusters, add singleton # to cluster it is most connected to cluster_names <- as.character(x = unique(x = ids)) cluster_names <- setdiff(x = cluster_names, y = singletons) connectivity <- vector(mode = "numeric", length = length(x = cluster_names)) names(x = connectivity) <- cluster_names new.ids <- ids for (i in singletons) { i.cells <- names(which(ids == i)) for (j in cluster_names) { j.cells <- names(which(ids == j)) subSNN <- SNN[i.cells, j.cells] set.seed(1) # to match previous behavior, random seed being set in WhichCells if (is.object(x = subSNN)) { connectivity[j] <- sum(subSNN) / (nrow(x = subSNN) * ncol(x = subSNN)) } else { connectivity[j] <- mean(x = subSNN) } } m <- max(connectivity, na.rm = T) mi <- which(x = connectivity == m, arr.ind = TRUE) closest_cluster <- sample(x = names(x = connectivity[mi]), 1) ids[i.cells] <- closest_cluster } if (length(x = singletons) > 0 && verbose) { message(paste( length(x = singletons), "singletons identified.", length(x = unique(x = ids)), "final clusters." 
)) } return(ids) } # Find multimodal neighbors # # @param object The object used to calculate knn # @param query The query object when query and reference are different # @param modality.weight A \code{\link{ModalityWeights}} object generated by # \code{\link{FindModalityWeights}} # @param modality.weight.list A list of modality weight value # @param k.nn Number of nearest multimodal neighbors to compute # @param reduction.list A list of reduction name # @param dims.list A list of dimensions used for the reduction # @param knn.range The number of approximate neighbors to compute # @param kernel.power The power for the exponential kernel # @param nearest.dist The list of distance to the nearest neighbors # @param sigma.list The list of kernel width # @param l2.norm Perform L2 normalization on the cell embeddings after # dimensional reduction # @param verbose Print output to the console # @importFrom pbapply pblapply # @return return a list containing nn index and nn multimodal distance # #' @importFrom methods new #' @importClassesFrom SeuratObject Neighbor # MultiModalNN <- function( object, query = NULL, modality.weight = NULL, modality.weight.list = NULL, k.nn = NULL, reduction.list = NULL, dims.list = NULL, knn.range = 200, kernel.power = 1, nearest.dist = NULL, sigma.list = NULL, l2.norm = NULL, verbose = TRUE ){ my.lapply <- ifelse( test = verbose, yes = pblapply, no = lapply ) k.nn <- k.nn %||% slot(object = modality.weight, name = "params")$k.nn reduction.list <- reduction.list %||% slot(object = modality.weight, name = "params")$reduction.list dims.list = dims.list %||% slot(object = modality.weight, name = "params")$dims.list nearest.dist = nearest.dist %||% slot(object = modality.weight, name = "params")$nearest.dist sigma.list =sigma.list %||% slot(object = modality.weight, name = "params")$sigma.list l2.norm = l2.norm %||% slot(object = modality.weight, name = "params")$l2.norm modality.weight.value <- modality.weight.list %||% slot(object = modality.weight, name = "modality.weight.list") names(x = modality.weight.value) <- unlist(x = reduction.list) if (inherits(x = object, what = "Seurat")) { reduction_embedding <- lapply( X = 1:length(x = reduction.list), FUN = function(x) { Embeddings(object = object, reduction = reduction.list[[x]])[, dims.list[[x]]] } ) } else { reduction_embedding <- object } if (is.null(x = query)) { query.reduction_embedding <- reduction_embedding query <- object } else { if (inherits(x = object, what = "Seurat")) { query.reduction_embedding <- lapply( X = 1:length(x = reduction.list), FUN = function(x) { Embeddings(object = query, reduction = reduction.list[[x]] )[, dims.list[[x]]] } ) } else { query.reduction_embedding <- query } } if (l2.norm) { query.reduction_embedding <- lapply( X = query.reduction_embedding, FUN = function(x) L2Norm(mat = x) ) reduction_embedding <- lapply( X = reduction_embedding, FUN = function(x) L2Norm(mat = x) ) } query.cell.num <- nrow(x = query.reduction_embedding[[1]]) reduction.num <- length(x = query.reduction_embedding) if (verbose) { message("Finding multimodal neighbors") } reduction_nn <- my.lapply( X = 1:reduction.num, FUN = function(x) { nn_x <- NNHelper( data = reduction_embedding[[x]], query = query.reduction_embedding[[x]], k = knn.range, method = 'annoy', metric = "euclidean" ) return (nn_x) } ) # union of rna and adt nn, remove itself from neighobors reduction_nn <- lapply( X = reduction_nn, FUN = function(x) Indices(object = x)[, -1] ) nn_idx <- lapply( X = 1:query.cell.num , FUN = function(x) { Reduce( f = 
union, x = lapply( X = reduction_nn, FUN = function(y) y[x, ] ) ) } ) # calculate euclidean distance of all neighbors nn_dist <- my.lapply( X = 1:reduction.num, FUN = function(r) { nndist <- NNdist( nn.idx = nn_idx, embeddings = reduction_embedding[[r]], query.embeddings = query.reduction_embedding[[r]], nearest.dist = nearest.dist[[r]] ) return(nndist) } ) # modality weighted distance if (length(x = sigma.list[[1]]) == 1) { sigma.list <- lapply(X = sigma.list, FUN = function(x) rep(x = x, ncol(x = object))) } nn_weighted_dist <- lapply( X = 1:reduction.num, FUN = function(r) { lapply( X = 1:query.cell.num, FUN = function(x) { exp(-1*(nn_dist[[r]][[x]] / sigma.list[[r]][x] ) ** kernel.power) * modality.weight.value[[r]][x] } ) } ) nn_weighted_dist <- sapply( X = 1:query.cell.num, FUN = function(x) { Reduce( f = "+", x = lapply( X = 1:reduction.num, FUN = function(r) nn_weighted_dist[[r]][[x]] ) ) } ) # select k nearest joint neighbors select_order <- lapply( X = nn_weighted_dist, FUN = function(dist) { order(dist, decreasing = TRUE) }) select_nn <- t(x = sapply( X = 1:query.cell.num, FUN = function(x) nn_idx[[x]][select_order[[x]]][1:k.nn] ) ) select_dist <- t(x = sapply( X = 1:query.cell.num, FUN = function(x) nn_weighted_dist[[x]][select_order[[x]]][1:k.nn]) ) select_dist <- sqrt(x = MinMax(data = (1 - select_dist) / 2, min = 0, max = 1)) weighted.nn <- new( Class = 'Neighbor', nn.idx = select_nn, nn.dist = select_dist, alg.info = list(), cell.names = Cells(x = query) ) return(weighted.nn) } # Calculate NN distance for the given nn.idx # @param nn.idx The nearest neighbors position index # @param embeddings cell embeddings # @param metric distance metric # @param query.embeddings query cell embeddings # @param nearest.dist The list of distance to the nearest neighbors # NNdist <- function( nn.idx, embeddings, metric = "euclidean", query.embeddings = NULL, nearest.dist = NULL ) { if (!is.list(x = nn.idx)) { nn.idx <- lapply(X = 1:nrow(x = nn.idx), FUN = function(x) nn.idx[x, ]) } query.embeddings <- query.embeddings %||% embeddings nn.dist <- fast_dist( x = query.embeddings, y = embeddings, n = nn.idx ) if (!is.null(x = nearest.dist)) { nn.dist <- lapply( X = 1:nrow(x = query.embeddings), FUN = function(x) { r_dist = nn.dist[[x]] - nearest.dist[x] r_dist[r_dist < 0] <- 0 return(r_dist) } ) } return(nn.dist) } # Internal helper function to dispatch to various neighbor finding methods # # @param data Input data # @param query Data to query against data # @param k Number of nearest neighbors to compute # @param method Nearest neighbor method to use: "rann", "annoy" # @param cache.index Store algorithm index with results for reuse # @param ... additional parameters to specific neighbor finding method # #' @importFrom methods new #' @importClassesFrom SeuratObject Neighbor # NNHelper <- function(data, query = data, k, method, cache.index = FALSE, ...) { args <- as.list(x = sys.frame(which = sys.nframe())) args <- c(args, list(...)) results <- ( switch( EXPR = method, "rann" = { args <- args[intersect(x = names(x = args), y = names(x = formals(fun = nn2)))] do.call(what = 'nn2', args = args) }, "annoy" = { args <- args[intersect(x = names(x = args), y = names(x = formals(fun = AnnoyNN)))] do.call(what = 'AnnoyNN', args = args) }, stop("Invalid method. 
Please choose one of 'rann', 'annoy'") ) ) n.ob <- new( Class = 'Neighbor', nn.idx = results$nn.idx, nn.dist = results$nn.dists, alg.info = results$alg.info %||% list(), cell.names = rownames(x = query) ) if (isTRUE(x = cache.index) && !is.null(x = results$idx)) { slot(object = n.ob, name = "alg.idx") <- results$idx } return(n.ob) } # Run Leiden clustering algorithm # # Implements the Leiden clustering algorithm in R using reticulate # to run the Python version. Requires the python "leidenalg" and "igraph" modules # to be installed. Returns a vector of partition indices. # # @param adj_mat An adjacency matrix or SNN matrix # @param partition.type Type of partition to use for Leiden algorithm. # Defaults to RBConfigurationVertexPartition. Options include: ModularityVertexPartition, # RBERVertexPartition, CPMVertexPartition, MutableVertexPartition, # SignificanceVertexPartition, SurpriseVertexPartition (see the Leiden python # module documentation for more details) # @param initial.membership,node.sizes Parameters to pass to the Python leidenalg function. # @param resolution.parameter A parameter controlling the coarseness of the clusters # for Leiden algorithm. Higher values lead to more clusters. (defaults to 1.0 for # partition types that accept a resolution parameter) # @param random.seed Seed of the random number generator # @param n.iter Maximal number of iterations per random start # # @keywords graph network igraph mvtnorm simulation # #' @importFrom leiden leiden #' @importFrom reticulate py_module_available #' @importFrom igraph graph_from_adjacency_matrix graph_from_adj_list # # @author Tom Kelly # # @export # RunLeiden <- function( object, method = c("matrix", "igraph"), partition.type = c( 'RBConfigurationVertexPartition', 'ModularityVertexPartition', 'RBERVertexPartition', 'CPMVertexPartition', 'MutableVertexPartition', 'SignificanceVertexPartition', 'SurpriseVertexPartition' ), initial.membership = NULL, node.sizes = NULL, resolution.parameter = 1, random.seed = 0, n.iter = 10 ) { if (!py_module_available(module = 'leidenalg')) { stop( "Cannot find Leiden algorithm, please install through pip (e.g. pip install leidenalg).", call. = FALSE ) } switch( EXPR = method, "matrix" = { input <- as(object = object, Class = "matrix") }, "igraph" = { input <- if (inherits(x = object, what = 'list')) { graph_from_adj_list(adjlist = object) } else if (inherits(x = object, what = c('dgCMatrix', 'matrix', 'Matrix'))) { if (inherits(x = object, what = 'Graph')) { object <- as(object = object, Class = "dgCMatrix") } graph_from_adjacency_matrix(adjmatrix = object, weighted = TRUE) } else if (inherits(x = object, what = 'igraph')) { object } else { stop( "Method for Leiden not found for class", class(x = object), call. 
= FALSE ) } }, stop("Method for Leiden must be either 'matrix' or igraph'") ) #run leiden from CRAN package (calls python with reticulate) partition <- leiden( object = input, partition_type = partition.type, initial_membership = initial.membership, weights = NULL, node_sizes = node.sizes, resolution_parameter = resolution.parameter, seed = random.seed, n_iterations = n.iter ) return(partition) } # Runs the modularity optimizer (C++ port of java program ModularityOptimizer.jar) # # @param SNN SNN matrix to use as input for the clustering algorithms # @param modularity Modularity function to use in clustering (1 = standard; 2 = alternative) # @param resolution Value of the resolution parameter, use a value above (below) 1.0 if you want to obtain a larger (smaller) number of communities # @param algorithm Algorithm for modularity optimization (1 = original Louvain algorithm; 2 = Louvain algorithm with multilevel refinement; 3 = SLM algorithm; 4 = Leiden algorithm). Leiden requires the leidenalg python module. # @param n.start Number of random starts # @param n.iter Maximal number of iterations per random start # @param random.seed Seed of the random number generator # @param print.output Whether or not to print output to the console # @param temp.file.location Deprecated and no longer used # @param edge.file.name Path to edge file to use # # @return Seurat object with identities set to the results of the clustering procedure # #' @importFrom utils read.table write.table # RunModularityClustering <- function( SNN = matrix(), modularity = 1, resolution = 0.8, algorithm = 1, n.start = 10, n.iter = 10, random.seed = 0, print.output = TRUE, temp.file.location = NULL, edge.file.name = NULL ) { edge_file <- edge.file.name %||% '' clusters <- RunModularityClusteringCpp( SNN, modularity, resolution, algorithm, n.start, n.iter, random.seed, print.output, edge_file ) return(clusters) } Seurat/R/visualization.R0000644000176200001440000073222414170106546014760 0ustar liggesusers#' @importFrom utils globalVariables #' @importFrom ggplot2 ggproto GeomViolin #' @importFrom SeuratObject DefaultDimReduc #' NULL #%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% # Heatmaps #%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% #' Dimensional reduction heatmap #' #' Draws a heatmap focusing on a principal component. Both cells and genes are sorted by their #' principal component scores. Allows for nice visualization of sources of heterogeneity in the dataset. #' #' @inheritParams DoHeatmap #' @param dims Dimensions to plot #' @param nfeatures Number of genes to plot #' @param cells A list of cells to plot. If numeric, just plots the top cells. #' @param reduction Which dimensional reduction to use #' @param balanced Plot an equal number of genes with both + and - scores. #' @param projected Use the full projected dimensional reduction #' @param ncol Number of columns to plot #' @param fast If true, use \code{image} to generate plots; faster than using ggplot2, but not customizable #' @param assays A vector of assays to pull data from #' @param combine Combine plots into a single \code{\link[patchwork]{patchwork}ed} #' ggplot object. If \code{FALSE}, return a list of ggplot objects #' #' @return No return value by default. 
If using fast = FALSE, will return a #' \code{\link[patchwork]{patchwork}ed} ggplot object if combine = TRUE, otherwise #' returns a list of ggplot objects #' #' @importFrom patchwork wrap_plots #' @export #' @concept visualization #' #' @seealso \code{\link[graphics]{image}} \code{\link[ggplot2]{geom_raster}} #' #' @examples #' data("pbmc_small") #' DimHeatmap(object = pbmc_small) #' DimHeatmap <- function( object, dims = 1, nfeatures = 30, cells = NULL, reduction = 'pca', disp.min = -2.5, disp.max = NULL, balanced = TRUE, projected = FALSE, ncol = NULL, fast = TRUE, raster = TRUE, slot = 'scale.data', assays = NULL, combine = TRUE ) { ncol <- ncol %||% ifelse(test = length(x = dims) > 2, yes = 3, no = length(x = dims)) plots <- vector(mode = 'list', length = length(x = dims)) assays <- assays %||% DefaultAssay(object = object) disp.max <- disp.max %||% ifelse( test = slot == 'scale.data', yes = 2.5, no = 6 ) if (!DefaultAssay(object = object[[reduction]]) %in% assays) { warning("The original assay that the reduction was computed on is different than the assay specified") } cells <- cells %||% ncol(x = object) if (is.numeric(x = cells)) { cells <- lapply( X = dims, FUN = function(x) { cells <- TopCells( object = object[[reduction]], dim = x, ncells = cells, balanced = balanced ) if (balanced) { cells$negative <- rev(x = cells$negative) } cells <- unlist(x = unname(obj = cells)) return(cells) } ) } if (!is.list(x = cells)) { cells <- lapply(X = 1:length(x = dims), FUN = function(x) {return(cells)}) } features <- lapply( X = dims, FUN = TopFeatures, object = object[[reduction]], nfeatures = nfeatures, balanced = balanced, projected = projected ) features.all <- unique(x = unlist(x = features)) if (length(x = assays) > 1) { features.keyed <- lapply( X = assays, FUN = function(assay) { features <- features.all[features.all %in% rownames(x = object[[assay]])] if (length(x = features) > 0) { return(paste0(Key(object = object[[assay]]), features)) } } ) features.keyed <- Filter(f = Negate(f = is.null), x = features.keyed) features.keyed <- unlist(x = features.keyed) } else { features.keyed <- features.all DefaultAssay(object = object) <- assays } data.all <- FetchData( object = object, vars = features.keyed, cells = unique(x = unlist(x = cells)), slot = slot ) data.all <- MinMax(data = data.all, min = disp.min, max = disp.max) data.limits <- c(min(data.all), max(data.all)) # if (check.plot && any(c(length(x = features.keyed), length(x = cells[[1]])) > 700)) { # choice <- menu(c("Continue with plotting", "Quit"), title = "Plot(s) requested will likely take a while to plot.") # if (choice != 1) { # return(invisible(x = NULL)) # } # } if (fast) { nrow <- floor(x = length(x = dims) / 3.01) + 1 orig.par <- par()$mfrow par(mfrow = c(nrow, ncol)) } for (i in 1:length(x = dims)) { dim.features <- c(features[[i]][[2]], rev(x = features[[i]][[1]])) dim.features <- rev(x = unlist(x = lapply( X = dim.features, FUN = function(feat) { return(grep(pattern = paste0(feat, '$'), x = features.keyed, value = TRUE)) } ))) dim.cells <- cells[[i]] data.plot <- data.all[dim.cells, dim.features] if (fast) { SingleImageMap( data = data.plot, title = paste0(Key(object = object[[reduction]]), dims[i]), order = dim.cells ) } else { plots[[i]] <- SingleRasterMap( data = data.plot, raster = raster, limits = data.limits, cell.order = dim.cells, feature.order = dim.features ) } } if (fast) { par(mfrow = orig.par) return(invisible(x = NULL)) } if (combine) { plots <- wrap_plots(plots, ncol = ncol, guides = "collect") } 
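  # with fast = FALSE, return either the patchwork-combined plot (combine = TRUE)
  # or the list of individual ggplot objects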
return(plots) } #' Feature expression heatmap #' #' Draws a heatmap of single cell feature expression. #' #' @param object Seurat object #' @param features A vector of features to plot, defaults to \code{VariableFeatures(object = object)} #' @param cells A vector of cells to plot #' @param disp.min Minimum display value (all values below are clipped) #' @param disp.max Maximum display value (all values above are clipped); defaults to 2.5 #' if \code{slot} is 'scale.data', 6 otherwise #' @param group.by A vector of variables to group cells by; pass 'ident' to group by cell identity classes #' @param group.bar Add a color bar showing group status for cells #' @param group.colors Colors to use for the color bar #' @param slot Data slot to use, choose from 'raw.data', 'data', or 'scale.data' #' @param assay Assay to pull from # @param check.plot Check that plotting will finish in a reasonable amount of time #' @param label Label the cell identies above the color bar #' @param size Size of text above color bar #' @param hjust Horizontal justification of text above color bar #' @param angle Angle of text above color bar #' @param raster If true, plot with geom_raster, else use geom_tile. geom_raster may look blurry on #' some viewing applications such as Preview due to how the raster is interpolated. Set this to FALSE #' if you are encountering that issue (note that plots may take longer to produce/render). #' @param draw.lines Include white lines to separate the groups #' @param lines.width Integer number to adjust the width of the separating white lines. #' Corresponds to the number of "cells" between each group. #' @param group.bar.height Scale the height of the color bar #' @param combine Combine plots into a single \code{\link[patchwork]{patchwork}ed} #' ggplot object. 
If \code{FALSE}, return a list of ggplot objects #' #' @return A \code{\link[patchwork]{patchwork}ed} ggplot object if #' \code{combine = TRUE}; otherwise, a list of ggplot objects #' #' @importFrom stats median #' @importFrom scales hue_pal #' @importFrom ggplot2 annotation_raster coord_cartesian scale_color_discrete #' ggplot_build aes_string geom_text #' @importFrom patchwork wrap_plots #' @export #' @concept visualization #' #' @examples #' data("pbmc_small") #' DoHeatmap(object = pbmc_small) #' DoHeatmap <- function( object, features = NULL, cells = NULL, group.by = 'ident', group.bar = TRUE, group.colors = NULL, disp.min = -2.5, disp.max = NULL, slot = 'scale.data', assay = NULL, label = TRUE, size = 5.5, hjust = 0, angle = 45, raster = TRUE, draw.lines = TRUE, lines.width = NULL, group.bar.height = 0.02, combine = TRUE ) { cells <- cells %||% colnames(x = object) if (is.numeric(x = cells)) { cells <- colnames(x = object)[cells] } assay <- assay %||% DefaultAssay(object = object) DefaultAssay(object = object) <- assay features <- features %||% VariableFeatures(object = object) features <- rev(x = unique(x = features)) disp.max <- disp.max %||% ifelse( test = slot == 'scale.data', yes = 2.5, no = 6 ) # make sure features are present possible.features <- rownames(x = GetAssayData(object = object, slot = slot)) if (any(!features %in% possible.features)) { bad.features <- features[!features %in% possible.features] features <- features[features %in% possible.features] if(length(x = features) == 0) { stop("No requested features found in the ", slot, " slot for the ", assay, " assay.") } warning("The following features were omitted as they were not found in the ", slot, " slot for the ", assay, " assay: ", paste(bad.features, collapse = ", ")) } data <- as.data.frame(x = as.matrix(x = t(x = GetAssayData( object = object, slot = slot)[features, cells, drop = FALSE]))) object <- suppressMessages(expr = StashIdent(object = object, save.name = 'ident')) group.by <- group.by %||% 'ident' groups.use <- object[[group.by]][cells, , drop = FALSE] # group.use <- switch( # EXPR = group.by, # 'ident' = Idents(object = object), # object[[group.by, drop = TRUE]] # ) # group.use <- factor(x = group.use[cells]) plots <- vector(mode = 'list', length = ncol(x = groups.use)) for (i in 1:ncol(x = groups.use)) { data.group <- data group.use <- groups.use[, i, drop = TRUE] if (!is.factor(x = group.use)) { group.use <- factor(x = group.use) } names(x = group.use) <- cells if (draw.lines) { # create fake cells to serve as the white lines, fill with NAs lines.width <- lines.width %||% ceiling(x = nrow(x = data.group) * 0.0025) placeholder.cells <- sapply( X = 1:(length(x = levels(x = group.use)) * lines.width), FUN = function(x) { return(RandomName(length = 20)) } ) placeholder.groups <- rep(x = levels(x = group.use), times = lines.width) group.levels <- levels(x = group.use) names(x = placeholder.groups) <- placeholder.cells group.use <- as.vector(x = group.use) names(x = group.use) <- cells group.use <- factor(x = c(group.use, placeholder.groups), levels = group.levels) na.data.group <- matrix( data = NA, nrow = length(x = placeholder.cells), ncol = ncol(x = data.group), dimnames = list(placeholder.cells, colnames(x = data.group)) ) data.group <- rbind(data.group, na.data.group) } lgroup <- length(levels(group.use)) plot <- SingleRasterMap( data = data.group, raster = raster, disp.min = disp.min, disp.max = disp.max, feature.order = features, cell.order = names(x = sort(x = group.use)), group.by = group.use ) if 
(group.bar) { # TODO: Change group.bar to annotation.bar default.colors <- c(hue_pal()(length(x = levels(x = group.use)))) if (!is.null(x = names(x = group.colors))) { cols <- unname(obj = group.colors[levels(x = group.use)]) } else { cols <- group.colors[1:length(x = levels(x = group.use))] %||% default.colors } if (any(is.na(x = cols))) { cols[is.na(x = cols)] <- default.colors[is.na(x = cols)] cols <- Col2Hex(cols) col.dups <- sort(x = unique(x = which(x = duplicated(x = substr( x = cols, start = 1, stop = 7 ))))) through <- length(x = default.colors) while (length(x = col.dups) > 0) { pal.max <- length(x = col.dups) + through cols.extra <- hue_pal()(pal.max)[(through + 1):pal.max] cols[col.dups] <- cols.extra col.dups <- sort(x = unique(x = which(x = duplicated(x = substr( x = cols, start = 1, stop = 7 ))))) } } group.use2 <- sort(x = group.use) if (draw.lines) { na.group <- RandomName(length = 20) levels(x = group.use2) <- c(levels(x = group.use2), na.group) group.use2[placeholder.cells] <- na.group cols <- c(cols, "#FFFFFF") } pbuild <- ggplot_build(plot = plot) names(x = cols) <- levels(x = group.use2) # scale the height of the bar y.range <- diff(x = pbuild$layout$panel_params[[1]]$y.range) y.pos <- max(pbuild$layout$panel_params[[1]]$y.range) + y.range * 0.015 y.max <- y.pos + group.bar.height * y.range x.min <- min(pbuild$layout$panel_params[[1]]$x.range) + 0.1 x.max <- max(pbuild$layout$panel_params[[1]]$x.range) - 0.1 plot <- plot + annotation_raster( raster = t(x = cols[group.use2]), xmin = x.min, xmax = x.max, ymin = y.pos, ymax = y.max ) + coord_cartesian(ylim = c(0, y.max), clip = 'off') + scale_color_discrete(name = "Identity", na.translate = FALSE) if (label) { x.max <- max(pbuild$layout$panel_params[[1]]$x.range) # Attempt to pull xdivs from x.major in ggplot2 < 3.3.0; if NULL, pull from the >= 3.3.0 slot x.divs <- pbuild$layout$panel_params[[1]]$x.major %||% attr(x = pbuild$layout$panel_params[[1]]$x$get_breaks(), which = "pos") x <- data.frame(group = sort(x = group.use), x = x.divs) label.x.pos <- tapply(X = x$x, INDEX = x$group, FUN = function(y) { if (isTRUE(x = draw.lines)) { mean(x = y[-length(x = y)]) } else { mean(x = y) } }) label.x.pos <- data.frame(group = names(x = label.x.pos), label.x.pos) plot <- plot + geom_text( stat = "identity", data = label.x.pos, aes_string(label = 'group', x = 'label.x.pos'), y = y.max + y.max * 0.03 * 0.5, angle = angle, hjust = hjust, size = size ) plot <- suppressMessages(plot + coord_cartesian( ylim = c(0, y.max + y.max * 0.002 * max(nchar(x = levels(x = group.use))) * size), clip = 'off') ) } } plot <- plot + theme(line = element_blank()) plots[[i]] <- plot } if (combine) { plots <- wrap_plots(plots) } return(plots) } #' Hashtag oligo heatmap #' #' Draws a heatmap of hashtag oligo signals across singlets/doublets/negative cells. Allows for the visualization of HTO demultiplexing results. #' #' @param object Seurat object. Assumes that the hash tag oligo (HTO) data has been added and normalized, and demultiplexing has been run with HTODemux(). #' @param classification The naming for metadata column with classification result from HTODemux(). #' @param global.classification The slot for metadata column specifying a cell as singlet/doublet/negative. #' @param assay Hashtag assay name. #' @param ncells Number of cells to plot. Default is to choose 5000 cells by random subsampling, to avoid having to draw exceptionally large heatmaps. #' @param singlet.names Namings for the singlets. Default is to use the same names as HTOs. 
#' @param raster If true, plot with geom_raster, else use geom_tile. geom_raster may look blurry on #' some viewing applications such as Preview due to how the raster is interpolated. Set this to FALSE #' if you are encountering that issue (note that plots may take longer to produce/render). #' @return Returns a ggplot2 plot object. #' #' @importFrom ggplot2 guides #' @export #' @concept visualization #' #' @seealso \code{\link{HTODemux}} #' #' @examples #' \dontrun{ #' object <- HTODemux(object) #' HTOHeatmap(object) #' } #' HTOHeatmap <- function( object, assay = 'HTO', classification = paste0(assay, '_classification'), global.classification = paste0(assay, '_classification.global'), ncells = 5000, singlet.names = NULL, raster = TRUE ) { DefaultAssay(object = object) <- assay Idents(object = object) <- object[[classification, drop = TRUE]] if (ncells > ncol(x = object)) { warning("ncells (", ncells, ") is larger than the number of cells present in the provided object (", ncol(x = object), "). Plotting heatmap for all cells.") } else { object <- subset( x = object, cells = sample(x = colnames(x = object), size = ncells) ) } classification <- object[[classification]] singlets <- which(x = object[[global.classification]] == 'Singlet') singlet.ids <- sort(x = unique(x = as.character(x = classification[singlets, ]))) doublets <- which(object[[global.classification]] == 'Doublet') doublet.ids <- sort(x = unique(x = as.character(x = classification[doublets, ]))) heatmap.levels <- c(singlet.ids, doublet.ids, 'Negative') object <- ScaleData(object = object, assay = assay, verbose = FALSE) data <- FetchData(object = object, vars = singlet.ids) Idents(object = object) <- factor(x = classification[, 1], levels = heatmap.levels) plot <- SingleRasterMap( data = data, raster = raster, feature.order = rev(x = singlet.ids), cell.order = names(x = sort(x = Idents(object = object))), group.by = Idents(object = object) ) + guides(color = FALSE) return(plot) } #%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% # Expression by identity plots #%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% #' Single cell ridge plot #' #' Draws a ridge plot of single cell data (gene expression, metrics, PC #' scores, etc.) #' #' @param object Seurat object #' @param features Features to plot (gene expression, metrics, PC scores, #' anything that can be retreived by FetchData) #' @param cols Colors to use for plotting #' @param idents Which classes to include in the plot (default is all) #' @param sort Sort identity classes (on the x-axis) by the average #' expression of the attribute being potted, can also pass 'increasing' or 'decreasing' to change sort direction #' @param assay Name of assay to use, defaults to the active assay #' @param group.by Group (color) cells in different ways (for example, orig.ident) #' @param y.max Maximum y axis value #' @param same.y.lims Set all the y-axis limits to the same values #' @param log plot the feature axis on log scale #' @param ncol Number of columns if multiple plots are displayed #' @param slot Use non-normalized counts data for plotting #' @param stack Horizontally stack plots for each feature #' @param combine Combine plots into a single \code{\link[patchwork]{patchwork}ed} #' ggplot object. 
If \code{FALSE}, return a list of ggplot #' @param fill.by Color violins/ridges based on either 'feature' or 'ident' #' #' @return A \code{\link[patchwork]{patchwork}ed} ggplot object if #' \code{combine = TRUE}; otherwise, a list of ggplot objects #' #' @export #' @concept visualization #' #' @examples #' data("pbmc_small") #' RidgePlot(object = pbmc_small, features = 'PC_1') #' RidgePlot <- function( object, features, cols = NULL, idents = NULL, sort = FALSE, assay = NULL, group.by = NULL, y.max = NULL, same.y.lims = FALSE, log = FALSE, ncol = NULL, slot = 'data', stack = FALSE, combine = TRUE, fill.by = 'feature' ) { return(ExIPlot( object = object, type = 'ridge', features = features, idents = idents, ncol = ncol, sort = sort, assay = assay, y.max = y.max, same.y.lims = same.y.lims, cols = cols, group.by = group.by, log = log, slot = slot, stack = stack, combine = combine, fill.by = fill.by )) } #' Single cell violin plot #' #' Draws a violin plot of single cell data (gene expression, metrics, PC #' scores, etc.) #' #' @inheritParams RidgePlot #' @param pt.size Point size for geom_violin #' @param split.by A variable to split the violin plots by, #' @param split.plot plot each group of the split violin plots by multiple or #' single violin shapes. #' @param adjust Adjust parameter for geom_violin #' @param flip flip plot orientation (identities on x-axis) #' @param raster Convert points to raster format. Requires 'ggrastr' to be installed. # default is \code{NULL} which automatically rasterizes if ggrastr is installed and # number of points exceed 100,000. #' #' @return A \code{\link[patchwork]{patchwork}ed} ggplot object if #' \code{combine = TRUE}; otherwise, a list of ggplot objects #' #' @export #' @concept visualization #' #' @seealso \code{\link{FetchData}} #' #' @examples #' data("pbmc_small") #' VlnPlot(object = pbmc_small, features = 'PC_1') #' VlnPlot(object = pbmc_small, features = 'LYZ', split.by = 'groups') #' VlnPlot <- function( object, features, cols = NULL, pt.size = NULL, idents = NULL, sort = FALSE, assay = NULL, group.by = NULL, split.by = NULL, adjust = 1, y.max = NULL, same.y.lims = FALSE, log = FALSE, ncol = NULL, slot = 'data', split.plot = FALSE, stack = FALSE, combine = TRUE, fill.by = 'feature', flip = FALSE, raster = NULL ) { if ( !is.null(x = split.by) & getOption(x = 'Seurat.warn.vlnplot.split', default = TRUE) ) { message( "The default behaviour of split.by has changed.\n", "Separate violin plots are now plotted side-by-side.\n", "To restore the old behaviour of a single split violin,\n", "set split.plot = TRUE. \nThis message will be shown once per session." ) options(Seurat.warn.vlnplot.split = FALSE) } return(ExIPlot( object = object, type = ifelse(test = split.plot, yes = 'splitViolin', no = 'violin'), features = features, idents = idents, ncol = ncol, sort = sort, assay = assay, y.max = y.max, same.y.lims = same.y.lims, adjust = adjust, pt.size = pt.size, cols = cols, group.by = group.by, split.by = split.by, log = log, slot = slot, stack = stack, combine = combine, fill.by = fill.by, flip = flip, raster = raster )) } #%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% # Dimensional reduction plots #%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% #' Color dimensional reduction plot by tree split #' #' Returns a DimPlot colored based on whether the cells fall in clusters #' to the left or to the right of a node split in the cluster tree. 
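#'
#' The \code{node} argument below is an internal node ID of the cluster tree
#' built by \code{\link{BuildClusterTree}}; a minimal sketch of inspecting the
#' tree to pick a node, using the bundled \code{pbmc_small} data (requires the
#' ape package, as in the example further down; node IDs are read off the
#' plotted tree):
#' data("pbmc_small")
#' pbmc_small <- BuildClusterTree(object = pbmc_small, verbose = FALSE)
#' # Internal node IDs appear as labels on the plotted tree
#' PlotClusterTree(pbmc_small)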
#' #' @param object Seurat object #' @param node Node in cluster tree on which to base the split #' @param left.color Color for the left side of the split #' @param right.color Color for the right side of the split #' @param other.color Color for all other cells #' @inheritDotParams DimPlot -object #' #' @return Returns a DimPlot #' #' @export #' @concept visualization #' #' @seealso \code{\link{DimPlot}} #' #' @examples #' if (requireNamespace("ape", quietly = TRUE)) { #' data("pbmc_small") #' pbmc_small <- BuildClusterTree(object = pbmc_small, verbose = FALSE) #' PlotClusterTree(pbmc_small) #' ColorDimSplit(pbmc_small, node = 5) #' } #' ColorDimSplit <- function( object, node, left.color = 'red', right.color = 'blue', other.color = 'grey50', ... ) { CheckDots(..., fxns = 'DimPlot') tree <- Tool(object = object, slot = "BuildClusterTree") split <- tree$edge[which(x = tree$edge[, 1] == node), ][, 2] all.children <- sort(x = tree$edge[, 2][!tree$edge[, 2] %in% tree$edge[, 1]]) left.group <- DFT(tree = tree, node = split[1], only.children = TRUE) right.group <- DFT(tree = tree, node = split[2], only.children = TRUE) if (any(is.na(x = left.group))) { left.group <- split[1] } if (any(is.na(x = right.group))) { right.group <- split[2] } left.group <- MapVals(v = left.group, from = all.children, to = tree$tip.label) right.group <- MapVals(v = right.group, from = all.children, to = tree$tip.label) remaining.group <- setdiff(x = tree$tip.label, y = c(left.group, right.group)) left.cells <- WhichCells(object = object, ident = left.group) right.cells <- WhichCells(object = object, ident = right.group) remaining.cells <- WhichCells(object = object, ident = remaining.group) object <- SetIdent( object = object, cells = left.cells, value = "Left Split" ) object <- SetIdent( object = object, cells = right.cells, value = "Right Split" ) object <- SetIdent( object = object, cells = remaining.cells, value = "Not in Split" ) levels(x = object) <- c("Left Split", "Right Split", "Not in Split") colors.use = c(left.color, right.color, other.color) return(DimPlot(object = object, cols = colors.use, ...)) } #' Dimensional reduction plot #' #' Graphs the output of a dimensional reduction technique on a 2D scatter plot where each point is a #' cell and it's positioned based on the cell embeddings determined by the reduction technique. By #' default, cells are colored by their identity class (can be changed with the group.by parameter). #' #' @param object Seurat object #' @param dims Dimensions to plot, must be a two-length numeric vector specifying x- and y-dimensions #' @param cells Vector of cells to plot (default is all cells) #' @param cols Vector of colors, each color corresponds to an identity class. This may also be a single character #' or numeric value corresponding to a palette as specified by \code{\link[RColorBrewer]{brewer.pal.info}}. #' By default, ggplot2 assigns colors. We also include a number of palettes from the pals package. #' See \code{\link{DiscretePalette}} for details. #' @param pt.size Adjust point size for plotting #' @param reduction Which dimensionality reduction to use. If not specified, first searches for umap, then tsne, then pca #' @param group.by Name of one or more metadata columns to group (color) cells by #' (for example, orig.ident); pass 'ident' to group by identity class #' @param split.by Name of a metadata column to split plot by; #' see \code{\link{FetchData}} for more details #' @param shape.by If NULL, all points are circles (default). 
You can specify any #' cell attribute (that can be pulled with FetchData) allowing for both #' different colors and different shapes on cells. Only applicable if \code{raster = FALSE}. #' @param order Specify the order of plotting for the idents. This can be #' useful for crowded plots if points of interest are being buried. Provide #' either a full list of valid idents or a subset to be plotted last (on top) #' @param shuffle Whether to randomly shuffle the order of points. This can be #' useful for crowded plots if points of interest are being buried. (default is FALSE) #' @param seed Sets the seed if randomly shuffling the order of points. #' @param label Whether to label the clusters #' @param label.size Sets size of labels #' @param label.color Sets the color of the label text #' @param label.box Whether to put a box around the label text (geom_text vs #' geom_label) #' @param repel Repel labels #' @param cells.highlight A list of character or numeric vectors of cells to #' highlight. If only one group of cells desired, can simply #' pass a vector instead of a list. If set, colors selected cells to the color(s) #' in \code{cols.highlight} and other cells black (white if dark.theme = TRUE); #' will also resize to the size(s) passed to \code{sizes.highlight} #' @param cols.highlight A vector of colors to highlight the cells as; will #' repeat to the length groups in cells.highlight #' @param sizes.highlight Size of highlighted cells; will repeat to the length #' groups in cells.highlight #' @param na.value Color value for NA points when using custom scale #' @param ncol Number of columns for display when combining plots #' @param combine Combine plots into a single \code{\link[patchwork]{patchwork}ed} #' ggplot object. If \code{FALSE}, return a list of ggplot objects #' @param raster Convert points to raster format, default is \code{NULL} which #' automatically rasterizes if plotting more than 100,000 cells #' @param raster.dpi Pixel resolution for rasterized plots, passed to geom_scattermore(). #' Default is c(512, 512). #' #' @return A \code{\link[patchwork]{patchwork}ed} ggplot object if #' \code{combine = TRUE}; otherwise, a list of ggplot objects #' #' @importFrom rlang !! #' @importFrom ggplot2 facet_wrap vars sym labs #' @importFrom patchwork wrap_plots #' #' @export #' @concept visualization #' #' @note For the old \code{do.hover} and \code{do.identify} functionality, please see #' \code{HoverLocator} and \code{CellSelector}, respectively. 
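#'
#' A minimal sketch of the highlighting and ordering arguments described above,
#' using the bundled \code{pbmc_small} data (the highlighted cells are chosen
#' arbitrarily for illustration):
#' data("pbmc_small")
#' cells.use <- colnames(x = pbmc_small)[1:10]
#' DimPlot(
#'   object = pbmc_small,
#'   cells.highlight = list(highlighted = cells.use),
#'   cols.highlight = '#DE2D26',
#'   sizes.highlight = 2,
#'   shuffle = TRUE
#' )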
#' #' @aliases TSNEPlot PCAPlot ICAPlot #' @seealso \code{\link{FeaturePlot}} \code{\link{HoverLocator}} #' \code{\link{CellSelector}} \code{\link{FetchData}} #' #' @examples #' data("pbmc_small") #' DimPlot(object = pbmc_small) #' DimPlot(object = pbmc_small, split.by = 'ident') #' DimPlot <- function( object, dims = c(1, 2), cells = NULL, cols = NULL, pt.size = NULL, reduction = NULL, group.by = NULL, split.by = NULL, shape.by = NULL, order = NULL, shuffle = FALSE, seed = 1, label = FALSE, label.size = 4, label.color = 'black', label.box = FALSE, repel = FALSE, cells.highlight = NULL, cols.highlight = '#DE2D26', sizes.highlight = 1, na.value = 'grey50', ncol = NULL, combine = TRUE, raster = NULL, raster.dpi = c(512, 512) ) { if (length(x = dims) != 2) { stop("'dims' must be a two-length vector") } reduction <- reduction %||% DefaultDimReduc(object = object) cells <- cells %||% colnames(x = object) data <- Embeddings(object = object[[reduction]])[cells, dims] data <- as.data.frame(x = data) dims <- paste0(Key(object = object[[reduction]]), dims) object[['ident']] <- Idents(object = object) orig.groups <- group.by group.by <- group.by %||% 'ident' data <- cbind(data, object[[group.by]][cells, , drop = FALSE]) group.by <- colnames(x = data)[3:ncol(x = data)] for (group in group.by) { if (!is.factor(x = data[, group])) { data[, group] <- factor(x = data[, group]) } } if (!is.null(x = shape.by)) { data[, shape.by] <- object[[shape.by, drop = TRUE]] } if (!is.null(x = split.by)) { data[, split.by] <- object[[split.by, drop = TRUE]] } if (isTRUE(x = shuffle)) { set.seed(seed = seed) data <- data[sample(x = 1:nrow(x = data)), ] } plots <- lapply( X = group.by, FUN = function(x) { plot <- SingleDimPlot( data = data[, c(dims, x, split.by, shape.by)], dims = dims, col.by = x, cols = cols, pt.size = pt.size, shape.by = shape.by, order = order, label = FALSE, cells.highlight = cells.highlight, cols.highlight = cols.highlight, sizes.highlight = sizes.highlight, na.value = na.value, raster = raster, raster.dpi = raster.dpi ) if (label) { plot <- LabelClusters( plot = plot, id = x, repel = repel, size = label.size, split.by = split.by, box = label.box, color = label.color ) } if (!is.null(x = split.by)) { plot <- plot + FacetTheme() + facet_wrap( facets = vars(!!sym(x = split.by)), ncol = if (length(x = group.by) > 1 || is.null(x = ncol)) { length(x = unique(x = data[, split.by])) } else { ncol } ) } plot <- if (is.null(x = orig.groups)) { plot + labs(title = NULL) } else { plot + CenterTitle() } } ) if (!is.null(x = split.by)) { ncol <- 1 } if (combine) { plots <- wrap_plots(plots, ncol = orig.groups %iff% ncol) } return(plots) } #' Visualize 'features' on a dimensional reduction plot #' #' Colors single cells on a dimensional reduction plot according to a 'feature' #' (i.e. gene expression, PC scores, number of genes detected, etc.) #' #' @inheritParams DimPlot #' @param order Boolean determining whether to plot cells in order of expression. Can be useful if #' cells expressing given feature are getting buried. #' @param features Vector of features to plot. Features can come from: #' \itemize{ #' \item An \code{Assay} feature (e.g. a gene name - "MS4A1") #' \item A column name from meta.data (e.g. mitochondrial percentage - "percent.mito") #' \item A column name from a \code{DimReduc} object corresponding to the cell embedding values #' (e.g. the PC 1 scores - "PC_1") #' } #' @param cols The two colors to form the gradient over. 
Provide as string vector with #' the first color corresponding to low values, the second to high. Also accepts a Brewer #' color scale or vector of colors. Note: this will bin the data into number of colors provided. #' When blend is \code{TRUE}, takes anywhere from 1-3 colors: #' \describe{ #' \item{1 color:}{Treated as color for double-negatives, will use default colors 2 and 3 for per-feature expression} #' \item{2 colors:}{Treated as colors for per-feature expression, will use default color 1 for double-negatives} #' \item{3+ colors:}{First color used for double-negatives, colors 2 and 3 used for per-feature expression, all others ignored} #' } #' @param min.cutoff,max.cutoff Vector of minimum and maximum cutoff values for each feature, #' may specify quantile in the form of 'q##' where '##' is the quantile (eg, 'q1', 'q10') #' @param split.by A factor in object metadata to split the feature plot by, pass 'ident' #' to split by cell identity'; similar to the old \code{FeatureHeatmap} #' @param keep.scale How to handle the color scale across multiple plots. Options are: #' \itemize{ #' \item{"feature" (default; by row/feature scaling):}{ The plots for each individual feature are scaled to the maximum expression of the feature across the conditions provided to 'split.by'.} #' \item{"all" (universal scaling):}{ The plots for all features and conditions are scaled to the maximum expression value for the feature with the highest overall expression.} #' \item{NULL (no scaling):}{ Each individual plot is scaled to the maximum expression value of the feature in the condition provided to 'split.by'. Be aware setting NULL will result in color scales that are not comparable between plots.} #' } #' @param slot Which slot to pull expression data from? #' @param blend Scale and blend expression values to visualize coexpression of two features #' @param blend.threshold The color cutoff from weak signal to strong signal; ranges from 0 to 1. #' @param ncol Number of columns to combine multiple feature plots to, ignored if \code{split.by} is not \code{NULL} #' @param coord.fixed Plot cartesian coordinates with fixed aspect ratio #' @param by.col If splitting by a factor, plot the splits per column with the features as rows; ignored if \code{blend = TRUE} #' @param sort.cell Redundant with \code{order}. This argument is being #' deprecated. Please use \code{order} instead. #' @param interactive Launch an interactive \code{\link[Seurat:IFeaturePlot]{FeaturePlot}} #' @param combine Combine plots into a single \code{\link[patchwork]{patchwork}ed} #' ggplot object. If \code{FALSE}, return a list of ggplot objects #' #' @return A \code{\link[patchwork]{patchwork}ed} ggplot object if #' \code{combine = TRUE}; otherwise, a list of ggplot objects #' #' @importFrom grDevices rgb #' @importFrom patchwork wrap_plots #' @importFrom cowplot theme_cowplot #' @importFrom RColorBrewer brewer.pal.info #' @importFrom ggplot2 labs scale_x_continuous scale_y_continuous theme element_rect #' dup_axis guides element_blank element_text margin scale_color_brewer scale_color_gradientn #' scale_color_manual coord_fixed ggtitle #' #' @export #' @concept visualization #' #' @note For the old \code{do.hover} and \code{do.identify} functionality, please see #' \code{HoverLocator} and \code{CellSelector}, respectively. 
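#'
#' A minimal sketch of the cutoff and split behavior described above, using the
#' bundled \code{pbmc_small} data (the 'LYZ' feature and the 'groups' metadata
#' column both come from that example object):
#' data("pbmc_small")
#' # Clip the color scale at the 10th and 90th expression quantiles
#' FeaturePlot(object = pbmc_small, features = 'LYZ', min.cutoff = 'q10', max.cutoff = 'q90')
#' # One panel per level of 'groups', with a common scale per feature
#' FeaturePlot(object = pbmc_small, features = 'LYZ', split.by = 'groups', keep.scale = 'feature')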
#' #' @aliases FeatureHeatmap #' @seealso \code{\link{DimPlot}} \code{\link{HoverLocator}} #' \code{\link{CellSelector}} #' #' @examples #' data("pbmc_small") #' FeaturePlot(object = pbmc_small, features = 'PC_1') #' FeaturePlot <- function( object, features, dims = c(1, 2), cells = NULL, cols = if (blend) { c('lightgrey', '#ff0000', '#00ff00') } else { c('lightgrey', 'blue') }, pt.size = NULL, order = FALSE, min.cutoff = NA, max.cutoff = NA, reduction = NULL, split.by = NULL, keep.scale = "feature", shape.by = NULL, slot = 'data', blend = FALSE, blend.threshold = 0.5, label = FALSE, label.size = 4, label.color = "black", repel = FALSE, ncol = NULL, coord.fixed = FALSE, by.col = TRUE, sort.cell = NULL, interactive = FALSE, combine = TRUE, raster = NULL, raster.dpi = c(512, 512) ) { # TODO: deprecate fully on 3.2.0 if (!is.null(x = sort.cell)) { warning( "The sort.cell parameter is being deprecated. Please use the order ", "parameter instead for equivalent functionality.", call. = FALSE, immediate. = TRUE ) if (isTRUE(x = sort.cell)) { order <- sort.cell } } if (interactive) { return(IFeaturePlot( object = object, feature = features[1], dims = dims, reduction = reduction, slot = slot )) } # Check keep.scale param for valid entries if (!(is.null(x = keep.scale)) && !(keep.scale %in% c("feature", "all"))) { stop("`keep.scale` must be set to either `feature`, `all`, or NULL") } # Set a theme to remove right-hand Y axis lines # Also sets right-hand Y axis text label formatting no.right <- theme( axis.line.y.right = element_blank(), axis.ticks.y.right = element_blank(), axis.text.y.right = element_blank(), axis.title.y.right = element_text( face = "bold", size = 14, margin = margin(r = 7) ) ) # Get the DimReduc to use reduction <- reduction %||% DefaultDimReduc(object = object) if (length(x = dims) != 2 || !is.numeric(x = dims)) { stop("'dims' must be a two-length integer vector") } # Figure out blending stuff if (blend && length(x = features) != 2) { stop("Blending feature plots only works with two features") } # Set color scheme for blended FeaturePlots if (blend) { default.colors <- eval(expr = formals(fun = FeaturePlot)$cols) cols <- switch( EXPR = as.character(x = length(x = cols)), '0' = { warning( "No colors provided, using default colors", call. = FALSE, immediate. = TRUE ) default.colors }, '1' = { warning( "Only one color provided, assuming specified is double-negative and augmenting with default colors", call. = FALSE, immediate. = TRUE ) c(cols, default.colors[2:3]) }, '2' = { warning( "Only two colors provided, assuming specified are for features and agumenting with '", default.colors[1], "' for double-negatives", call. = FALSE, immediate. = TRUE ) c(default.colors[1], cols) }, '3' = cols, { warning( "More than three colors provided, using only first three", call. = FALSE, immediate. = TRUE ) cols[1:3] } ) } if (blend && length(x = cols) != 3) { stop("Blending feature plots only works with three colors; first one for negative cells") } # Name the reductions dims <- paste0(Key(object = object[[reduction]]), dims) cells <- cells %||% colnames(x = object) # Get plotting data data <- FetchData( object = object, vars = c(dims, 'ident', features), cells = cells, slot = slot ) # Check presence of features/dimensions if (ncol(x = data) < 4) { stop( "None of the requested features were found: ", paste(features, collapse = ', '), " in slot ", slot, call. = FALSE ) } else if (!all(dims %in% colnames(x = data))) { stop("The dimensions requested were not found", call. 
= FALSE) } features <- colnames(x = data)[4:ncol(x = data)] # Determine cutoffs min.cutoff <- mapply( FUN = function(cutoff, feature) { return(ifelse( test = is.na(x = cutoff), yes = min(data[, feature]), no = cutoff )) }, cutoff = min.cutoff, feature = features ) max.cutoff <- mapply( FUN = function(cutoff, feature) { return(ifelse( test = is.na(x = cutoff), yes = max(data[, feature]), no = cutoff )) }, cutoff = max.cutoff, feature = features ) check.lengths <- unique(x = vapply( X = list(features, min.cutoff, max.cutoff), FUN = length, FUN.VALUE = numeric(length = 1) )) if (length(x = check.lengths) != 1) { stop("There must be the same number of minimum and maximum cuttoffs as there are features") } brewer.gran <- ifelse( test = length(x = cols) == 1, yes = brewer.pal.info[cols, ]$maxcolors, no = length(x = cols) ) # Apply cutoffs data[, 4:ncol(x = data)] <- sapply( X = 4:ncol(x = data), FUN = function(index) { data.feature <- as.vector(x = data[, index]) min.use <- SetQuantile(cutoff = min.cutoff[index - 3], data.feature) max.use <- SetQuantile(cutoff = max.cutoff[index - 3], data.feature) data.feature[data.feature < min.use] <- min.use data.feature[data.feature > max.use] <- max.use if (brewer.gran == 2) { return(data.feature) } data.cut <- if (all(data.feature == 0)) { 0 } else { as.numeric(x = as.factor(x = cut( x = as.numeric(x = data.feature), breaks = brewer.gran ))) } return(data.cut) } ) colnames(x = data)[4:ncol(x = data)] <- features rownames(x = data) <- cells # Figure out splits (FeatureHeatmap) data$split <- if (is.null(x = split.by)) { RandomName() } else { switch( EXPR = split.by, ident = Idents(object = object)[cells, drop = TRUE], object[[split.by, drop = TRUE]][cells, drop = TRUE] ) } if (!is.factor(x = data$split)) { data$split <- factor(x = data$split) } # Set shaping variable if (!is.null(x = shape.by)) { data[, shape.by] <- object[[shape.by, drop = TRUE]] } # Make list of plots plots <- vector( mode = "list", length = ifelse( test = blend, yes = 4, no = length(x = features) * length(x = levels(x = data$split)) ) ) # Apply common limits xlims <- c(floor(x = min(data[, dims[1]])), ceiling(x = max(data[, dims[1]]))) ylims <- c(floor(min(data[, dims[2]])), ceiling(x = max(data[, dims[2]]))) # Set blended colors if (blend) { ncol <- 4 color.matrix <- BlendMatrix( two.colors = cols[2:3], col.threshold = blend.threshold, negative.color = cols[1] ) cols <- cols[2:3] colors <- list( color.matrix[, 1], color.matrix[1, ], as.vector(x = color.matrix) ) } # Make the plots for (i in 1:length(x = levels(x = data$split))) { # Figure out which split we're working with ident <- levels(x = data$split)[i] data.plot <- data[as.character(x = data$split) == ident, , drop = FALSE] # Blend expression values if (blend) { features <- features[1:2] no.expression <- features[colMeans(x = data.plot[, features]) == 0] if (length(x = no.expression) != 0) { stop( "The following features have no value: ", paste(no.expression, collapse = ', '), call. 
= FALSE ) } data.plot <- cbind(data.plot[, c(dims, 'ident')], BlendExpression(data = data.plot[, features[1:2]])) features <- colnames(x = data.plot)[4:ncol(x = data.plot)] } # Make per-feature plots for (j in 1:length(x = features)) { feature <- features[j] # Get blended colors if (blend) { cols.use <- as.numeric(x = as.character(x = data.plot[, feature])) + 1 cols.use <- colors[[j]][sort(x = unique(x = cols.use))] } else { cols.use <- NULL } data.single <- data.plot[, c(dims, 'ident', feature, shape.by)] # Make the plot plot <- SingleDimPlot( data = data.single, dims = dims, col.by = feature, order = order, pt.size = pt.size, cols = cols.use, shape.by = shape.by, label = FALSE, raster = raster, raster.dpi = raster.dpi ) + scale_x_continuous(limits = xlims) + scale_y_continuous(limits = ylims) + theme_cowplot() + CenterTitle() # theme(plot.title = element_text(hjust = 0.5)) # Add labels if (label) { plot <- LabelClusters( plot = plot, id = 'ident', repel = repel, size = label.size, color = label.color ) } # Make FeatureHeatmaps look nice(ish) if (length(x = levels(x = data$split)) > 1) { plot <- plot + theme(panel.border = element_rect(fill = NA, colour = 'black')) # Add title plot <- plot + if (i == 1) { labs(title = feature) } else { labs(title = NULL) } # Add second axis if (j == length(x = features) && !blend) { suppressMessages( expr = plot <- plot + scale_y_continuous( sec.axis = dup_axis(name = ident), limits = ylims ) + no.right ) } # Remove left Y axis if (j != 1) { plot <- plot + theme( axis.line.y = element_blank(), axis.ticks.y = element_blank(), axis.text.y = element_blank(), axis.title.y.left = element_blank() ) } # Remove bottom X axis if (i != length(x = levels(x = data$split))) { plot <- plot + theme( axis.line.x = element_blank(), axis.ticks.x = element_blank(), axis.text.x = element_blank(), axis.title.x = element_blank() ) } } else { plot <- plot + labs(title = feature) } # Add colors scale for normal FeaturePlots if (!blend) { plot <- plot + guides(color = NULL) cols.grad <- cols if (length(x = cols) == 1) { plot <- plot + scale_color_brewer(palette = cols) } else if (length(x = cols) > 1) { unique.feature.exp <- unique(data.plot[, feature]) if (length(unique.feature.exp) == 1) { warning("All cells have the same value (", unique.feature.exp, ") of ", feature, ".") if (unique.feature.exp == 0) { cols.grad <- cols[1] } else{ cols.grad <- cols } } plot <- suppressMessages( expr = plot + scale_color_gradientn( colors = cols.grad, guide = "colorbar" ) ) } } if (!(is.null(x = keep.scale)) && keep.scale == "feature" && !blend) { max.feature.value <- max(data[, feature]) min.feature.value <- min(data[, feature]) plot <- suppressMessages(plot & scale_color_gradientn(colors = cols, limits = c(min.feature.value, max.feature.value))) } # Add coord_fixed if (coord.fixed) { plot <- plot + coord_fixed() } # I'm not sure why, but sometimes the damn thing fails without this # Thanks ggplot2 plot <- plot # Place the plot plots[[(length(x = features) * (i - 1)) + j]] <- plot } } # Add blended color key if (blend) { blend.legend <- BlendMap(color.matrix = color.matrix) for (ii in 1:length(x = levels(x = data$split))) { suppressMessages(expr = plots <- append( x = plots, values = list( blend.legend + scale_y_continuous( sec.axis = dup_axis(name = ifelse( test = length(x = levels(x = data$split)) > 1, yes = levels(x = data$split)[ii], no = '' )), expand = c(0, 0) ) + labs( x = features[1], y = features[2], title = if (ii == 1) { paste('Color threshold:', blend.threshold) } else { NULL } ) 
+ no.right ), after = 4 * ii - 1 )) } } # Remove NULL plots plots <- Filter(f = Negate(f = is.null), x = plots) # Combine the plots if (is.null(x = ncol)) { ncol <- 2 if (length(x = features) == 1) { ncol <- 1 } if (length(x = features) > 6) { ncol <- 3 } if (length(x = features) > 9) { ncol <- 4 } } ncol <- ifelse( test = is.null(x = split.by) || blend, yes = ncol, no = length(x = features) ) legend <- if (blend) { 'none' } else { split.by %iff% 'none' } # Transpose the FeatureHeatmap matrix (not applicable for blended FeaturePlots) if (combine) { if (by.col && !is.null(x = split.by) && !blend) { plots <- lapply( X = plots, FUN = function(x) { return(suppressMessages( expr = x + theme_cowplot() + ggtitle("") + scale_y_continuous(sec.axis = dup_axis(name = ""), limits = ylims) + no.right )) } ) nsplits <- length(x = levels(x = data$split)) idx <- 1 for (i in (length(x = features) * (nsplits - 1) + 1):(length(x = features) * nsplits)) { plots[[i]] <- suppressMessages( expr = plots[[i]] + scale_y_continuous( sec.axis = dup_axis(name = features[[idx]]), limits = ylims ) + no.right ) idx <- idx + 1 } idx <- 1 for (i in which(x = 1:length(x = plots) %% length(x = features) == 1)) { plots[[i]] <- plots[[i]] + ggtitle(levels(x = data$split)[[idx]]) + theme(plot.title = element_text(hjust = 0.5)) idx <- idx + 1 } idx <- 1 if (length(x = features) == 1) { for (i in 1:length(x = plots)) { plots[[i]] <- plots[[i]] + ggtitle(levels(x = data$split)[[idx]]) + theme(plot.title = element_text(hjust = 0.5)) idx <- idx + 1 } ncol <- 1 nrow <- nsplits } else { nrow <- split.by %iff% length(x = levels(x = data$split)) } plots <- plots[c(do.call( what = rbind, args = split(x = 1:length(x = plots), f = ceiling(x = seq_along(along.with = 1:length(x = plots)) / length(x = features))) ))] # Set ncol to number of splits (nrow) and nrow to number of features (ncol) plots <- wrap_plots(plots, ncol = nrow, nrow = ncol) if (!is.null(x = legend) && legend == 'none') { plots <- plots & NoLegend() } } else { plots <- wrap_plots(plots, ncol = ncol, nrow = split.by %iff% length(x = levels(x = data$split))) } if (!is.null(x = legend) && legend == 'none') { plots <- plots & NoLegend() } if (!(is.null(x = keep.scale)) && keep.scale == "all" && !blend) { max.feature.value <- max(data[, features]) min.feature.value <- min(data[, features]) plots <- suppressMessages(plots & scale_color_gradientn(colors = cols, limits = c(min.feature.value, max.feature.value))) } } return(plots) } #' Visualize features in dimensional reduction space interactively #' #' @inheritParams FeaturePlot #' @param feature Feature to plot #' #' @return Returns the final plot as a ggplot object #' #' @importFrom cowplot theme_cowplot #' @importFrom ggplot2 theme element_text guides scale_color_gradientn #' @importFrom miniUI miniPage miniButtonBlock miniTitleBarButton miniContentPanel #' @importFrom shiny fillRow sidebarPanel selectInput plotOutput reactiveValues #' observeEvent stopApp observe updateSelectInput renderPlot runGadget #' #' @export #' @concept visualization #' IFeaturePlot <- function(object, feature, dims = c(1, 2), reduction = NULL, slot = 'data') { # Set initial data values feature.label <- 'Feature to visualize' assay.keys <- Key(object = object)[Assays(object = object)] keyed <- sapply(X = assay.keys, FUN = grepl, x = feature) assay <- if (any(keyed)) { names(x = which(x = keyed))[1] } else { DefaultAssay(object = object) } features <- sort(x = rownames(x = GetAssayData( object = object, slot = slot, assay = assay ))) assays.use <- 
vapply( X = Assays(object = object), FUN = function(x) { return(!IsMatrixEmpty(x = GetAssayData( object = object, slot = slot, assay = x ))) }, FUN.VALUE = logical(length = 1L) ) assays.use <- sort(x = Assays(object = object)[assays.use]) reduction <- reduction %||% DefaultDimReduc(object = object) dims.reduc <- gsub( pattern = Key(object = object[[reduction]]), replacement = '', x = colnames(x = object[[reduction]]) ) # Set up the gadget UI ui <- miniPage( miniButtonBlock(miniTitleBarButton( inputId = 'done', label = 'Done', primary = TRUE )), miniContentPanel( fillRow( sidebarPanel( selectInput( inputId = 'assay', label = 'Assay', choices = assays.use, selected = assay, selectize = FALSE, width = '100%' ), selectInput( inputId = 'feature', label = feature.label, choices = features, selected = feature, selectize = FALSE, width = '100%' ), selectInput( inputId = 'reduction', label = 'Dimensional reduction', choices = Reductions(object = object), selected = reduction, selectize = FALSE, width = '100%' ), selectInput( inputId = 'xdim', label = 'X dimension', choices = dims.reduc, selected = as.character(x = dims[1]), selectize = FALSE, width = '100%' ), selectInput( inputId = 'ydim', label = 'Y dimension', choices = dims.reduc, selected = as.character(x = dims[2]), selectize = FALSE, width = '100%' ), selectInput( inputId = 'palette', label = 'Color scheme', choices = names(x = FeaturePalettes), selected = 'Seurat', selectize = FALSE, width = '100%' ), width = '100%' ), plotOutput(outputId = 'plot', height = '100%'), flex = c(1, 4) ) ) ) # Prepare plotting data dims <- paste0(Key(object = object[[reduction]]), dims) plot.data <- FetchData(object = object, vars = c(dims, feature), slot = slot) # Shiny server server <- function(input, output, session) { plot.env <- reactiveValues( data = plot.data, dims = paste0(Key(object = object[[reduction]]), dims), feature = feature, palette = 'Seurat' ) # Observe events observeEvent( eventExpr = input$done, handlerExpr = stopApp(returnValue = plot.env$plot) ) observe(x = { assay <- input$assay feature.use <- input$feature features.assay <- sort(x = rownames(x = GetAssayData( object = object, slot = slot, assay = assay ))) feature.use <- ifelse( test = feature.use %in% features.assay, yes = feature.use, no = features.assay[1] ) reduc <- input$reduction dims.reduc <- gsub( pattern = Key(object = object[[reduc]]), replacement = '', x = colnames(x = object[[reduc]]) ) dims <- c(input$xdim, input$ydim) for (i in seq_along(along.with = dims)) { if (!dims[i] %in% dims.reduc) { dims[i] <- dims.reduc[i] } } updateSelectInput( session = session, inputId = 'xdim', label = 'X dimension', choices = dims.reduc, selected = as.character(x = dims[1]) ) updateSelectInput( session = session, inputId = 'ydim', label = 'Y dimension', choices = dims.reduc, selected = as.character(x = dims[2]) ) updateSelectInput( session = session, inputId = 'feature', label = feature.label, choices = features.assay, selected = feature.use ) }) observe(x = { feature.use <- input$feature feature.keyed <- paste0(Key(object = object[[input$assay]]), feature.use) reduc <- input$reduction dims <- c(input$xdim, input$ydim) dims <- paste0(Key(object = object[[reduc]]), dims) plot.data <- tryCatch( expr = FetchData( object = object, vars = c(dims, feature.keyed), slot = slot ), warning = function(...) { return(plot.env$data) }, error = function(...) 
{ return(plot.env$data) } ) dims <- colnames(x = plot.data)[1:2] colnames(x = plot.data) <- c(dims, feature.use) plot.env$data <- plot.data plot.env$feature <- feature.use plot.env$dims <- dims }) observe(x = { plot.env$palette <- input$palette }) # Create the plot output$plot <- renderPlot(expr = { plot.env$plot <- SingleDimPlot( data = plot.env$data, dims = plot.env$dims, col.by = plot.env$feature, label = FALSE ) + theme_cowplot() + theme(plot.title = element_text(hjust = 0.5)) + guides(color = NULL) + scale_color_gradientn( colors = FeaturePalettes[[plot.env$palette]], guide = 'colorbar' ) plot.env$plot }) } runGadget(app = ui, server = server) } #' Highlight Neighbors in DimPlot #' #' It will color the query cells and the neighbors of the query cells in the #' DimPlot #' #' @inheritParams DimPlot #' @param nn.idx the neighbor index of all cells #' @param query.cells cells used to find their neighbors #' @param show.all.cells Show all cells or only query and neighbor cells #' #' @inherit DimPlot return #' #' @export #' @concept visualization #' NNPlot <- function( object, reduction, nn.idx, query.cells, dims = 1:2, label = FALSE, label.size = 4, repel = FALSE, sizes.highlight = 2, pt.size = 1, cols.highlight = c("#377eb8", "#e41a1c"), na.value = "#bdbdbd", order = c("self", "neighbors", "other"), show.all.cells = TRUE, ... ) { if (inherits(x = nn.idx, what = 'Neighbor')) { rownames(x = slot(object = nn.idx, name = 'nn.idx')) <- Cells(x = nn.idx) nn.idx <- Indices(object = nn.idx) } if (length(x = query.cells) > 1) { neighbor.cells <- apply( X = nn.idx[query.cells, -1], MARGIN = 2, FUN = function(x) { return(Cells(x = object)[x]) } ) } else { neighbor.cells <- Cells(x = object)[nn.idx[query.cells , -1]] } neighbor.cells <- as.vector(x = neighbor.cells) neighbor.cells <- neighbor.cells[!is.na(x = neighbor.cells)] object[["nn.col"]] <- "other" object[["nn.col"]][neighbor.cells, ] <- "neighbors" object[["nn.col"]][query.cells, ] <- "self" object$nn.col <- factor( x = object$nn.col, levels = c("self", "neighbors", "other") ) if (!show.all.cells) { object <- subset( x = object, cells = Cells(x = object)[which(x = object[["nn.col"]] != "other")] ) nn.cols <- c(rev(x = cols.highlight)) nn.pt.size <- sizes.highlight } else { highlight.info <- SetHighlight( cells.highlight = c(query.cells, neighbor.cells), cells.all = Cells(x = object), sizes.highlight = sizes.highlight, pt.size = pt.size, cols.highlight = "red" ) nn.cols <- c(na.value, rev(x = cols.highlight)) nn.pt.size <- highlight.info$size } NN.plot <- DimPlot( object = object, reduction = reduction, dims = dims, group.by = "nn.col", cols = nn.cols, label = label, order = order, pt.size = nn.pt.size , label.size = label.size, repel = repel ) return(NN.plot) } #%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% # Scatter plots #%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% #' Cell-cell scatter plot #' #' Creates a plot of scatter plot of features across two single cells. Pearson #' correlation between the two cells is displayed above the plot. 
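#'
#' A minimal sketch of the \code{highlight} argument documented below, using
#' the bundled \code{pbmc_small} data and its variable features as the set of
#' features to highlight:
#' data("pbmc_small")
#' CellScatter(
#'   object = pbmc_small,
#'   cell1 = 'ATAGGAGAAACAGA',
#'   cell2 = 'CATCAGGATGCACA',
#'   highlight = VariableFeatures(object = pbmc_small)
#' )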
#' #' @inheritParams FeatureScatter #' @inheritParams DimPlot #' @param cell1 Cell 1 name #' @param cell2 Cell 2 name #' @param features Features to plot (default, all features) #' @param highlight Features to highlight #' @return A ggplot object #' #' @export #' @concept visualization #' #' @aliases CellPlot #' #' @examples #' data("pbmc_small") #' CellScatter(object = pbmc_small, cell1 = 'ATAGGAGAAACAGA', cell2 = 'CATCAGGATGCACA') #' CellScatter <- function( object, cell1, cell2, features = NULL, highlight = NULL, cols = NULL, pt.size = 1, smooth = FALSE, raster = NULL, raster.dpi = c(512, 512) ) { features <- features %||% rownames(x = object) data <- FetchData( object = object, vars = features, cells = c(cell1, cell2) ) data <- as.data.frame(x = t(x = data)) plot <- SingleCorPlot( data = data, cols = cols, pt.size = pt.size, rows.highlight = highlight, smooth = smooth, raster = raster, raster.dpi = raster.dpi ) return(plot) } #' Scatter plot of single cell data #' #' Creates a scatter plot of two features (typically feature expression), across a #' set of single cells. Cells are colored by their identity class. Pearson #' correlation between the two features is displayed above the plot. #' #' @param object Seurat object #' @param feature1 First feature to plot. Typically feature expression but can also #' be metrics, PC scores, etc. - anything that can be retreived with FetchData #' @param feature2 Second feature to plot. #' @param cells Cells to include on the scatter plot. #' @param shuffle Whether to randomly shuffle the order of points. This can be #' useful for crowded plots if points of interest are being buried. (default is FALSE) #' @param seed Sets the seed if randomly shuffling the order of points. #' @param group.by Name of one or more metadata columns to group (color) cells by #' (for example, orig.ident); pass 'ident' to group by identity class #' @param cols Colors to use for identity class plotting. #' @param pt.size Size of the points on the plot #' @param shape.by Ignored for now #' @param span Spline span in loess function call, if \code{NULL}, no spline added #' @param smooth Smooth the graph (similar to smoothScatter) #' @param slot Slot to pull data from, should be one of 'counts', 'data', or 'scale.data' #' @param combine Combine plots into a single \code{\link[patchwork]{patchwork}ed} #' @param plot.cor Display correlation in plot title #' @param raster Convert points to raster format, default is \code{NULL} #' which will automatically use raster if the number of points plotted is greater than #' 100,000 #' @param raster.dpi Pixel resolution for rasterized plots, passed to geom_scattermore(). #' Default is c(512, 512). 
#' @param jitter Jitter for easier visualization of crowded points #' #' @return A ggplot object #' #' @importFrom ggplot2 geom_smooth aes_string #' @importFrom patchwork wrap_plots #' #' @export #' @concept visualization #' #' @aliases GenePlot #' #' @examples #' data("pbmc_small") #' FeatureScatter(object = pbmc_small, feature1 = 'CD9', feature2 = 'CD3E') #' FeatureScatter <- function( object, feature1, feature2, cells = NULL, shuffle = FALSE, seed = 1, group.by = NULL, cols = NULL, pt.size = 1, shape.by = NULL, span = NULL, smooth = FALSE, combine = TRUE, slot = 'data', plot.cor = TRUE, raster = NULL, raster.dpi = c(512, 512), jitter = TRUE ) { cells <- cells %||% colnames(x = object) if (isTRUE(x = shuffle)) { set.seed(seed = seed) cells <- sample(x = cells) } object[['ident']] <- Idents(object = object) group.by <- group.by %||% 'ident' data <- FetchData( object = object, vars = c(feature1, feature2, group.by), cells = cells, slot = slot ) if (!grepl(pattern = feature1, x = colnames(x = data)[1])) { stop("Feature 1 (", feature1, ") not found.", call. = FALSE) } if (!grepl(pattern = feature2, x = colnames(x = data)[2])) { stop("Feature 2 (", feature2, ") not found.", call. = FALSE) } data <- as.data.frame(x = data) feature1 <- colnames(x = data)[1] feature2 <- colnames(x = data)[2] for (group in group.by) { if (!is.factor(x = data[, group])) { data[, group] <- factor(x = data[, group]) } } plots <- lapply( X = group.by, FUN = function(x) { SingleCorPlot( data = data[,c(feature1, feature2)], col.by = data[, x], cols = cols, pt.size = pt.size, smooth = smooth, legend.title = 'Identity', span = span, plot.cor = plot.cor, raster = raster, raster.dpi = raster.dpi, jitter = jitter ) } ) if (isTRUE(x = length(x = plots) == 1)) { return(plots[[1]]) } if (isTRUE(x = combine)) { plots <- wrap_plots(plots, ncol = length(x = group.by)) } return(plots) } #' View variable features #' #' @inheritParams FeatureScatter #' @inheritParams SeuratObject::HVFInfo #' @param cols Colors to specify non-variable/variable status #' @param assay Assay to pull variable features from #' @param log Plot the x-axis in log scale #' @param raster Convert points to raster format, default is \code{NULL} #' which will automatically use raster if the number of points plotted is greater than #' 100,000 #' #' @return A ggplot object #' #' @importFrom ggplot2 labs scale_color_manual scale_x_log10 #' @export #' @concept visualization #' #' @aliases VariableGenePlot MeanVarPlot #' #' @seealso \code{\link{FindVariableFeatures}} #' #' @examples #' data("pbmc_small") #' VariableFeaturePlot(object = pbmc_small) #' VariableFeaturePlot <- function( object, cols = c('black', 'red'), pt.size = 1, log = NULL, selection.method = NULL, assay = NULL, raster = NULL, raster.dpi = c(512, 512) ) { if (length(x = cols) != 2) { stop("'cols' must be of length 2") } hvf.info <- HVFInfo( object = object, assay = assay, selection.method = selection.method, status = TRUE ) var.status <- c('no', 'yes')[unlist(x = hvf.info[, ncol(x = hvf.info)]) + 1] if (colnames(x = hvf.info)[3] == 'dispersion.scaled') { hvf.info <- hvf.info[, c(1, 2)] } else { hvf.info <- hvf.info[, c(1, 3)] } axis.labels <- switch( EXPR = colnames(x = hvf.info)[2], 'variance.standardized' = c('Average Expression', 'Standardized Variance'), 'dispersion' = c('Average Expression', 'Dispersion'), 'residual_variance' = c('Geometric Mean of Expression', 'Residual Variance') ) log <- log %||% (any(c('variance.standardized', 'residual_variance') %in% colnames(x = hvf.info))) # var.features 
<- VariableFeatures(object = object, assay = assay) # var.status <- ifelse( # test = rownames(x = hvf.info) %in% var.features, # yes = 'yes', # no = 'no' # ) plot <- SingleCorPlot( data = hvf.info, col.by = var.status, pt.size = pt.size, raster = raster, raster.dpi = raster.dpi ) if (length(x = unique(x = var.status)) == 1) { switch( EXPR = var.status[1], 'yes' = { cols <- cols[2] labels.legend <- 'Variable' }, 'no' = { cols <- cols[1] labels.legend <- 'Non-variable' } ) } else { labels.legend <- c('Non-variable', 'Variable') } plot <- plot + labs(title = NULL, x = axis.labels[1], y = axis.labels[2]) + scale_color_manual( labels = paste(labels.legend, 'count:', table(var.status)), values = cols ) if (log) { plot <- plot + scale_x_log10() } return(plot) } #%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% # Polygon Plots #%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% #' Polygon DimPlot #' #' Plot cells as polygons, rather than single points. Color cells by identity, or a categorical variable #' in metadata #' #' @inheritParams PolyFeaturePlot #' @param group.by A grouping variable present in the metadata. Default is to use the groupings present #' in the current cell identities (\code{Idents(object = object)}) #' #' @return Returns a ggplot object #' #' @export #' @concept visualization #' PolyDimPlot <- function( object, group.by = NULL, cells = NULL, poly.data = 'spatial', flip.coords = FALSE ) { polygons <- Misc(object = object, slot = poly.data) if (is.null(x = polygons)) { stop("Could not find polygon data in misc slot") } group.by <- group.by %||% 'ident' group.data <- FetchData( object = object, vars = group.by, cells = cells ) group.data$cell <- rownames(x = group.data) data <- merge(x = polygons, y = group.data, by = 'cell') if (flip.coords) { coord.x <- data$x data$x <- data$y data$y <- coord.x } plot <- SinglePolyPlot(data = data, group.by = group.by) return(plot) } #' Polygon FeaturePlot #' #' Plot cells as polygons, rather than single points. Color cells by any value accessible by \code{\link{FetchData}}. #' #' @inheritParams FeaturePlot #' @param poly.data Name of the polygon dataframe in the misc slot #' @param ncol Number of columns to split the plot into #' @param common.scale ... 
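#'   (from the rescaling step in the function body, this appears to control
#'   whether all features are put on a common 0-1 scale before plotting)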
#' @param flip.coords Flip x and y coordinates #' #' @return Returns a ggplot object #' #' @importFrom ggplot2 scale_fill_viridis_c facet_wrap #' #' @export #' @concept visualization #' @concept spatial #' PolyFeaturePlot <- function( object, features, cells = NULL, poly.data = 'spatial', ncol = ceiling(x = length(x = features) / 2), min.cutoff = 0, max.cutoff = NA, common.scale = TRUE, flip.coords = FALSE ) { polygons <- Misc(object = object, slot = poly.data) if (is.null(x = polygons)) { stop("Could not find polygon data in misc slot") } assay.data <- FetchData( object = object, vars = features, cells = cells ) features <- colnames(x = assay.data) cells <- rownames(x = assay.data) min.cutoff <- mapply( FUN = function(cutoff, feature) { return(ifelse( test = is.na(x = cutoff), yes = min(assay.data[, feature]), no = cutoff )) }, cutoff = min.cutoff, feature = features ) max.cutoff <- mapply( FUN = function(cutoff, feature) { return(ifelse( test = is.na(x = cutoff), yes = max(assay.data[, feature]), no = cutoff )) }, cutoff = max.cutoff, feature = features ) check.lengths <- unique(x = vapply( X = list(features, min.cutoff, max.cutoff), FUN = length, FUN.VALUE = numeric(length = 1) )) if (length(x = check.lengths) != 1) { stop("There must be the same number of minimum and maximum cuttoffs as there are features") } assay.data <- mapply( FUN = function(feature, min, max) { return(ScaleColumn(vec = assay.data[, feature], cutoffs = c(min, max))) }, feature = features, min = min.cutoff, max = max.cutoff ) if (common.scale) { assay.data <- apply( X = assay.data, MARGIN = 2, FUN = function(x) { return(x - min(x)) } ) assay.data <- t( x = t(x = assay.data) / apply(X = assay.data, MARGIN = 2, FUN = max) ) } assay.data <- as.data.frame(x = assay.data) assay.data <- data.frame( cell = as.vector(x = replicate(n = length(x = features), expr = cells)), feature = as.vector(x = t(x = replicate(n = length(x = cells), expr = features))), expression = unlist(x = assay.data, use.names = FALSE) ) data <- merge(x = polygons, y = assay.data, by = 'cell') data$feature <- factor(x = data$feature, levels = features) if (flip.coords) { coord.x <- data$x data$x <- data$y data$y <- coord.x } plot <- SinglePolyPlot(data = data, group.by = 'expression', font_size = 8) + scale_fill_viridis_c() + facet_wrap(facets = 'feature', ncol = ncol) return(plot) } #%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% # Spatial Plots #%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% #' Visualize spatial and clustering (dimensional reduction) data in a linked, #' interactive framework #' #' @inheritParams DimPlot #' @inheritParams FeaturePlot #' @inheritParams SpatialPlot #' @param feature Feature to visualize #' @param image Name of the image to use in the plot #' #' @return Returns final plots. 
If \code{combine}, plots are stiched together #' using \code{\link{CombinePlots}}; otherwise, returns a list of ggplot objects #' #' @rdname LinkedPlots #' @name LinkedPlots #' #' @importFrom scales hue_pal #' @importFrom patchwork wrap_plots #' @importFrom ggplot2 scale_alpha_ordinal guides #' @importFrom miniUI miniPage gadgetTitleBar miniTitleBarButton miniContentPanel #' @importFrom shiny fillRow plotOutput brushOpts clickOpts hoverOpts #' verbatimTextOutput reactiveValues observeEvent stopApp nearPoints #' brushedPoints renderPlot renderPrint runGadget #' #' @aliases LinkedPlot LinkedDimPlot #' #' @export #' @concept visualization #' @concept spatial #' #' @examples #' \dontrun{ #' LinkedDimPlot(seurat.object) #' LinkedFeaturePlot(seurat.object, feature = 'Hpca') #' } #' LinkedDimPlot <- function( object, dims = 1:2, reduction = NULL, image = NULL, group.by = NULL, alpha = c(0.1, 1), combine = TRUE ) { # Setup gadget UI ui <- miniPage( gadgetTitleBar( title = 'LinkedDimPlot', left = miniTitleBarButton(inputId = 'reset', label = 'Reset') ), miniContentPanel( fillRow( plotOutput( outputId = 'spatialplot', height = '100%', # brush = brushOpts(id = 'brush', delay = 10, clip = TRUE, resetOnNew = FALSE), click = clickOpts(id = 'spclick', clip = TRUE), hover = hoverOpts(id = 'sphover', delay = 10, nullOutside = TRUE) ), plotOutput( outputId = 'dimplot', height = '100%', brush = brushOpts(id = 'brush', delay = 10, clip = TRUE, resetOnNew = FALSE), click = clickOpts(id = 'dimclick', clip = TRUE), hover = hoverOpts(id = 'dimhover', delay = 10, nullOutside = TRUE) ), height = '97%' ), verbatimTextOutput(outputId = 'info') ) ) # Prepare plotting data image <- image %||% DefaultImage(object = object) cells.use <- Cells(x = object[[image]]) reduction <- reduction %||% DefaultDimReduc(object = object) dims <- dims[1:2] dims <- paste0(Key(object = object[[reduction]]), dims) group.by <- group.by %||% 'ident' group.data <- FetchData( object = object, vars = group.by, cells = cells.use ) coords <- GetTissueCoordinates(object = object[[image]]) embeddings <- Embeddings(object = object[[reduction]])[cells.use, dims] plot.data <- cbind(coords, group.data, embeddings) plot.data$selected_ <- FALSE Idents(object = object) <- group.by # Setup the server server <- function(input, output, session) { click <- reactiveValues(pt = NULL, invert = FALSE) plot.env <- reactiveValues(data = plot.data, alpha.by = NULL) # Handle events observeEvent( eventExpr = input$done, handlerExpr = { plots <- list(plot.env$spatialplot, plot.env$dimplot) if (combine) { plots <- wrap_plots(plots, ncol = 2) } stopApp(returnValue = plots) } ) observeEvent( eventExpr = input$reset, handlerExpr = { click$pt <- NULL click$invert <- FALSE session$resetBrush(brushId = 'brush') } ) observeEvent(eventExpr = input$brush, handlerExpr = click$pt <- NULL) observeEvent( eventExpr = input$spclick, handlerExpr = { click$pt <- input$spclick click$invert <- TRUE } ) observeEvent( eventExpr = input$dimclick, handlerExpr = { click$pt <- input$dimclick click$invert <- FALSE } ) observeEvent( eventExpr = c(input$brush, input$spclick, input$dimclick), handlerExpr = { plot.env$data <- if (is.null(x = input$brush)) { clicked <- nearPoints( df = plot.data, coordinfo = if (click$invert) { InvertCoordinate(x = click$pt) } else { click$pt }, threshold = 10, maxpoints = 1 ) if (nrow(x = clicked) == 1) { cell.clicked <- rownames(x = clicked) group.clicked <- plot.data[cell.clicked, group.by, drop = TRUE] idx.group <- which(x = plot.data[[group.by]] == 
group.clicked) plot.data[idx.group, 'selected_'] <- TRUE plot.data } else { plot.data } } else if (input$brush$outputId == 'dimplot') { brushedPoints(df = plot.data, brush = input$brush, allRows = TRUE) } else if (input$brush$outputId == 'spatialplot') { brushedPoints(df = plot.data, brush = InvertCoordinate(x = input$brush), allRows = TRUE) } plot.env$alpha.by <- if (any(plot.env$data$selected_)) { 'selected_' } else { NULL } } ) # Set plots output$spatialplot <- renderPlot( expr = { plot.env$spatialplot <- SingleSpatialPlot( data = plot.env$data, image = object[[image]], col.by = group.by, pt.size.factor = 1.6, crop = TRUE, alpha.by = plot.env$alpha.by ) + scale_alpha_ordinal(range = alpha) + NoLegend() plot.env$spatialplot } ) output$dimplot <- renderPlot( expr = { plot.env$dimplot <- SingleDimPlot( data = plot.env$data, dims = dims, col.by = group.by, alpha.by = plot.env$alpha.by ) + scale_alpha_ordinal(range = alpha) + guides(alpha = FALSE) plot.env$dimplot } ) # Add hover text output$info <- renderPrint( expr = { cell.hover <- rownames(x = nearPoints( df = plot.data, coordinfo = if (is.null(x = input[['sphover']])) { input$dimhover } else { InvertCoordinate(x = input$sphover) }, threshold = 10, maxpoints = 1 )) # if (length(x = cell.hover) == 1) { # palette <- hue_pal()(n = length(x = levels(x = object))) # group <- plot.data[cell.hover, group.by, drop = TRUE] # background <- palette[which(x = levels(x = object) == group)] # text <- unname(obj = BGTextColor(background = background)) # style <- paste0( # paste( # paste('background-color:', background), # paste('color:', text), # sep = '; ' # ), # ';' # ) # info <- paste(cell.hover, paste('Group:', group), sep = '
<br />')
      # } else {
      #   style <- 'background-color: white; color: black'
      #   info <- NULL
      # }
      # HTML(text = paste0("<div style='", style, "'>", info, "</div>"))
      # p(HTML(info), style = style)
      # paste0('<div style=', style, '>', info, '</div>')
      # TODO: Get newlines, extra information, and background color working
      if (length(x = cell.hover) == 1) {
        paste(cell.hover, paste('Group:', plot.data[cell.hover, group.by, drop = TRUE]), collapse = '<br>
') } else { NULL } } ) } # Run the thang runGadget(app = ui, server = server) } #' @rdname LinkedPlots #' #' @aliases LinkedFeaturePlot #' #' @importFrom ggplot2 scale_fill_gradientn theme scale_alpha guides #' scale_color_gradientn guide_colorbar #' #' @export #' @concept visualization #' @concept spatial LinkedFeaturePlot <- function( object, feature, dims = 1:2, reduction = NULL, image = NULL, slot = 'data', alpha = c(0.1, 1), combine = TRUE ) { # Setup gadget UI ui <- miniPage( gadgetTitleBar( title = 'LinkedFeaturePlot', left = NULL ), miniContentPanel( fillRow( plotOutput( outputId = 'spatialplot', height = '100%', hover = hoverOpts(id = 'sphover', delay = 10, nullOutside = TRUE) ), plotOutput( outputId = 'dimplot', height = '100%', hover = hoverOpts(id = 'dimhover', delay = 10, nullOutside = TRUE) ), height = '97%' ), verbatimTextOutput(outputId = 'info') ) ) # Prepare plotting data cols <- SpatialColors(n = 100) image <- image %||% DefaultImage(object = object) cells.use <- Cells(x = object[[image]]) reduction <- reduction %||% DefaultDimReduc(object = object) dims <- dims[1:2] dims <- paste0(Key(object = object[[reduction]]), dims) group.data <- FetchData( object = object, vars = feature, cells = cells.use ) coords <- GetTissueCoordinates(object = object[[image]]) embeddings <- Embeddings(object = object[[reduction]])[cells.use, dims] plot.data <- cbind(coords, group.data, embeddings) # Setup the server server <- function(input, output, session) { plot.env <- reactiveValues() # Handle events observeEvent( eventExpr = input$done, handlerExpr = { plots <- list(plot.env$spatialplot, plot.env$dimplot) if (combine) { plots <- wrap_plots(plots, ncol = 2) } stopApp(returnValue = plots) } ) # Set plots output$spatialplot <- renderPlot( expr = { plot.env$spatialplot <- SingleSpatialPlot( data = plot.data, image = object[[image]], col.by = feature, pt.size.factor = 1.6, crop = TRUE, alpha.by = feature ) + scale_fill_gradientn(name = feature, colours = cols) + theme(legend.position = 'top') + scale_alpha(range = alpha) + guides(alpha = FALSE) plot.env$spatialplot } ) output$dimplot <- renderPlot( expr = { plot.env$dimplot <- SingleDimPlot( data = plot.data, dims = dims, col.by = feature ) + scale_color_gradientn(name = feature, colours = cols, guide = 'colorbar') + guides(color = guide_colorbar()) plot.env$dimplot } ) # Add hover text output$info <- renderPrint( expr = { cell.hover <- rownames(x = nearPoints( df = plot.data, coordinfo = if (is.null(x = input[['sphover']])) { input$dimhover } else { InvertCoordinate(x = input$sphover) }, threshold = 10, maxpoints = 1 )) # TODO: Get newlines, extra information, and background color working if (length(x = cell.hover) == 1) { paste(cell.hover, paste('Expression:', plot.data[cell.hover, feature, drop = TRUE]), collapse = '
') } else { NULL } } ) } runGadget(app = ui, server = server) } #' Visualize clusters spatially and interactively #' #' @inheritParams DimPlot #' @inheritParams SpatialPlot #' @inheritParams LinkedPlots #' #' @return Returns final plot as a ggplot object #' #' @importFrom ggplot2 scale_alpha_ordinal #' @importFrom miniUI miniPage miniButtonBlock miniTitleBarButton miniContentPanel #' @importFrom shiny fillRow plotOutput verbatimTextOutput reactiveValues #' observeEvent stopApp nearPoints renderPlot runGadget #' #' @export #' @concept visualization #' @concept spatial #' ISpatialDimPlot <- function( object, image = NULL, group.by = NULL, alpha = c(0.3, 1) ) { # Setup gadget UI ui <- miniPage( miniButtonBlock(miniTitleBarButton( inputId = 'done', label = 'Done', primary = TRUE )), miniContentPanel( fillRow( plotOutput( outputId = 'plot', height = '100%', click = clickOpts(id = 'click', clip = TRUE), hover = hoverOpts(id = 'hover', delay = 10, nullOutside = TRUE) ), height = '97%' ), verbatimTextOutput(outputId = 'info') ) ) # Get plotting data # Prepare plotting data image <- image %||% DefaultImage(object = object) cells.use <- Cells(x = object[[image]]) group.by <- group.by %||% 'ident' group.data <- FetchData( object = object, vars = group.by, cells = cells.use ) coords <- GetTissueCoordinates(object = object[[image]]) plot.data <- cbind(coords, group.data) plot.data$selected_ <- FALSE Idents(object = object) <- group.by # Set up the server server <- function(input, output, session) { click <- reactiveValues(pt = NULL) plot.env <- reactiveValues(data = plot.data, alpha.by = NULL) # Handle events observeEvent( eventExpr = input$done, handlerExpr = stopApp(returnValue = plot.env$plot) ) observeEvent( eventExpr = input$click, handlerExpr = { clicked <- nearPoints( df = plot.data, coordinfo = InvertCoordinate(x = input$click), threshold = 10, maxpoints = 1 ) plot.env$data <- if (nrow(x = clicked) == 1) { cell.clicked <- rownames(x = clicked) cell.clicked <- rownames(x = clicked) group.clicked <- plot.data[cell.clicked, group.by, drop = TRUE] idx.group <- which(x = plot.data[[group.by]] == group.clicked) plot.data[idx.group, 'selected_'] <- TRUE plot.data } else { plot.data } plot.env$alpha.by <- if (any(plot.env$data$selected_)) { 'selected_' } else { NULL } } ) # Set plot output$plot <- renderPlot( expr = { plot.env$plot <- SingleSpatialPlot( data = plot.env$data, image = object[[image]], col.by = group.by, crop = TRUE, alpha.by = plot.env$alpha.by, pt.size.factor = 1.6 ) + scale_alpha_ordinal(range = alpha) + NoLegend() plot.env$plot } ) # Add hover text output$info <- renderPrint( expr = { cell.hover <- rownames(x = nearPoints( df = plot.data, coordinfo = InvertCoordinate(x = input$hover), threshold = 10, maxpoints = 1 )) if (length(x = cell.hover) == 1) { paste(cell.hover, paste('Group:', plot.data[cell.hover, group.by, drop = TRUE]), collapse = '
') } else { NULL } } ) } runGadget(app = ui, server = server) } #' Visualize features spatially and interactively #' #' @inheritParams FeaturePlot #' @inheritParams SpatialPlot #' @inheritParams LinkedPlots #' #' @return Returns final plot as a ggplot object #' #' @importFrom ggplot2 scale_fill_gradientn theme scale_alpha guides #' @importFrom miniUI miniPage miniButtonBlock miniTitleBarButton miniContentPanel #' @importFrom shiny fillRow sidebarPanel sliderInput selectInput reactiveValues #' observeEvent stopApp observe updateSelectInput plotOutput renderPlot runGadget #' #' @export #' @concept visualization #' @concept spatial ISpatialFeaturePlot <- function( object, feature, image = NULL, slot = 'data', alpha = c(0.1, 1) ) { # Set inital data values assay.keys <- Key(object = object)[Assays(object = object)] keyed <- sapply(X = assay.keys, FUN = grepl, x = feature) assay <- if (any(keyed)) { names(x = which(x = keyed))[1] } else { DefaultAssay(object = object) } features <- sort(x = rownames(x = GetAssayData( object = object, slot = slot, assay = assay ))) feature.label <- 'Feature to visualize' assays.use <- vapply( X = Assays(object = object), FUN = function(x) { return(!IsMatrixEmpty(x = GetAssayData( object = object, slot = slot, assay = x ))) }, FUN.VALUE = logical(length = 1L) ) assays.use <- sort(x = Assays(object = object)[assays.use]) # Setup gadget UI ui <- miniPage( miniButtonBlock(miniTitleBarButton( inputId = 'done', label = 'Done', primary = TRUE )), miniContentPanel( fillRow( sidebarPanel( sliderInput( inputId = 'alpha', label = 'Alpha intensity', min = 0, max = max(alpha), value = min(alpha), step = 0.01, width = '100%' ), sliderInput( inputId = 'pt.size', label = 'Point size', min = 0, max = 5, value = 1.6, step = 0.1, width = '100%' ), selectInput( inputId = 'assay', label = 'Assay', choices = assays.use, selected = assay, selectize = FALSE, width = '100%' ), selectInput( inputId = 'feature', label = feature.label, choices = features, selected = feature, selectize = FALSE, width = '100%' ), selectInput( inputId = 'palette', label = 'Color scheme', choices = names(x = FeaturePalettes), selected = 'Spatial', selectize = FALSE, width = '100%' ), width = '100%' ), plotOutput(outputId = 'plot', height = '100%'), flex = c(1, 4) ) ) ) # Prepare plotting data image <- image %||% DefaultImage(object = object) cells.use <- Cells(x = object[[image]]) coords <- GetTissueCoordinates(object = object[[image]]) feature.data <- FetchData( object = object, vars = feature, cells = cells.use, slot = slot ) plot.data <- cbind(coords, feature.data) server <- function(input, output, session) { plot.env <- reactiveValues( data = plot.data, feature = feature, palette = 'Spatial' ) # Observe events observeEvent( eventExpr = input$done, handlerExpr = stopApp(returnValue = plot.env$plot) ) observe(x = { assay <- input$assay feature.use <- input$feature features.assay <- sort(x = rownames(x = GetAssayData( object = object, slot = slot, assay = assay ))) feature.use <- ifelse( test = feature.use %in% features.assay, yes = feature.use, no = features.assay[1] ) updateSelectInput( session = session, inputId = 'assay', label = 'Assay', choices = assays.use, selected = assay ) updateSelectInput( session = session, inputId = 'feature', label = feature.label, choices = features.assay, selected = feature.use ) }) observe(x = { feature.use <- input$feature try( expr = { feature.data <- FetchData( object = object, vars = paste0(Key(object = object[[input$assay]]), feature.use), cells = cells.use, slot = slot 
) colnames(x = feature.data) <- feature.use plot.env$data <- cbind(coords, feature.data) plot.env$feature <- feature.use }, silent = TRUE ) }) observe(x = { plot.env$palette <- input$palette }) # Create plot output$plot <- renderPlot(expr = { plot.env$plot <- SingleSpatialPlot( data = plot.env$data, image = object[[image]], col.by = plot.env$feature, pt.size.factor = input$pt.size, crop = TRUE, alpha.by = plot.env$feature ) + # scale_fill_gradientn(name = plot.env$feature, colours = cols) + scale_fill_gradientn(name = plot.env$feature, colours = FeaturePalettes[[plot.env$palette]]) + theme(legend.position = 'top') + scale_alpha(range = c(input$alpha, 1)) + guides(alpha = FALSE) plot.env$plot }) } runGadget(app = ui, server = server) } #' Visualize spatial clustering and expression data. #' #' SpatialPlot plots a feature or discrete grouping (e.g. cluster assignments) as #' spots over the image that was collected. We also provide SpatialFeaturePlot #' and SpatialDimPlot as wrapper functions around SpatialPlot for a consistent #' naming framework. #' #' @inheritParams HoverLocator #' @param object A Seurat object #' @param group.by Name of meta.data column to group the data by #' @param features Name of the feature to visualize. Provide either group.by OR #' features, not both. #' @param images Name of the images to use in the plot(s) #' @param cols Vector of colors, each color corresponds to an identity class. #' This may also be a single character or numeric value corresponding to a #' palette as specified by \code{\link[RColorBrewer]{brewer.pal.info}}. By #' default, ggplot2 assigns colors #' @param image.alpha Adjust the opacity of the background images. Set to 0 to #' remove. #' @param crop Crop the plot in to focus on points plotted. Set to FALSE to show #' entire background image. #' @param slot If plotting a feature, which data slot to pull from (counts, #' data, or scale.data) #' @param min.cutoff,max.cutoff Vector of minimum and maximum cutoff #' values for each feature, may specify quantile in the form of 'q##' where '##' #' is the quantile (eg, 'q1', 'q10') #' @param cells.highlight A list of character or numeric vectors of cells to #' highlight. If only one group of cells desired, can simply pass a vector #' instead of a list. If set, colors selected cells to the color(s) in #' cols.highlight #' @param cols.highlight A vector of colors to highlight the cells as; ordered #' the same as the groups in cells.highlight; last color corresponds to #' unselected cells. #' @param facet.highlight When highlighting certain groups of cells, split each #' group into its own plot #' @param label Whether to label the clusters #' @param label.size Sets the size of the labels #' @param label.color Sets the color of the label text #' @param label.box Whether to put a box around the label text (geom_text vs #' geom_label) #' @param repel Repels the labels to prevent overlap #' @param ncol Number of columns if plotting multiple plots #' @param combine Combine plots into a single gg object; note that if TRUE; #' themeing will not work when plotting multiple features/groupings #' @param pt.size.factor Scale the size of the spots. #' @param alpha Controls opacity of spots. Provide as a vector specifying the #' min and max for SpatialFeaturePlot. For SpatialDimPlot, provide a single #' alpha value for each plot. 
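#' For example, \code{alpha = c(0.1, 1)} in \code{SpatialFeaturePlot} renders spots
#' with little or no expression as nearly transparent while the highest-expressing
#' spots stay fully opaque.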
#' @param stroke Control the width of the border around the spots #' @param interactive Launch an interactive SpatialDimPlot or SpatialFeaturePlot #' session, see \code{\link{ISpatialDimPlot}} or #' \code{\link{ISpatialFeaturePlot}} for more details #' @param do.identify,do.hover DEPRECATED in favor of \code{interactive} #' @param identify.ident DEPRECATED #' #' @return If \code{do.identify}, either a vector of cells selected or the object #' with selected cells set to the value of \code{identify.ident} (if set). Else, #' if \code{do.hover}, a plotly object with interactive graphics. Else, a ggplot #' object #' #' @importFrom ggplot2 scale_fill_gradientn ggtitle theme element_text scale_alpha #' @importFrom patchwork wrap_plots #' @export #' @concept visualization #' @concept spatial #' #' @examples #' \dontrun{ #' # For functionality analagous to FeaturePlot #' SpatialPlot(seurat.object, features = "MS4A1") #' SpatialFeaturePlot(seurat.object, features = "MS4A1") #' #' # For functionality analagous to DimPlot #' SpatialPlot(seurat.object, group.by = "clusters") #' SpatialDimPlot(seurat.object, group.by = "clusters") #' } #' SpatialPlot <- function( object, group.by = NULL, features = NULL, images = NULL, cols = NULL, image.alpha = 1, crop = TRUE, slot = 'data', min.cutoff = NA, max.cutoff = NA, cells.highlight = NULL, cols.highlight = c('#DE2D26', 'grey50'), facet.highlight = FALSE, label = FALSE, label.size = 5, label.color = 'white', label.box = TRUE, repel = FALSE, ncol = NULL, combine = TRUE, pt.size.factor = 1.6, alpha = c(1, 1), stroke = 0.25, interactive = FALSE, do.identify = FALSE, identify.ident = NULL, do.hover = FALSE, information = NULL ) { if (isTRUE(x = do.hover) || isTRUE(x = do.identify)) { warning( "'do.hover' and 'do.identify' are deprecated as we are removing plotly-based interactive graphics, use 'interactive' instead for Shiny-based interactivity", call. = FALSE, immediate. 
= TRUE ) interactive <- TRUE } if (!is.null(x = group.by) & !is.null(x = features)) { stop("Please specific either group.by or features, not both.") } images <- images %||% Images(object = object, assay = DefaultAssay(object = object)) if (length(x = images) == 0) { images <- Images(object = object) } if (length(x = images) < 1) { stop("Could not find any spatial image information") } if (is.null(x = features)) { if (interactive) { return(ISpatialDimPlot( object = object, image = images[1], group.by = group.by, alpha = alpha )) } group.by <- group.by %||% 'ident' object[['ident']] <- Idents(object = object) data <- object[[group.by]] for (group in group.by) { if (!is.factor(x = data[, group])) { data[, group] <- factor(x = data[, group]) } } } else { if (interactive) { return(ISpatialFeaturePlot( object = object, feature = features[1], image = images[1], slot = slot, alpha = alpha )) } data <- FetchData( object = object, vars = features, slot = slot ) features <- colnames(x = data) # Determine cutoffs min.cutoff <- mapply( FUN = function(cutoff, feature) { return(ifelse( test = is.na(x = cutoff), yes = min(data[, feature]), no = cutoff )) }, cutoff = min.cutoff, feature = features ) max.cutoff <- mapply( FUN = function(cutoff, feature) { return(ifelse( test = is.na(x = cutoff), yes = max(data[, feature]), no = cutoff )) }, cutoff = max.cutoff, feature = features ) check.lengths <- unique(x = vapply( X = list(features, min.cutoff, max.cutoff), FUN = length, FUN.VALUE = numeric(length = 1) )) if (length(x = check.lengths) != 1) { stop("There must be the same number of minimum and maximum cuttoffs as there are features") } # Apply cutoffs data <- sapply( X = 1:ncol(x = data), FUN = function(index) { data.feature <- as.vector(x = data[, index]) min.use <- SetQuantile(cutoff = min.cutoff[index], data.feature) max.use <- SetQuantile(cutoff = max.cutoff[index], data.feature) data.feature[data.feature < min.use] <- min.use data.feature[data.feature > max.use] <- max.use return(data.feature) } ) colnames(x = data) <- features rownames(x = data) <- Cells(x = object) } features <- colnames(x = data) colnames(x = data) <- features rownames(x = data) <- colnames(x = object) facet.highlight <- facet.highlight && (!is.null(x = cells.highlight) && is.list(x = cells.highlight)) if (do.hover) { if (length(x = images) > 1) { images <- images[1] warning( "'do.hover' requires only one image, using image ", images, call. = FALSE, immediate. = TRUE ) } if (length(x = features) > 1) { features <- features[1] type <- ifelse(test = is.null(x = group.by), yes = 'feature', no = 'grouping') warning( "'do.hover' requires only one ", type, ", using ", features, call. = FALSE, immediate. = TRUE ) } if (facet.highlight) { warning( "'do.hover' requires no faceting highlighted cells", call. = FALSE, immediate. = TRUE ) facet.highlight <- FALSE } } if (facet.highlight) { if (length(x = images) > 1) { images <- images[1] warning( "Faceting the highlight only works with a single image, using image ", images, call. = FALSE, immediate. 
= TRUE ) } ncols <- length(x = cells.highlight) } else { ncols <- length(x = images) } plots <- vector( mode = "list", length = length(x = features) * ncols ) for (i in 1:ncols) { plot.idx <- i image.idx <- ifelse(test = facet.highlight, yes = 1, no = i) image.use <- object[[images[[image.idx]]]] coordinates <- GetTissueCoordinates(object = image.use) highlight.use <- if (facet.highlight) { cells.highlight[i] } else { cells.highlight } for (j in 1:length(x = features)) { cols.unset <- is.factor(x = data[, features[j]]) && is.null(x = cols) if (cols.unset) { cols <- hue_pal()(n = length(x = levels(x = data[, features[j]]))) names(x = cols) <- levels(x = data[, features[j]]) } plot <- SingleSpatialPlot( data = cbind( coordinates, data[rownames(x = coordinates), features[j], drop = FALSE] ), image = image.use, image.alpha = image.alpha, col.by = features[j], cols = cols, alpha.by = if (is.null(x = group.by)) { features[j] } else { NULL }, pt.alpha = if (!is.null(x = group.by)) { alpha[j] } else { NULL }, geom = if (inherits(x = image.use, what = "STARmap")) { 'poly' } else { 'spatial' }, cells.highlight = highlight.use, cols.highlight = cols.highlight, pt.size.factor = pt.size.factor, stroke = stroke, crop = crop ) if (is.null(x = group.by)) { plot <- plot + scale_fill_gradientn( name = features[j], colours = SpatialColors(n = 100) ) + theme(legend.position = 'top') + scale_alpha(range = alpha) + guides(alpha = FALSE) } else if (label) { plot <- LabelClusters( plot = plot, id = ifelse( test = is.null(x = cells.highlight), yes = features[j], no = 'highlight' ), geom = if (inherits(x = image.use, what = "STARmap")) { 'GeomPolygon' } else { 'GeomSpatial' }, repel = repel, size = label.size, color = label.color, box = label.box, position = "nearest" ) } if (j == 1 && length(x = images) > 1 && !facet.highlight) { plot <- plot + ggtitle(label = images[[image.idx]]) + theme(plot.title = element_text(hjust = 0.5)) } if (facet.highlight) { plot <- plot + ggtitle(label = names(x = cells.highlight)[i]) + theme(plot.title = element_text(hjust = 0.5)) + NoLegend() } plots[[plot.idx]] <- plot plot.idx <- plot.idx + ncols if (cols.unset) { cols <- NULL } } } # if (do.identify) { # return(CellSelector( # plot = plot, # object = identify.ident %iff% object, # ident = identify.ident # )) # } else if (do.hover) { # return(HoverLocator( # plot = plots[[1]], # information = information %||% data[, features, drop = FALSE], # axes = FALSE, # # cols = c('size' = 'point.size.factor', 'colour' = 'fill'), # images = GetImage(object = object, mode = 'plotly', image = images) # )) # } if (length(x = images) > 1 && combine) { plots <- wrap_plots(plots = plots, ncol = length(x = images)) } else if (length(x = images == 1) && combine) { plots <- wrap_plots(plots = plots, ncol = ncol) } return(plots) } #%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% # Other plotting functions #%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% #' Plot the Barcode Distribution and Calculated Inflection Points #' #' This function plots the calculated inflection points derived from the barcode-rank #' distribution. #' #' See [CalculateBarcodeInflections()] to calculate inflection points and #' [SubsetByBarcodeInflections()] to subsequently subset the Seurat object. #' #' @param object Seurat object #' #' @return Returns a `ggplot2` object showing the by-group inflection points and provided #' (or default) rank threshold values in grey. 
#' #' @importFrom methods slot #' @importFrom cowplot theme_cowplot #' @importFrom ggplot2 ggplot geom_line geom_vline aes_string #' #' @export #' @concept visualization #' #' @author Robert A. Amezquita, \email{robert.amezquita@fredhutch.org} #' @seealso \code{\link{CalculateBarcodeInflections}} \code{\link{SubsetByBarcodeInflections}} #' #' @examples #' data("pbmc_small") #' pbmc_small <- CalculateBarcodeInflections(pbmc_small, group.column = 'groups') #' BarcodeInflectionsPlot(pbmc_small) #' BarcodeInflectionsPlot <- function(object) { cbi.data <- Tool(object = object, slot = 'CalculateBarcodeInflections') if (is.null(x = cbi.data)) { stop("Barcode inflections not calculated, please run CalculateBarcodeInflections") } ## Extract necessary data frames inflection_points <- cbi.data$inflection_points barcode_distribution <- cbi.data$barcode_distribution threshold_values <- cbi.data$threshold_values # Set a cap to max rank to avoid plot being overextended if (threshold_values$rank[[2]] > max(barcode_distribution$rank, na.rm = TRUE)) { threshold_values$rank[[2]] <- max(barcode_distribution$rank, na.rm = TRUE) } ## Infer the grouping/barcode variables group_var <- colnames(x = barcode_distribution)[1] barcode_var <- colnames(x = barcode_distribution)[2] barcode_distribution[, barcode_var] <- log10(x = barcode_distribution[, barcode_var] + 1) ## Make the plot plot <- ggplot( data = barcode_distribution, mapping = aes_string( x = 'rank', y = barcode_var, group = group_var, colour = group_var ) ) + geom_line() + geom_vline( data = threshold_values, aes_string(xintercept = 'rank'), linetype = "dashed", colour = 'grey60', size = 0.5 ) + geom_vline( data = inflection_points, mapping = aes_string( xintercept = 'rank', group = group_var, colour = group_var ), linetype = "dashed" ) + theme_cowplot() return(plot) } #' Dot plot visualization #' #' Intuitive way of visualizing how feature expression changes across different #' identity classes (clusters). The size of the dot encodes the percentage of #' cells within a class, while the color encodes the AverageExpression level #' across all cells within a class (blue is high). #' #' @param object Seurat object #' @param assay Name of assay to use, defaults to the active assay #' @param features Input vector of features, or named list of feature vectors #' if feature-grouped panels are desired (replicates the functionality of the #' old SplitDotPlotGG) #' @param cols Colors to plot: the name of a palette from #' \code{RColorBrewer::brewer.pal.info}, a pair of colors defining a gradient, #' or 3+ colors defining multiple gradients (if split.by is set) #' @param col.min Minimum scaled average expression threshold (everything #' smaller will be set to this) #' @param col.max Maximum scaled average expression threshold (everything larger #' will be set to this) #' @param dot.min The fraction of cells at which to draw the smallest dot #' (default is 0). All cell groups with less than this expressing the given #' gene will have no dot drawn. 
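#' For example, \code{dot.min = 0.05} draws no dot for any identity class in which
#' fewer than 5\% of cells express the feature.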
#' @param dot.scale Scale the size of the points, similar to cex #' @param idents Identity classes to include in plot (default is all) #' @param group.by Factor to group the cells by #' @param split.by Factor to split the groups by (replicates the functionality #' of the old SplitDotPlotGG); #' see \code{\link{FetchData}} for more details #' @param cluster.idents Whether to order identities by hierarchical clusters #' based on given features, default is FALSE #' @param scale Determine whether the data is scaled, TRUE for default #' @param scale.by Scale the size of the points by 'size' or by 'radius' #' @param scale.min Set lower limit for scaling, use NA for default #' @param scale.max Set upper limit for scaling, use NA for default #' #' @return A ggplot object #' #' @importFrom grDevices colorRampPalette #' @importFrom cowplot theme_cowplot #' @importFrom ggplot2 ggplot aes_string geom_point scale_size scale_radius #' theme element_blank labs scale_color_identity scale_color_distiller #' scale_color_gradient guides guide_legend guide_colorbar #' facet_grid unit #' @importFrom scattermore geom_scattermore #' @importFrom stats dist hclust #' @importFrom RColorBrewer brewer.pal.info #' #' @export #' @concept visualization #' #' @aliases SplitDotPlotGG #' @seealso \code{RColorBrewer::brewer.pal.info} #' #' @examples #' data("pbmc_small") #' cd_genes <- c("CD247", "CD3E", "CD9") #' DotPlot(object = pbmc_small, features = cd_genes) #' pbmc_small[['groups']] <- sample(x = c('g1', 'g2'), size = ncol(x = pbmc_small), replace = TRUE) #' DotPlot(object = pbmc_small, features = cd_genes, split.by = 'groups') #' DotPlot <- function( object, assay = NULL, features, cols = c("lightgrey", "blue"), col.min = -2.5, col.max = 2.5, dot.min = 0, dot.scale = 6, idents = NULL, group.by = NULL, split.by = NULL, cluster.idents = FALSE, scale = TRUE, scale.by = 'radius', scale.min = NA, scale.max = NA ) { assay <- assay %||% DefaultAssay(object = object) DefaultAssay(object = object) <- assay split.colors <- !is.null(x = split.by) && !any(cols %in% rownames(x = brewer.pal.info)) scale.func <- switch( EXPR = scale.by, 'size' = scale_size, 'radius' = scale_radius, stop("'scale.by' must be either 'size' or 'radius'") ) feature.groups <- NULL if (is.list(features) | any(!is.na(names(features)))) { feature.groups <- unlist(x = sapply( X = 1:length(features), FUN = function(x) { return(rep(x = names(x = features)[x], each = length(features[[x]]))) } )) if (any(is.na(x = feature.groups))) { warning( "Some feature groups are unnamed.", call. = FALSE, immediate. 
= TRUE ) } features <- unlist(x = features) names(x = feature.groups) <- features } cells <- unlist(x = CellsByIdentities(object = object, idents = idents)) data.features <- FetchData(object = object, vars = features, cells = cells) data.features$id <- if (is.null(x = group.by)) { Idents(object = object)[cells, drop = TRUE] } else { object[[group.by, drop = TRUE]][cells, drop = TRUE] } if (!is.factor(x = data.features$id)) { data.features$id <- factor(x = data.features$id) } id.levels <- levels(x = data.features$id) data.features$id <- as.vector(x = data.features$id) if (!is.null(x = split.by)) { splits <- object[[split.by, drop = TRUE]][cells, drop = TRUE] if (split.colors) { if (length(x = unique(x = splits)) > length(x = cols)) { stop("Not enough colors for the number of groups") } cols <- cols[1:length(x = unique(x = splits))] names(x = cols) <- unique(x = splits) } data.features$id <- paste(data.features$id, splits, sep = '_') unique.splits <- unique(x = splits) id.levels <- paste0(rep(x = id.levels, each = length(x = unique.splits)), "_", rep(x = unique(x = splits), times = length(x = id.levels))) } data.plot <- lapply( X = unique(x = data.features$id), FUN = function(ident) { data.use <- data.features[data.features$id == ident, 1:(ncol(x = data.features) - 1), drop = FALSE] avg.exp <- apply( X = data.use, MARGIN = 2, FUN = function(x) { return(mean(x = expm1(x = x))) } ) pct.exp <- apply(X = data.use, MARGIN = 2, FUN = PercentAbove, threshold = 0) return(list(avg.exp = avg.exp, pct.exp = pct.exp)) } ) names(x = data.plot) <- unique(x = data.features$id) if (cluster.idents) { mat <- do.call( what = rbind, args = lapply(X = data.plot, FUN = unlist) ) mat <- scale(x = mat) id.levels <- id.levels[hclust(d = dist(x = mat))$order] } data.plot <- lapply( X = names(x = data.plot), FUN = function(x) { data.use <- as.data.frame(x = data.plot[[x]]) data.use$features.plot <- rownames(x = data.use) data.use$id <- x return(data.use) } ) data.plot <- do.call(what = 'rbind', args = data.plot) if (!is.null(x = id.levels)) { data.plot$id <- factor(x = data.plot$id, levels = id.levels) } if (length(x = levels(x = data.plot$id)) == 1) { scale <- FALSE warning( "Only one identity present, the expression values will be not scaled", call. = FALSE, immediate. 
= TRUE ) } avg.exp.scaled <- sapply( X = unique(x = data.plot$features.plot), FUN = function(x) { data.use <- data.plot[data.plot$features.plot == x, 'avg.exp'] if (scale) { data.use <- scale(x = data.use) data.use <- MinMax(data = data.use, min = col.min, max = col.max) } else { data.use <- log1p(x = data.use) } return(data.use) } ) avg.exp.scaled <- as.vector(x = t(x = avg.exp.scaled)) if (split.colors) { avg.exp.scaled <- as.numeric(x = cut(x = avg.exp.scaled, breaks = 20)) } data.plot$avg.exp.scaled <- avg.exp.scaled data.plot$features.plot <- factor( x = data.plot$features.plot, levels = features ) data.plot$pct.exp[data.plot$pct.exp < dot.min] <- NA data.plot$pct.exp <- data.plot$pct.exp * 100 if (split.colors) { splits.use <- vapply( X = as.character(x = data.plot$id), FUN = gsub, FUN.VALUE = character(length = 1L), pattern = paste0( '^((', paste(sort(x = levels(x = object), decreasing = TRUE), collapse = '|'), ')_)' ), replacement = '', USE.NAMES = FALSE ) data.plot$colors <- mapply( FUN = function(color, value) { return(colorRampPalette(colors = c('grey', color))(20)[value]) }, color = cols[splits.use], value = avg.exp.scaled ) } color.by <- ifelse(test = split.colors, yes = 'colors', no = 'avg.exp.scaled') if (!is.na(x = scale.min)) { data.plot[data.plot$pct.exp < scale.min, 'pct.exp'] <- scale.min } if (!is.na(x = scale.max)) { data.plot[data.plot$pct.exp > scale.max, 'pct.exp'] <- scale.max } if (!is.null(x = feature.groups)) { data.plot$feature.groups <- factor( x = feature.groups[data.plot$features.plot], levels = unique(x = feature.groups) ) } plot <- ggplot(data = data.plot, mapping = aes_string(x = 'features.plot', y = 'id')) + geom_point(mapping = aes_string(size = 'pct.exp', color = color.by)) + scale.func(range = c(0, dot.scale), limits = c(scale.min, scale.max)) + theme(axis.title.x = element_blank(), axis.title.y = element_blank()) + guides(size = guide_legend(title = 'Percent Expressed')) + labs( x = 'Features', y = ifelse(test = is.null(x = split.by), yes = 'Identity', no = 'Split Identity') ) + theme_cowplot() if (!is.null(x = feature.groups)) { plot <- plot + facet_grid( facets = ~feature.groups, scales = "free_x", space = "free_x", switch = "y" ) + theme( panel.spacing = unit(x = 1, units = "lines"), strip.background = element_blank() ) } if (split.colors) { plot <- plot + scale_color_identity() } else if (length(x = cols) == 1) { plot <- plot + scale_color_distiller(palette = cols) } else { plot <- plot + scale_color_gradient(low = cols[1], high = cols[2]) } if (!split.colors) { plot <- plot + guides(color = guide_colorbar(title = 'Average Expression')) } return(plot) } #' Quickly Pick Relevant Dimensions #' #' Plots the standard deviations (or approximate singular values if running PCAFast) #' of the principle components for easy identification of an elbow in the graph. 
#' This elbow often corresponds well with the significant dims and is much faster to run than #' Jackstraw #' #' @param object Seurat object #' @param ndims Number of dimensions to plot standard deviation for #' @param reduction Reduction technique to plot standard deviation for #' #' @return A ggplot object #' #' @importFrom cowplot theme_cowplot #' @importFrom ggplot2 ggplot aes_string geom_point labs element_line #' @export #' @concept visualization #' #' @examples #' data("pbmc_small") #' ElbowPlot(object = pbmc_small) #' ElbowPlot <- function(object, ndims = 20, reduction = 'pca') { data.use <- Stdev(object = object, reduction = reduction) if (length(x = data.use) == 0) { stop(paste("No standard deviation info stored for", reduction)) } if (ndims > length(x = data.use)) { warning("The object only has information for ", length(x = data.use), " reductions") ndims <- length(x = data.use) } stdev <- 'Standard Deviation' plot <- ggplot(data = data.frame(dims = 1:ndims, stdev = data.use[1:ndims])) + geom_point(mapping = aes_string(x = 'dims', y = 'stdev')) + labs( x = gsub( pattern = '_$', replacement = '', x = Key(object = object[[reduction]]) ), y = stdev ) + theme_cowplot() return(plot) } #' Boxplot of correlation of a variable (e.g. number of UMIs) with expression #' data #' #' @param object Seurat object #' @param assay Assay where the feature grouping info and correlations are #' stored #' @param feature.group Name of the column in meta.features where the feature #' grouping info is stored #' @param cor Name of the column in meta.features where correlation info is #' stored #' #' @return Returns a ggplot boxplot of correlations split by group #' #' @importFrom ggplot2 geom_boxplot scale_fill_manual geom_hline #' @importFrom cowplot theme_cowplot #' @importFrom scales brewer_pal #' @importFrom stats complete.cases #' #' @export #' @concept visualization #' GroupCorrelationPlot <- function( object, assay = NULL, feature.group = "feature.grp", cor = "nCount_RNA_cor" ) { assay <- assay %||% DefaultAssay(object = object) data <- object[[assay]][[c(feature.group, cor)]] data <- data[complete.cases(data), ] colnames(x = data) <- c('grp', 'cor') plot <- ggplot(data = data, aes_string(x = "grp", y = "cor", fill = "grp")) + geom_boxplot() + theme_cowplot() + scale_fill_manual(values = rev(x = brewer_pal(palette = 'YlOrRd')(n = 7))) + ylab(paste( "Correlation with", gsub(x = cor, pattern = "_cor", replacement = "") )) + geom_hline(yintercept = 0) + NoLegend() + theme( axis.line.x = element_blank(), axis.title.x = element_blank(), axis.ticks.x = element_blank(), axis.text.x = element_blank() ) return(plot) } #' JackStraw Plot #' #' Plots the results of the JackStraw analysis for PCA significance. For each #' PC, plots a QQ-plot comparing the distribution of p-values for all genes #' across each PC, compared with a uniform distribution. Also determines a #' p-value for the overall significance of each PC (see Details). #' #' Significant PCs should show a p-value distribution (black curve) that is #' strongly skewed to the left compared to the null distribution (dashed line) #' The p-value for each PC is based on a proportion test comparing the number #' of genes with a p-value below a particular threshold (score.thresh), compared with the #' proportion of genes expected under a uniform distribution of p-values. #' #' @param object Seurat object #' @param dims Dims to plot #' @param cols Vector of colors, each color corresponds to an individual PC. 
This may also be a single character #' or numeric value corresponding to a palette as specified by \code{\link[RColorBrewer]{brewer.pal.info}}. #' By default, ggplot2 assigns colors. We also include a number of palettes from the pals package. #' See \code{\link{DiscretePalette}} for details. #' @param reduction reduction to pull jackstraw info from #' @param xmax X-axis maximum on each QQ plot. #' @param ymax Y-axis maximum on each QQ plot. #' #' @return A ggplot object #' #' @author Omri Wurtzel #' @seealso \code{\link{ScoreJackStraw}} #' #' @importFrom stats qunif #' @importFrom scales hue_pal #' @importFrom ggplot2 ggplot aes_string stat_qq labs xlim ylim #' coord_flip geom_abline guides guide_legend #' @importFrom cowplot theme_cowplot #' #' @export #' @concept visualization #' #' @examples #' data("pbmc_small") #' JackStrawPlot(object = pbmc_small) #' JackStrawPlot <- function( object, dims = 1:5, cols = NULL, reduction = 'pca', xmax = 0.1, ymax = 0.3 ) { pAll <- JS(object = object[[reduction]], slot = 'empirical') if (max(dims) > ncol(x = pAll)) { stop("Max dimension is ", ncol(x = pAll)) } pAll <- pAll[, dims, drop = FALSE] pAll <- as.data.frame(x = pAll) data.plot <- Melt(x = pAll) colnames(x = data.plot) <- c("Contig", "PC", "Value") score.df <- JS(object = object[[reduction]], slot = 'overall') if (nrow(x = score.df) < max(dims)) { stop("Jackstraw procedure not scored for all the provided dims. Please run ScoreJackStraw.") } score.df <- score.df[dims, , drop = FALSE] if (nrow(x = score.df) == 0) { stop(paste0("JackStraw hasn't been scored. Please run ScoreJackStraw before plotting.")) } data.plot$PC.Score <- rep( x = paste0("PC ", score.df[ ,"PC"], ": ", sprintf("%1.3g", score.df[ ,"Score"])), each = length(x = unique(x = data.plot$Contig)) ) data.plot$PC.Score <- factor( x = data.plot$PC.Score, levels = paste0("PC ", score.df[, "PC"], ": ", sprintf("%1.3g", score.df[, "Score"])) ) if (is.null(x = cols)) { cols <- hue_pal()(length(x = dims)) } if (length(x = cols) < length(x = dims)) { stop("Not enough colors for the number of dims selected") } gp <- ggplot(data = data.plot, mapping = aes_string(sample = 'Value', color = 'PC.Score')) + stat_qq(distribution = qunif) + labs(x = "Theoretical [runif(1000)]", y = "Empirical") + scale_color_manual(values = cols) + xlim(0, ymax) + ylim(0, xmax) + coord_flip() + geom_abline(intercept = 0, slope = 1, linetype = "dashed", na.rm = TRUE) + guides(color = guide_legend(title = "PC: p-value")) + theme_cowplot() return(gp) } #' Plot clusters as a tree #' #' Plots previously computed tree (from BuildClusterTree) #' #' @param object Seurat object #' @param direction A character string specifying the direction of the tree (default is downwards) #' Possible options: "rightwards", "leftwards", "upwards", and "downwards". #' @param \dots Additional arguments to #' \code{\link[ape:plot.phylo]{ape::plot.phylo}} #' #' @return Plots dendogram (must be precomputed using BuildClusterTree), returns no value #' #' @export #' @concept visualization #' #' @examples #' if (requireNamespace("ape", quietly = TRUE)) { #' data("pbmc_small") #' pbmc_small <- BuildClusterTree(object = pbmc_small) #' PlotClusterTree(object = pbmc_small) #' } PlotClusterTree <- function(object, direction = "downwards", ...) { if (!PackageCheck('ape', error = FALSE)) { stop(cluster.ape, call. 
= FALSE) } if (is.null(x = Tool(object = object, slot = "BuildClusterTree"))) { stop("Phylogenetic tree does not exist, build using BuildClusterTree") } data.tree <- Tool(object = object, slot = "BuildClusterTree") ape::plot.phylo(x = data.tree, direction = direction, ...) ape::nodelabels() } #' Visualize Dimensional Reduction genes #' #' Visualize top genes associated with reduction components #' #' @param object Seurat object #' @param reduction Reduction technique to visualize results for #' @param dims Number of dimensions to display #' @param nfeatures Number of genes to display #' @param col Color of points to use #' @param projected Use reduction values for full dataset (i.e. projected #' dimensional reduction values) #' @param balanced Return an equal number of genes with + and - scores. If #' FALSE (default), returns the top genes ranked by the scores absolute values #' @param ncol Number of columns to display #' @param combine Combine plots into a single \code{\link[patchwork]{patchwork}ed} #' ggplot object. If \code{FALSE}, return a list of ggplot objects #' #' @return A \code{\link[patchwork]{patchwork}ed} ggplot object if #' \code{combine = TRUE}; otherwise, a list of ggplot objects #' #' @importFrom patchwork wrap_plots #' @importFrom cowplot theme_cowplot #' @importFrom ggplot2 ggplot aes_string geom_point labs #' @export #' @concept visualization #' #' @examples #' data("pbmc_small") #' VizDimLoadings(object = pbmc_small) #' VizDimLoadings <- function( object, dims = 1:5, nfeatures = 30, col = 'blue', reduction = 'pca', projected = FALSE, balanced = FALSE, ncol = NULL, combine = TRUE ) { if (is.null(x = ncol)) { ncol <- 2 if (length(x = dims) == 1) { ncol <- 1 } if (length(x = dims) > 6) { ncol <- 3 } if (length(x = dims) > 9) { ncol <- 4 } } loadings <- Loadings(object = object[[reduction]], projected = projected) features <- lapply( X = dims, FUN = TopFeatures, object = object[[reduction]], nfeatures = nfeatures, projected = projected, balanced = balanced ) features <- lapply( X = features, FUN = unlist, use.names = FALSE ) loadings <- loadings[unlist(x = features), dims, drop = FALSE] names(x = features) <- colnames(x = loadings) <- as.character(x = dims) plots <- lapply( X = as.character(x = dims), FUN = function(i) { data.plot <- as.data.frame(x = loadings[features[[i]], i, drop = FALSE]) colnames(x = data.plot) <- paste0(Key(object = object[[reduction]]), i) data.plot$feature <- factor(x = rownames(x = data.plot), levels = rownames(x = data.plot)) plot <- ggplot( data = data.plot, mapping = aes_string(x = colnames(x = data.plot)[1], y = 'feature') ) + geom_point(col = col) + labs(y = NULL) + theme_cowplot() return(plot) } ) if (combine) { plots <- wrap_plots(plots, ncol = ncol) } return(plots) } #%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% # Exported utility functions #%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% #' Augments ggplot2-based plot with a PNG image. #' #' Creates "vector-friendly" plots. Does this by saving a copy of the plot as a PNG file, #' then adding the PNG image with \code{\link[ggplot2]{annotation_raster}} to a blank plot #' of the same dimensions as \code{plot}. Please note: original legends and axes will be lost #' during augmentation. 
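#' A typical use is embedding a \code{\link{DimPlot}} of many thousands of cells in
#' a PDF or EPS figure: the points are rasterized at the requested \code{dpi}, while
#' the plot title is re-added afterwards and so remains vector text.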
#' #' @param plot A ggplot object #' @param width,height Width and height of PNG version of plot #' @param dpi Plot resolution #' #' @return A ggplot object #' #' @importFrom png readPNG #' @importFrom ggplot2 ggplot_build ggsave ggplot aes_string geom_blank annotation_raster ggtitle #' #' @export #' @concept visualization #' #' @examples #' \dontrun{ #' data("pbmc_small") #' plot <- DimPlot(object = pbmc_small) #' AugmentPlot(plot = plot) #' } #' AugmentPlot <- function(plot, width = 10, height = 10, dpi = 100) { pbuild.params <- ggplot_build(plot = plot)$layout$panel_params[[1]] range.values <- c( pbuild.params$x.range, pbuild.params$y.range ) xyparams <- GetXYAesthetics( plot = plot, geom = class(x = plot$layers[[1]]$geom)[1] ) title <- plot$labels$title tmpfile <- tempfile(fileext = '.png') ggsave( filename = tmpfile, plot = plot + NoLegend() + NoAxes() + theme(plot.title = element_blank()), width = width, height = height, dpi = dpi ) img <- readPNG(source = tmpfile) file.remove(tmpfile) blank <- ggplot( data = plot$data, mapping = aes_string(x = xyparams$x, y = xyparams$y) ) + geom_blank() blank <- blank + plot$theme + ggtitle(label = title) blank <- blank + annotation_raster( raster = img, xmin = range.values[1], xmax = range.values[2], ymin = range.values[3], ymax = range.values[4] ) return(blank) } #' Automagically calculate a point size for ggplot2-based scatter plots #' #' It happens to look good #' #' @param data A data frame being passed to ggplot2 #' @param raster If TRUE, point size is set to 1 #' #' @return The "optimal" point size for visualizing these data #' #' @export #' @concept visualization #' #' @examples #' df <- data.frame(x = rnorm(n = 10000), y = runif(n = 10000)) #' AutoPointSize(data = df) #' AutoPointSize <- function(data, raster = NULL) { return(ifelse( test = isTRUE(x = raster), yes = 1, no = min(1583 / nrow(x = data), 1) )) } #' Determine text color based on background color #' #' @param background A vector of background colors; supports R color names and #' hexadecimal codes #' @param threshold Intensity threshold for light/dark cutoff; intensities #' greater than \code{theshold} yield \code{dark}, others yield \code{light} #' @param w3c Use \href{http://www.w3.org/TR/WCAG20/}{W3C} formula for calculating #' background text color; ignores \code{threshold} #' @param dark Color for dark text #' @param light Color for light text #' #' @return A named vector of either \code{dark} or \code{light}, depending on #' \code{background}; names of vector are \code{background} #' #' @export #' @concept visualization #' #' @source \url{https://stackoverflow.com/questions/3942878/how-to-decide-font-color-in-white-or-black-depending-on-background-color} #' #' @examples #' BGTextColor(background = c('black', 'white', '#E76BF3')) #' BGTextColor <- function( background, threshold = 186, w3c = FALSE, dark = 'black', light = 'white' ) { if (w3c) { luminance <- Luminance(color = background) threshold <- 179 return(ifelse( test = luminance > sqrt(x = 1.05 * 0.05) - 0.05, yes = dark, no = light )) } return(ifelse( test = Intensity(color = background) > threshold, yes = dark, no = light )) } #' @export #' @concept visualization #' #' @rdname CustomPalette #' @aliases BlackAndWhite #' #' @examples #' df <- data.frame(x = rnorm(n = 100, mean = 20, sd = 2), y = rbinom(n = 100, size = 100, prob = 0.2)) #' plot(df, col = BlackAndWhite()) #' BlackAndWhite <- function(mid = NULL, k = 50) { return(CustomPalette(low = "white", high = "black", mid = mid, k = k)) } #' @export #' @concept 
visualization #' #' @rdname CustomPalette #' @aliases BlueAndRed #' #' @examples #' df <- data.frame(x = rnorm(n = 100, mean = 20, sd = 2), y = rbinom(n = 100, size = 100, prob = 0.2)) #' plot(df, col = BlueAndRed()) #' BlueAndRed <- function(k = 50) { return(CustomPalette(low = "#313695" , high = "#A50026", mid = "#FFFFBF", k = k)) } #' Cell Selector #' #' Select points on a scatterplot and get information about them #' #' @param plot A ggplot2 plot #' @param object An optional Seurat object; if passes, will return an object #' with the identities of selected cells set to \code{ident} #' @param ident An optional new identity class to assign the selected cells #' @param ... Ignored #' #' @return If \code{object} is \code{NULL}, the names of the points selected; #' otherwise, a Seurat object with the selected cells identity classes set to #' \code{ident} #' #' @importFrom miniUI miniPage gadgetTitleBar miniTitleBarButton #' miniContentPanel #' @importFrom shiny fillRow plotOutput brushOpts reactiveValues observeEvent #' stopApp brushedPoints renderPlot runGadget #' #' @export #' @concept visualization #' #' @seealso \code{\link{DimPlot}} \code{\link{FeaturePlot}} #' #' @examples #' \dontrun{ #' data("pbmc_small") #' plot <- DimPlot(object = pbmc_small) #' # Follow instructions in the terminal to select points #' cells.located <- CellSelector(plot = plot) #' cells.located #' # Automatically set the identity class of selected cells and return a new Seurat object #' pbmc_small <- CellSelector(plot = plot, object = pbmc_small, ident = 'SelectedCells') #' } #' CellSelector <- function(plot, object = NULL, ident = 'SelectedCells', ...) { # Set up the gadget UI ui <- miniPage( gadgetTitleBar( title = "Cell Selector", left = miniTitleBarButton(inputId = "reset", label = "Reset") ), miniContentPanel( fillRow( plotOutput( outputId = "plot", height = '100%', brush = brushOpts( id = 'brush', delay = 100, delayType = 'debounce', clip = TRUE, resetOnNew = FALSE ) ) ), ) ) # Get some plot information if (inherits(x = plot, what = 'patchwork')) { if (length(x = plot$patches$plots)) { warning( "Multiple plots passed, using last plot", call. = FALSE, immediate. 
= TRUE ) } class(x = plot) <- grep( pattern = 'patchwork', x = class(x = plot), value = TRUE, invert = TRUE ) } xy.aes <- GetXYAesthetics(plot = plot) dark.theme <- !is.null(x = plot$theme$plot.background$fill) && plot$theme$plot.background$fill == 'black' plot.data <- GGpointToBase(plot = plot, do.plot = FALSE) plot.data$selected_ <- FALSE rownames(x = plot.data) <- rownames(x = plot$data) colnames(x = plot.data) <- gsub( pattern = '-', replacement = '.', x = colnames(x = plot.data) ) # Server function server <- function(input, output, session) { plot.env <- reactiveValues(data = plot.data) # Event handlers observeEvent( eventExpr = input$done, handlerExpr = { PlotBuild(data = plot.env$data, dark.theme = dark.theme) selected <- rownames(x = plot.data)[plot.env$data$selected_] if (inherits(x = object, what = 'Seurat')) { if (!all(selected %in% Cells(x = object))) { stop("Cannot find the selected cells in the Seurat object, please be sure you pass the same object used to generate the plot") } Idents(object = object, cells = selected) <- ident selected <- object } stopApp(returnValue = selected) } ) observeEvent( eventExpr = input$reset, handlerExpr = { plot.env$data <- plot.data session$resetBrush(brushId = 'brush') } ) observeEvent( eventExpr = input$brush, handlerExpr = { plot.env$data <- brushedPoints( df = plot.data, brush = input$brush, xvar = xy.aes$x, yvar = xy.aes$y, allRows = TRUE ) plot.env$data$color <- ifelse( test = plot.env$data$selected_, yes = '#DE2D26', no = '#C3C3C3' ) } ) # Render the plot output$plot <- renderPlot(expr = PlotBuild( data = plot.env$data, dark.theme = dark.theme )) } return(runGadget(app = ui, server = server)) } #' Move outliers towards center on dimension reduction plot #' #' @param object Seurat object #' @param reduction Name of DimReduc to adjust #' @param dims Dimensions to visualize #' @param group.by Group (color) cells in different ways (for example, orig.ident) #' @param outlier.sd Controls the outlier distance #' @param reduction.key Key for DimReduc that is returned #' #' @return Returns a DimReduc object with the modified embeddings #' #' @export #' @concept visualization #' #' @examples #' \dontrun{ #' data("pbmc_small") #' pbmc_small <- FindClusters(pbmc_small, resolution = 1.1) #' pbmc_small <- RunUMAP(pbmc_small, dims = 1:5) #' DimPlot(pbmc_small, reduction = "umap") #' pbmc_small[["umap_new"]] <- CollapseEmbeddingOutliers(pbmc_small, #' reduction = "umap", reduction.key = 'umap_', outlier.sd = 0.5) #' DimPlot(pbmc_small, reduction = "umap_new") #' } #' CollapseEmbeddingOutliers <- function( object, reduction = 'umap', dims = 1:2, group.by = 'ident', outlier.sd = 2, reduction.key = 'UMAP_' ) { embeddings <- Embeddings(object = object[[reduction]])[, dims] idents <- FetchData(object = object, vars = group.by) data.medians <- sapply(X = dims, FUN = function(x) { tapply(X = embeddings[, x], INDEX = idents, FUN = median) }) data.sd <- apply(X = data.medians, MARGIN = 2, FUN = sd) data.medians.scale <- as.matrix(x = scale(x = data.medians, center = TRUE, scale = TRUE)) data.medians.scale[abs(x = data.medians.scale) < outlier.sd] <- 0 data.medians.scale <- sign(x = data.medians.scale) * (abs(x = data.medians.scale) - outlier.sd) data.correct <- Sweep( x = data.medians.scale, MARGIN = 2, STATS = data.sd, FUN = "*" ) data.correct <- data.correct[abs(x = apply(X = data.correct, MARGIN = 1, FUN = min)) > 0, ] new.embeddings <- embeddings for (i in rownames(x = data.correct)) { cells.correct <- rownames(x = idents)[idents[, "ident"] == i] 
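    # Shift the embeddings of this outlier group's cells back towards the centre by
    # the excess beyond 'outlier.sd' standard deviations (converted to embedding
    # units) that was stored in 'data.correct' above.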
new.embeddings[cells.correct, ] <- Sweep( x = new.embeddings[cells.correct,], MARGIN = 2, STATS = data.correct[i, ], FUN = "-" ) } reduc <- CreateDimReducObject( embeddings = new.embeddings, loadings = Loadings(object = object[[reduction]]), assay = slot(object = object[[reduction]], name = "assay.used"), key = reduction.key ) return(reduc) } #' Combine ggplot2-based plots into a single plot #' #' @param plots A list of gg objects #' @param ncol Number of columns #' @param legend Combine legends into a single legend #' choose from 'right' or 'bottom'; pass 'none' to remove legends, or \code{NULL} #' to leave legends as they are #' @param ... Extra parameters passed to plot_grid #' #' @return A combined plot #' #' @importFrom cowplot plot_grid get_legend #' @export #' @concept visualization #' #' @examples #' data("pbmc_small") #' pbmc_small[['group']] <- sample( #' x = c('g1', 'g2'), #' size = ncol(x = pbmc_small), #' replace = TRUE #' ) #' plot1 <- FeaturePlot( #' object = pbmc_small, #' features = 'MS4A1', #' split.by = 'group' #' ) #' plot2 <- FeaturePlot( #' object = pbmc_small, #' features = 'FCN1', #' split.by = 'group' #' ) #' CombinePlots( #' plots = list(plot1, plot2), #' legend = 'none', #' nrow = length(x = unique(x = pbmc_small[['group', drop = TRUE]])) #' ) #' CombinePlots <- function(plots, ncol = NULL, legend = NULL, ...) { .Deprecated(msg = "CombinePlots is being deprecated. Plots should now be combined using the patchwork system.") plots.combined <- if (length(x = plots) > 1) { if (!is.null(x = legend)) { if (legend != 'none') { plot.legend <- get_legend(plot = plots[[1]] + theme(legend.position = legend)) } plots <- lapply( X = plots, FUN = function(x) { return(x + NoLegend()) } ) } plots.combined <- plot_grid( plotlist = plots, ncol = ncol, align = 'hv', ... ) if (!is.null(x = legend)) { plots.combined <- switch( EXPR = legend, 'bottom' = plot_grid( plots.combined, plot.legend, ncol = 1, rel_heights = c(1, 0.2) ), 'right' = plot_grid( plots.combined, plot.legend, rel_widths = c(3, 0.3) ), plots.combined ) } plots.combined } else { plots[[1]] } return(plots.combined) } #' Create a custom color palette #' #' Creates a custom color palette based on low, middle, and high color values #' #' @param low low color #' @param high high color #' @param mid middle color. Optional. 
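#' For example, \code{CustomPalette(low = "blue", high = "red", mid = "white", k = 100)}
#' returns a 100-color blue-to-white-to-red gradient.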
#' @param k number of steps (colors levels) to include between low and high values #' #' @return A color palette for plotting #' #' @importFrom grDevices col2rgb rgb #' @export #' @concept visualization #' #' @rdname CustomPalette #' @examples #' myPalette <- CustomPalette() #' myPalette #' CustomPalette <- function( low = "white", high = "red", mid = NULL, k = 50 ) { low <- col2rgb(col = low) / 255 high <- col2rgb(col = high) / 255 if (is.null(x = mid)) { r <- seq(from = low[1], to = high[1], len = k) g <- seq(from = low[2], to = high[2], len = k) b <- seq(from = low[3], to = high[3], len = k) } else { k2 <- round(x = k / 2) mid <- col2rgb(col = mid) / 255 r <- c( seq(from = low[1], to = mid[1], len = k2), seq(from = mid[1], to = high[1], len = k2) ) g <- c( seq(from = low[2], to = mid[2], len = k2), seq(from = mid[2], to = high[2],len = k2) ) b <- c( seq(from = low[3], to = mid[3], len = k2), seq(from = mid[3], to = high[3], len = k2) ) } return(rgb(red = r, green = g, blue = b)) } #' Discrete colour palettes from the pals package #' #' These are included here because pals depends on a number of compiled #' packages, and this can lead to increases in run time for Travis, #' and generally should be avoided when possible. #' #' These palettes are a much better default for data with many classes #' than the default ggplot2 palette. #' #' Many thanks to Kevin Wright for writing the pals package. #' #' @param n Number of colours to be generated. #' @param palette Options are #' "alphabet", "alphabet2", "glasbey", "polychrome", and "stepped". #' Can be omitted and the function will use the one based on the requested n. #' #' @return A vector of colors #' #' @details #' Taken from the pals package (Licence: GPL-3). #' \url{https://cran.r-project.org/package=pals} #' Credit: Kevin Wright #' #' @export #' @concept visualization #' DiscretePalette <- function(n, palette = NULL) { palettes <- list( alphabet = c( "#F0A0FF", "#0075DC", "#993F00", "#4C005C", "#191919", "#005C31", "#2BCE48", "#FFCC99", "#808080", "#94FFB5", "#8F7C00", "#9DCC00", "#C20088", "#003380", "#FFA405", "#FFA8BB", "#426600", "#FF0010", "#5EF1F2", "#00998F", "#E0FF66", "#740AFF", "#990000", "#FFFF80", "#FFE100", "#FF5005" ), alphabet2 = c( "#AA0DFE", "#3283FE", "#85660D", "#782AB6", "#565656", "#1C8356", "#16FF32", "#F7E1A0", "#E2E2E2", "#1CBE4F", "#C4451C", "#DEA0FD", "#FE00FA", "#325A9B", "#FEAF16", "#F8A19F", "#90AD1C", "#F6222E", "#1CFFCE", "#2ED9FF", "#B10DA1", "#C075A6", "#FC1CBF", "#B00068", "#FBE426", "#FA0087" ), glasbey = c( "#0000FF", "#FF0000", "#00FF00", "#000033", "#FF00B6", "#005300", "#FFD300", "#009FFF", "#9A4D42", "#00FFBE", "#783FC1", "#1F9698", "#FFACFD", "#B1CC71", "#F1085C", "#FE8F42", "#DD00FF", "#201A01", "#720055", "#766C95", "#02AD24", "#C8FF00", "#886C00", "#FFB79F", "#858567", "#A10300", "#14F9FF", "#00479E", "#DC5E93", "#93D4FF", "#004CFF", "#F2F318" ), polychrome = c( "#5A5156", "#E4E1E3", "#F6222E", "#FE00FA", "#16FF32", "#3283FE", "#FEAF16", "#B00068", "#1CFFCE", "#90AD1C", "#2ED9FF", "#DEA0FD", "#AA0DFE", "#F8A19F", "#325A9B", "#C4451C", "#1C8356", "#85660D", "#B10DA1", "#FBE426", "#1CBE4F", "#FA0087", "#FC1CBF", "#F7E1A0", "#C075A6", "#782AB6", "#AAF400", "#BDCDFF", "#822E1C", "#B5EFB5", "#7ED7D1", "#1C7F93", "#D85FF7", "#683B79", "#66B0FF", "#3B00FB" ), stepped = c( "#990F26", "#B33E52", "#CC7A88", "#E6B8BF", "#99600F", "#B3823E", "#CCAA7A", "#E6D2B8", "#54990F", "#78B33E", "#A3CC7A", "#CFE6B8", "#0F8299", "#3E9FB3", "#7ABECC", "#B8DEE6", "#3D0F99", "#653EB3", "#967ACC", "#C7B8E6", "#333333", 
"#666666", "#999999", "#CCCCCC" ) ) if (is.null(x = palette)) { if (n <= 26) { palette <- "alphabet" } else if (n <= 32) { palette <- "glasbey" } else { palette <- "polychrome" } } palette.vec <- palettes[[palette]] if (n > length(x = palette.vec)) { warning("Not enough colours in specified palette") } palette.vec[seq_len(length.out = n)] } #' @rdname CellSelector #' @export #' @concept visualization #' FeatureLocator <- function(plot, ...) { .Defunct( new = 'CellSelector', package = 'Seurat', msg = "'FeatureLocator' has been replaced by 'CellSelector'" ) } #' Hover Locator #' #' Get quick information from a scatterplot by hovering over points #' #' @param plot A ggplot2 plot #' @param information An optional dataframe or matrix of extra information to be displayed on hover #' @param dark.theme Plot using a dark theme? #' @param axes Display or hide x- and y-axes #' @param ... Extra parameters to be passed to \code{\link[plotly]{layout}} #' #' @importFrom ggplot2 ggplot_build #' @importFrom plotly plot_ly layout add_annotations #' @export #' @concept visualization #' #' @seealso \code{\link[plotly]{layout}} \code{\link[ggplot2]{ggplot_build}} #' \code{\link{DimPlot}} \code{\link{FeaturePlot}} #' #' @examples #' \dontrun{ #' data("pbmc_small") #' plot <- DimPlot(object = pbmc_small) #' HoverLocator(plot = plot, information = FetchData(object = pbmc_small, vars = 'percent.mito')) #' } #' HoverLocator <- function( plot, information = NULL, axes = TRUE, dark.theme = FALSE, ... ) { # Use GGpointToBase because we already have ggplot objects # with colors (which are annoying in plotly) plot.build <- suppressWarnings(expr = GGpointToPlotlyBuild( plot = plot, information = information, ... )) data <- ggplot_build(plot = plot)$plot$data # Set up axis labels here # Also, a bunch of stuff to get axis lines done properly if (axes) { xaxis <- list( title = names(x = data)[1], showgrid = FALSE, zeroline = FALSE, showline = TRUE ) yaxis <- list( title = names(x = data)[2], showgrid = FALSE, zeroline = FALSE, showline = TRUE ) } else { xaxis <- yaxis <- list(visible = FALSE) } # Check for dark theme if (dark.theme) { title <- list(color = 'white') xaxis <- c(xaxis, color = 'white') yaxis <- c(yaxis, color = 'white') plotbg <- 'black' } else { title = list(color = 'black') plotbg = 'white' } # The `~' means pull from the data passed (this is why we reset the names) # Use I() to get plotly to accept the colors from the data as is # Set hoverinfo to 'text' to override the default hover information # rather than append to it p <- layout( p = plot_ly( data = plot.build, x = ~x, y = ~y, type = 'scatter', mode = 'markers', color = ~I(color), hoverinfo = 'text', text = ~feature ), xaxis = xaxis, yaxis = yaxis, title = plot$labels$title, titlefont = title, paper_bgcolor = plotbg, plot_bgcolor = plotbg, ... 
) # Add labels label.layer <- which(x = sapply( X = plot$layers, FUN = function(x) { return(inherits(x = x$geom, what = c('GeomText', 'GeomTextRepel'))) } )) if (length(x = label.layer) == 1) { p <- add_annotations( p = p, x = plot$layers[[label.layer]]$data[, 1], y = plot$layers[[label.layer]]$data[, 2], xref = "x", yref = "y", text = plot$layers[[label.layer]]$data[, 3], xanchor = 'right', showarrow = FALSE, font = list(size = plot$layers[[label.layer]]$aes_params$size * 4) ) } return(p) } #' Get the intensity and/or luminance of a color #' #' @param color A vector of colors #' #' @return A vector of intensities/luminances for each color #' #' @name contrast-theory #' @rdname contrast-theory #' #' @importFrom grDevices col2rgb #' #' @export #' @concept visualization #' #' @source \url{https://stackoverflow.com/questions/3942878/how-to-decide-font-color-in-white-or-black-depending-on-background-color} #' #' @examples #' Intensity(color = c('black', 'white', '#E76BF3')) #' Intensity <- function(color) { intensities <- apply( X = col2rgb(col = color), MARGIN = 2, FUN = function(col) { col <- rbind(as.vector(x = col), c(0.299, 0.587, 0.114)) return(sum(apply(X = col, MARGIN = 2, FUN = prod))) } ) names(x = intensities) <- color return(intensities) } #' Label clusters on a ggplot2-based scatter plot #' #' @param plot A ggplot2-based scatter plot #' @param id Name of variable used for coloring scatter plot #' @param clusters Vector of cluster ids to label #' @param labels Custom labels for the clusters #' @param split.by Split labels by some grouping label, useful when using #' \code{\link[ggplot2]{facet_wrap}} or \code{\link[ggplot2]{facet_grid}} #' @param repel Use \code{geom_text_repel} to create nicely-repelled labels #' @param geom Name of geom to get X/Y aesthetic names for #' @param box Use geom_label/geom_label_repel (includes a box around the text #' labels) #' @param position How to place the label if repel = FALSE. If "median", place #' the label at the median position. If "nearest" place the label at the #' position of the nearest data point to the median. #' @param ... Extra parameters to \code{\link[ggrepel]{geom_text_repel}}, such as \code{size} #' #' @return A ggplot2-based scatter plot with cluster labels #' #' @importFrom stats median na.omit #' @importFrom ggrepel geom_text_repel geom_label_repel #' @importFrom ggplot2 aes_string geom_text geom_label layer_scales #' @importFrom RANN nn2 #' #' @export #' @concept visualization #' #' @seealso \code{\link[ggrepel]{geom_text_repel}} \code{\link[ggplot2]{geom_text}} #' #' @examples #' data("pbmc_small") #' plot <- DimPlot(object = pbmc_small) #' LabelClusters(plot = plot, id = 'ident') #' LabelClusters <- function( plot, id, clusters = NULL, labels = NULL, split.by = NULL, repel = TRUE, box = FALSE, geom = 'GeomPoint', position = "median", ... 
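  # Note on 'position' (a short summary of the logic implemented in the body
  # below): "median" places each label at the per-cluster median of the plotted
  # coordinates, while "nearest" snaps that label to the closest actual data
  # point, located with RANN::nn2, which helps when a cluster's median falls in
  # an empty region of the plot.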
) { xynames <- unlist(x = GetXYAesthetics(plot = plot, geom = geom), use.names = TRUE) if (!id %in% colnames(x = plot$data)) { stop("Cannot find variable ", id, " in plotting data") } if (!is.null(x = split.by) && !split.by %in% colnames(x = plot$data)) { warning("Cannot find splitting variable ", id, " in plotting data") split.by <- NULL } data <- plot$data[, c(xynames, id, split.by)] possible.clusters <- as.character(x = na.omit(object = unique(x = data[, id]))) groups <- clusters %||% as.character(x = na.omit(object = unique(x = data[, id]))) if (any(!groups %in% possible.clusters)) { stop("The following clusters were not found: ", paste(groups[!groups %in% possible.clusters], collapse = ",")) } pb <- ggplot_build(plot = plot) if (geom == 'GeomSpatial') { xrange.save <- layer_scales(plot = plot)$x$range$range yrange.save <- layer_scales(plot = plot)$y$range$range data[, xynames["y"]] = max(data[, xynames["y"]]) - data[, xynames["y"]] + min(data[, xynames["y"]]) if (!pb$plot$plot_env$crop) { y.transform <- c(0, nrow(x = pb$plot$plot_env$image)) - pb$layout$panel_params[[1]]$y.range data[, xynames["y"]] <- data[, xynames["y"]] + sum(y.transform) } } data <- cbind(data, color = pb$data[[1]][[1]]) labels.loc <- lapply( X = groups, FUN = function(group) { data.use <- data[data[, id] == group, , drop = FALSE] data.medians <- if (!is.null(x = split.by)) { do.call( what = 'rbind', args = lapply( X = unique(x = data.use[, split.by]), FUN = function(split) { medians <- apply( X = data.use[data.use[, split.by] == split, xynames, drop = FALSE], MARGIN = 2, FUN = median, na.rm = TRUE ) medians <- as.data.frame(x = t(x = medians)) medians[, split.by] <- split return(medians) } ) ) } else { as.data.frame(x = t(x = apply( X = data.use[, xynames, drop = FALSE], MARGIN = 2, FUN = median, na.rm = TRUE ))) } data.medians[, id] <- group data.medians$color <- data.use$color[1] return(data.medians) } ) if (position == "nearest") { labels.loc <- lapply(X = labels.loc, FUN = function(x) { group.data <- data[as.character(x = data[, id]) == as.character(x[3]), ] nearest.point <- nn2(data = group.data[, 1:2], query = as.matrix(x = x[c(1,2)]), k = 1)$nn.idx x[1:2] <- group.data[nearest.point, 1:2] return(x) }) } labels.loc <- do.call(what = 'rbind', args = labels.loc) labels.loc[, id] <- factor(x = labels.loc[, id], levels = levels(data[, id])) labels <- labels %||% groups if (length(x = unique(x = labels.loc[, id])) != length(x = labels)) { stop("Length of labels (", length(x = labels), ") must be equal to the number of clusters being labeled (", length(x = labels.loc), ").") } names(x = labels) <- groups for (group in groups) { labels.loc[labels.loc[, id] == group, id] <- labels[group] } if (box) { geom.use <- ifelse(test = repel, yes = geom_label_repel, no = geom_label) plot <- plot + geom.use( data = labels.loc, mapping = aes_string(x = xynames['x'], y = xynames['y'], label = id, fill = id), show.legend = FALSE, ... ) + scale_fill_manual(values = labels.loc$color[order(labels.loc[, id])]) } else { geom.use <- ifelse(test = repel, yes = geom_text_repel, no = geom_text) plot <- plot + geom.use( data = labels.loc, mapping = aes_string(x = xynames['x'], y = xynames['y'], label = id), show.legend = FALSE, ... 
) } # restore old axis ranges if (geom == 'GeomSpatial') { plot <- suppressMessages(expr = plot + coord_fixed(xlim = xrange.save, ylim = yrange.save)) } return(plot) } #' Add text labels to a ggplot2 plot #' #' @param plot A ggplot2 plot with a GeomPoint layer #' @param points A vector of points to label; if \code{NULL}, will use all points in the plot #' @param labels A vector of labels for the points; if \code{NULL}, will use #' rownames of the data provided to the plot at the points selected #' @param repel Use \code{geom_text_repel} to create a nicely-repelled labels; this #' is slow when a lot of points are being plotted. If using \code{repel}, set \code{xnudge} #' and \code{ynudge} to 0 #' @param xnudge,ynudge Amount to nudge X and Y coordinates of labels by #' @param ... Extra parameters passed to \code{geom_text} #' #' @return A ggplot object #' #' @importFrom ggrepel geom_text_repel #' @importFrom ggplot2 geom_text aes_string #' @export #' @concept visualization #' #' @aliases Labeler #' @seealso \code{\link[ggplot2]{geom_text}} #' #' @examples #' data("pbmc_small") #' ff <- TopFeatures(object = pbmc_small[['pca']]) #' cc <- TopCells(object = pbmc_small[['pca']]) #' plot <- FeatureScatter(object = pbmc_small, feature1 = ff[1], feature2 = ff[2]) #' LabelPoints(plot = plot, points = cc) #' LabelPoints <- function( plot, points, labels = NULL, repel = FALSE, xnudge = 0.3, ynudge = 0.05, ... ) { xynames <- GetXYAesthetics(plot = plot) points <- points %||% rownames(x = plot$data) if (is.numeric(x = points)) { points <- rownames(x = plot$data) } points <- intersect(x = points, y = rownames(x = plot$data)) if (length(x = points) == 0) { stop("Cannot find points provided") } labels <- labels %||% points labels <- as.character(x = labels) label.data <- plot$data[points, ] label.data$labels <- labels geom.use <- ifelse(test = repel, yes = geom_text_repel, no = geom_text) if (repel) { if (!all(c(xnudge, ynudge) == 0)) { message("When using repel, set xnudge and ynudge to 0 for optimal results") } } plot <- plot + geom.use( mapping = aes_string(x = xynames$x, y = xynames$y, label = 'labels'), data = label.data, nudge_x = xnudge, nudge_y = ynudge, ... 
) return(plot) } #' @name contrast-theory #' @rdname contrast-theory #' #' @importFrom grDevices col2rgb #' #' @export #' @concept visualization #' #' @examples #' Luminance(color = c('black', 'white', '#E76BF3')) #' Luminance <- function(color) { luminance <- apply( X = col2rgb(col = color), MARGIN = 2, function(col) { col <- as.vector(x = col) / 255 col <- sapply( X = col, FUN = function(x) { return(ifelse( test = x <= 0.03928, yes = x / 12.92, no = ((x + 0.055) / 1.055) ^ 2.4 )) } ) col <- rbind(col, c(0.2126, 0.7152, 0.0722)) return(sum(apply(X = col, MARGIN = 2, FUN = prod))) } ) names(x = luminance) <- color return(luminance) } #' @export #' @concept visualization #' #' @rdname CustomPalette #' @aliases PurpleAndYellow #' #' @examples #' df <- data.frame(x = rnorm(n = 100, mean = 20, sd = 2), y = rbinom(n = 100, size = 100, prob = 0.2)) #' plot(df, col = PurpleAndYellow()) #' PurpleAndYellow <- function(k = 50) { return(CustomPalette(low = "magenta", high = "yellow", mid = "black", k = k)) } #%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% # Seurat themes #%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% #' Seurat Themes #' #' Various themes to be applied to ggplot2-based plots #' \describe{ #' \item{\code{SeuratTheme}}{The curated Seurat theme, consists of ...} #' \item{\code{DarkTheme}}{A dark theme, axes and text turn to white, the background becomes black} #' \item{\code{NoAxes}}{Removes axis lines, text, and ticks} #' \item{\code{NoLegend}}{Removes the legend} #' \item{\code{FontSize}}{Sets axis and title font sizes} #' \item{\code{NoGrid}}{Removes grid lines} #' \item{\code{SeuratAxes}}{Set Seurat-style axes} #' \item{\code{SpatialTheme}}{A theme designed for spatial visualizations (eg \code{\link{PolyFeaturePlot}}, \code{\link{PolyDimPlot}})} #' \item{\code{RestoreLegend}}{Restore a legend after removal} #' \item{\code{RotatedAxis}}{Rotate X axis text 45 degrees} #' \item{\code{BoldTitle}}{Enlarges and emphasizes the title} #' } #' #' @param ... Extra parameters to be passed to \code{theme} #' #' @return A ggplot2 theme object #' #' @export #' @concept visualization #' #' @rdname SeuratTheme #' @seealso \code{\link[ggplot2]{theme}} #' @aliases SeuratTheme #' SeuratTheme <- function() { return(DarkTheme() + NoLegend() + NoGrid() + SeuratAxes()) } #' @importFrom ggplot2 theme element_text #' #' @rdname SeuratTheme #' @export #' @concept visualization #' #' @aliases CenterTitle #' CenterTitle <- function(...) { return(theme(plot.title = element_text(hjust = 0.5), validate = TRUE, ...)) } #' @importFrom ggplot2 theme element_rect element_text element_line margin #' @export #' @concept visualization #' #' @rdname SeuratTheme #' @aliases DarkTheme #' #' @examples #' # Generate a plot with a dark theme #' library(ggplot2) #' df <- data.frame(x = rnorm(n = 100, mean = 20, sd = 2), y = rbinom(n = 100, size = 100, prob = 0.2)) #' p <- ggplot(data = df, mapping = aes(x = x, y = y)) + geom_point(mapping = aes(color = 'red')) #' p + DarkTheme(legend.position = 'none') #' DarkTheme <- function(...) 
{ # Some constants for easier changing in the future black.background <- element_rect(fill = 'black') black.background.no.border <- element_rect(fill = 'black', size = 0) font.margin <- 4 white.text <- element_text( colour = 'white', margin = margin( t = font.margin, r = font.margin, b = font.margin, l = font.margin ) ) white.line <- element_line(colour = 'white', size = 1) no.line <- element_line(size = 0) # Create the dark theme dark.theme <- theme( # Set background colors plot.background = black.background, panel.background = black.background, legend.background = black.background, legend.box.background = black.background.no.border, legend.key = black.background.no.border, strip.background = element_rect(fill = 'grey50', colour = NA), # Set text colors plot.title = white.text, plot.subtitle = white.text, axis.title = white.text, axis.text = white.text, legend.title = white.text, legend.text = white.text, strip.text = white.text, # Set line colors axis.line.x = white.line, axis.line.y = white.line, panel.grid = no.line, panel.grid.minor = no.line, # Validate the theme validate = TRUE, # Extra parameters ... ) return(dark.theme) } #' @param x.text,y.text X and Y axis text sizes #' @param x.title,y.title X and Y axis title sizes #' @param main Plot title size #' #' @importFrom ggplot2 theme element_text #' @export #' @concept visualization #' #' @rdname SeuratTheme #' @aliases FontSize #' FontSize <- function( x.text = NULL, y.text = NULL, x.title = NULL, y.title = NULL, main = NULL, ... ) { font.size <- theme( # Set font sizes axis.text.x = element_text(size = x.text), axis.text.y = element_text(size = y.text), axis.title.x = element_text(size = x.title), axis.title.y = element_text(size = y.title), plot.title = element_text(size = main), # Validate the theme validate = TRUE, # Extra parameters ... ) } #' @param keep.text Keep axis text #' @param keep.ticks Keep axis ticks #' #' @importFrom ggplot2 theme element_blank #' @export #' @concept visualization #' #' @rdname SeuratTheme #' @aliases NoAxes #' #' @examples #' # Generate a plot with no axes #' library(ggplot2) #' df <- data.frame(x = rnorm(n = 100, mean = 20, sd = 2), y = rbinom(n = 100, size = 100, prob = 0.2)) #' p <- ggplot(data = df, mapping = aes(x = x, y = y)) + geom_point(mapping = aes(color = 'red')) #' p + NoAxes() #' NoAxes <- function(..., keep.text = FALSE, keep.ticks = FALSE) { blank <- element_blank() no.axes.theme <- theme( # Remove the axis elements axis.line.x = blank, axis.line.y = blank, # Validate the theme validate = TRUE, ... ) if (!keep.text) { no.axes.theme <- no.axes.theme + theme( axis.text.x = blank, axis.text.y = blank, axis.title.x = blank, axis.title.y = blank, validate = TRUE, ... ) } if (!keep.ticks){ no.axes.theme <- no.axes.theme + theme( axis.ticks.x = blank, axis.ticks.y = blank, validate = TRUE, ... ) } return(no.axes.theme) } #' @importFrom ggplot2 theme #' @export #' @concept visualization #' #' @rdname SeuratTheme #' @aliases NoLegend #' #' @examples #' # Generate a plot with no legend #' library(ggplot2) #' df <- data.frame(x = rnorm(n = 100, mean = 20, sd = 2), y = rbinom(n = 100, size = 100, prob = 0.2)) #' p <- ggplot(data = df, mapping = aes(x = x, y = y)) + geom_point(mapping = aes(color = 'red')) #' p + NoLegend() #' NoLegend <- function(...) { no.legend.theme <- theme( # Remove the legend legend.position = 'none', # Validate the theme validate = TRUE, ... 
  )
  return(no.legend.theme)
}

#' @importFrom ggplot2 theme element_blank
#' @export
#' @concept visualization
#'
#' @rdname SeuratTheme
#' @aliases NoGrid
#'
#' @examples
#' # Generate a plot with no grid lines
#' library(ggplot2)
#' df <- data.frame(x = rnorm(n = 100, mean = 20, sd = 2), y = rbinom(n = 100, size = 100, prob = 0.2))
#' p <- ggplot(data = df, mapping = aes(x = x, y = y)) + geom_point(mapping = aes(color = 'red'))
#' p + NoGrid()
#'
NoGrid <- function(...) {
  no.grid.theme <- theme(
    # Set grid lines to blank
    panel.grid.major = element_blank(),
    panel.grid.minor = element_blank(),
    # Validate the theme
    validate = TRUE,
    ...
  )
  return(no.grid.theme)
}

#' @importFrom ggplot2 theme element_text
#' @export
#' @concept visualization
#'
#' @rdname SeuratTheme
#' @aliases SeuratAxes
#'
SeuratAxes <- function(...) {
  axes.theme <- theme(
    # Set axis things
    axis.title = element_text(face = 'bold', color = '#990000', size = 16),
    axis.text = element_text(vjust = 0.5, size = 12),
    # Validate the theme
    validate = TRUE,
    ...
  )
  return(axes.theme)
}

#' @export
#' @concept visualization
#'
#' @rdname SeuratTheme
#' @aliases SpatialTheme
#'
SpatialTheme <- function(...) {
  return(DarkTheme() + NoAxes() + NoGrid() + NoLegend(...))
}

#' @param position A position to restore the legend to
#'
#' @importFrom ggplot2 theme
#' @export
#' @concept visualization
#'
#' @rdname SeuratTheme
#' @aliases RestoreLegend
#'
RestoreLegend <- function(..., position = 'right') {
  restored.theme <- theme(
    # Restore legend position, honoring the 'position' argument
    legend.position = position,
    # Validate the theme
    validate = TRUE,
    ...
  )
  return(restored.theme)
}

#' @importFrom ggplot2 theme element_text
#' @export
#' @concept visualization
#'
#' @rdname SeuratTheme
#' @aliases RotatedAxis
#'
RotatedAxis <- function(...) {
  rotated.theme <- theme(
    # Rotate X axis text
    axis.text.x = element_text(angle = 45, hjust = 1),
    # Validate the theme
    validate = TRUE,
    ...
  )
  return(rotated.theme)
}

#' @importFrom ggplot2 theme element_text
#' @export
#' @concept visualization
#'
#' @rdname SeuratTheme
#' @aliases BoldTitle
#'
BoldTitle <- function(...) {
  bold.theme <- theme(
    # Make the title bold
    plot.title = element_text(size = 20, face = 'bold'),
    # Validate the theme
    validate = TRUE,
    ...
  )
  return(bold.theme)
}

#' @importFrom ggplot2 theme element_rect
#' @export
#' @concept visualization
#'
#' @rdname SeuratTheme
#' @aliases WhiteBackground
#'
WhiteBackground <- function(...) {
  white.rect = element_rect(fill = 'white')
  white.theme <- theme(
    # Make the plot, panel, and legend key backgrounds white
    plot.background = white.rect,
    panel.background = white.rect,
    legend.key = white.rect,
    # Validate the theme
    validate = TRUE,
    ...
) return(white.theme) } #%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% # Internal #%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% # Calculate bandwidth for use in ggplot2-based smooth scatter plots # # Inspired by MASS::bandwidth.nrd and graphics:::.smoothScatterCalcDensity # # @param data A two-column data frame with X and Y coordinates for a plot # # @return The calculated bandwidth # #' @importFrom stats quantile var # Bandwidth <- function(data) { r <- diff(x = apply( X = data, MARGIN = 2, FUN = quantile, probs = c(0.05, 0.95), na.rm = TRUE, names = FALSE )) h <- abs(x = r[2L] - r[1L]) / 1.34 h <- ifelse(test = h == 0, yes = 1, no = h) bandwidth <- 4 * 1.06 * min(sqrt(x = apply(X = data, MARGIN = 2, FUN = var)), h) * nrow(x = data) ^ (-0.2) return(bandwidth) } # Blend expression values together # # @param data A two-column data frame with expression values for two features # # @return A three-column data frame with transformed and blended expression values # BlendExpression <- function(data) { if (ncol(x = data) != 2) { stop("'BlendExpression' only blends two features") } features <- colnames(x = data) data <- as.data.frame(x = apply( X = data, MARGIN = 2, FUN = function(x) { return(round(x = 9 * (x - min(x)) / (max(x) - min(x)))) } )) data[, 3] <- data[, 1] + data[, 2] * 10 colnames(x = data) <- c(features, paste(features, collapse = '_')) for (i in 1:ncol(x = data)) { data[, i] <- factor(x = data[, i]) } return(data) } # Create a heatmap of blended colors # # @param color.matrix A color matrix of blended colors # # @return A ggplot object # #' @importFrom grid unit #' @importFrom cowplot theme_cowplot #' @importFrom ggplot2 ggplot aes_string scale_fill_manual geom_raster #' theme scale_y_continuous scale_x_continuous scale_fill_manual # # @seealso \code{\link{BlendMatrix}} # BlendMap <- function(color.matrix) { color.heat <- matrix( data = 1:prod(dim(x = color.matrix)) - 1, nrow = nrow(x = color.matrix), ncol = ncol(x = color.matrix), dimnames = list( 1:nrow(x = color.matrix), 1:ncol(x = color.matrix) ) ) xbreaks <- seq.int(from = 0, to = nrow(x = color.matrix), by = 2) ybreaks <- seq.int(from = 0, to = ncol(x = color.matrix), by = 2) color.heat <- Melt(x = color.heat) color.heat$rows <- as.numeric(x = as.character(x = color.heat$rows)) color.heat$cols <- as.numeric(x = as.character(x = color.heat$cols)) color.heat$vals <- factor(x = color.heat$vals) plot <- ggplot( data = color.heat, mapping = aes_string(x = 'rows', y = 'cols', fill = 'vals') ) + geom_raster(show.legend = FALSE) + theme(plot.margin = unit(x = rep.int(x = 0, times = 4), units = 'cm')) + scale_x_continuous(breaks = xbreaks, expand = c(0, 0), labels = xbreaks) + scale_y_continuous(breaks = ybreaks, expand = c(0, 0), labels = ybreaks) + scale_fill_manual(values = as.vector(x = color.matrix)) + theme_cowplot() return(plot) } # Create a color matrix of blended colors # # @param n Dimensions of blended matrix (n x n) # @param col.threshold The color cutoff from weak signal to strong signal; ranges from 0 to 1. # @param two.colors Two colors used for the blend expression. 
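# @param negative.color Color used as the baseline that the two blend colors
# are mixed against for cells with little or no signal in either feature
#
# Rough usage sketch (illustrative only, not run; these helpers are internal):
#   cm <- BlendMatrix(n = 10, col.threshold = 0.5, two.colors = c("#ff0000", "#00ff00"))
#   # cm is a 10 x 10 character matrix of hex colors; BlendMap(color.matrix = cm)
#   # draws it as the small legend-style heatmap used for blended feature plots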
# # @return An n x n matrix of blended colors # #' @importFrom grDevices col2rgb # BlendMatrix <- function( n = 10, col.threshold = 0.5, two.colors = c("#ff0000", "#00ff00"), negative.color = "black" ) { if (0 > col.threshold || col.threshold > 1) { stop("col.threshold must be between 0 and 1") } C0 <- as.vector(col2rgb(negative.color, alpha = TRUE)) C1 <- as.vector(col2rgb(two.colors[1], alpha = TRUE)) C2 <- as.vector(col2rgb(two.colors[2], alpha = TRUE)) blend_alpha <- (C1[4] + C2[4])/2 C0 <- C0[-4] C1 <- C1[-4] C2 <- C2[-4] merge.weight <- min(255 / (C1 + C2 + C0 + 0.01)) sigmoid <- function(x) { return(1 / (1 + exp(-x))) } blend_color <- function( i, j, col.threshold, n, C0, C1, C2, alpha, merge.weight ) { c.min <- sigmoid(5 * (1 / n - col.threshold)) c.max <- sigmoid(5 * (1 - col.threshold)) c1_weight <- sigmoid(5 * (i / n - col.threshold)) c2_weight <- sigmoid(5 * (j / n - col.threshold)) c0_weight <- sigmoid(5 * ((i + j) / (2 * n) - col.threshold)) c1_weight <- (c1_weight - c.min) / (c.max - c.min) c2_weight <- (c2_weight - c.min) / (c.max - c.min) c0_weight <- (c0_weight - c.min) / (c.max - c.min) C1_length <- sqrt(sum((C1 - C0) ** 2)) C2_length <- sqrt(sum((C2 - C0) ** 2)) C1_unit <- (C1 - C0) / C1_length C2_unit <- (C2 - C0) / C2_length C1_weight <- C1_unit * c1_weight C2_weight <- C2_unit * c2_weight C_blend <- C1_weight * (i - 1) * C1_length / (n - 1) + C2_weight * (j - 1) * C2_length / (n - 1) + (i - 1) * (j - 1) * c0_weight * C0 / (n - 1) ** 2 + C0 C_blend[C_blend > 255] <- 255 C_blend[C_blend < 0] <- 0 return(rgb( red = C_blend[1], green = C_blend[2], blue = C_blend[3], alpha = alpha, maxColorValue = 255 )) } blend_matrix <- matrix(nrow = n, ncol = n) for (i in 1:n) { for (j in 1:n) { blend_matrix[i, j] <- blend_color( i = i, j = j, col.threshold = col.threshold, n = n, C0 = C0, C1 = C1, C2 = C2, alpha = blend_alpha, merge.weight = merge.weight ) } } return(blend_matrix) } # Convert R colors to hexadecimal # # @param ... R colors # # @return The hexadecimal representations of input colors # #' @importFrom grDevices rgb col2rgb # Col2Hex <- function(...) 
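# Illustrative example (not run): named colors and hex colors may be mixed, and
# an 8-digit hex color keeps its alpha channel, e.g. Col2Hex('red', '#00FF0080')
# is expected to return c("#FF0000FF", "#00FF0080").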
{
  colors <- as.character(x = c(...))
  alpha <- rep.int(x = 255, times = length(x = colors))
  if (sum(sapply(X = colors, FUN = grepl, pattern = '^#')) != 0) {
    hex <- colors[which(x = grepl(pattern = '^#', x = colors))]
    hex.length <- sapply(X = hex, FUN = nchar)
    if (9 %in% hex.length) {
      hex.alpha <- hex[which(x = hex.length == 9)]
      hex.vals <- sapply(X = hex.alpha, FUN = substr, start = 8, stop = 9)
      dec.vals <- sapply(X = hex.vals, FUN = strtoi, base = 16)
      alpha[match(x = hex[which(x = hex.length == 9)], table = colors)] <- dec.vals
    }
  }
  colors <- t(x = col2rgb(col = colors))
  colors <- mapply(
    FUN = function(i, alpha) {
      return(rgb(colors[i, , drop = FALSE], alpha = alpha, maxColorValue = 255))
    },
    i = 1:nrow(x = colors),
    alpha = alpha
  )
  return(colors)
}

# Plot feature expression by identity
#
# Basically combines the codebase for VlnPlot and RidgePlot
#
# @param object Seurat object
# @param type Plot type, choose from 'ridge', 'violin', or 'splitViolin'
# @param features Features to plot (gene expression, metrics, PC scores,
# anything that can be retrieved by FetchData)
# @param idents Which classes to include in the plot (default is all)
# @param ncol Number of columns if multiple plots are displayed
# @param sort Sort identity classes (on the x-axis) by the average expression of the attribute being plotted,
# or, if stack is TRUE, sort both identity classes and features by hierarchical clustering
# @param y.max Maximum y axis value
# @param same.y.lims Set all the y-axis limits to the same values
# @param adjust Adjust parameter for geom_violin
# @param pt.size Point size for geom_violin
# @param cols Colors to use for plotting
# @param group.by Group (color) cells in different ways (for example, orig.ident)
# @param split.by A variable to split the plot by
# @param log Plot Y axis on log scale
# @param slot Use non-normalized counts data for plotting
# @param stack Horizontally stack plots for multiple features
# @param combine Combine plots into a single \code{\link[patchwork]{patchwork}ed}
# ggplot object. If \code{FALSE}, return a list of ggplot objects
# @param fill.by Color violins/ridges based on either 'feature' or 'ident'
# @param flip Flip plot orientation (identities on x-axis)
# @param raster Convert points to raster format, default is \code{NULL} which
# automatically rasterizes if plotting more than 100,000 cells
#
# @return A \code{\link[patchwork]{patchwork}ed} ggplot object if
# \code{combine = TRUE}; otherwise, a list of ggplot objects
#
#' @importFrom scales hue_pal
#' @importFrom ggplot2 xlab ylab
#' @importFrom patchwork wrap_plots
#
ExIPlot <- function(
  object,
  features,
  type = 'violin',
  idents = NULL,
  ncol = NULL,
  sort = FALSE,
  assay = NULL,
  y.max = NULL,
  same.y.lims = FALSE,
  adjust = 1,
  cols = NULL,
  pt.size = 0,
  group.by = NULL,
  split.by = NULL,
  log = FALSE,
  slot = 'data',
  stack = FALSE,
  combine = TRUE,
  fill.by = NULL,
  flip = FALSE,
  raster = NULL
) {
  assay <- assay %||% DefaultAssay(object = object)
  DefaultAssay(object = object) <- assay
  if (isTRUE(x = stack)) {
    if (!is.null(x = ncol)) {
      warning(
        "'ncol' is ignored when 'stack' is TRUE",
        call. = FALSE,
        immediate. = TRUE
      )
    }
    if (!is.null(x = y.max)) {
      warning(
        "'y.max' is ignored when 'stack' is TRUE",
        call. = FALSE,
        immediate.
= TRUE ) } } else { ncol <- ncol %||% ifelse( test = length(x = features) > 9, yes = 4, no = min(length(x = features), 3) ) } data <- FetchData(object = object, vars = features, slot = slot) pt.size <- pt.size %||% AutoPointSize(data = object) features <- colnames(x = data) if (is.null(x = idents)) { cells <- colnames(x = object) } else { cells <- names(x = Idents(object = object)[Idents(object = object) %in% idents]) } data <- data[cells, , drop = FALSE] idents <- if (is.null(x = group.by)) { Idents(object = object)[cells] } else { object[[group.by, drop = TRUE]][cells] } if (!is.factor(x = idents)) { idents <- factor(x = idents) } if (is.null(x = split.by)) { split <- NULL } else { split <- object[[split.by, drop = TRUE]][cells] if (!is.factor(x = split)) { split <- factor(x = split) } if (is.null(x = cols)) { cols <- hue_pal()(length(x = levels(x = idents))) cols <- Interleave(cols, InvertHex(hexadecimal = cols)) } else if (length(x = cols) == 1 && cols == 'interaction') { split <- interaction(idents, split) cols <- hue_pal()(length(x = levels(x = idents))) } else { cols <- Col2Hex(cols) } if (length(x = cols) < length(x = levels(x = split))) { cols <- Interleave(cols, InvertHex(hexadecimal = cols)) } cols <- rep_len(x = cols, length.out = length(x = levels(x = split))) names(x = cols) <- levels(x = split) if ((length(x = cols) > 2) & (type == "splitViolin")) { warning("Split violin is only supported for <3 groups, using multi-violin.") type <- "violin" } } if (same.y.lims && is.null(x = y.max)) { y.max <- max(data) } if (isTRUE(x = stack)) { return(MultiExIPlot( type = type, data = data, idents = idents, split = split, sort = sort, same.y.lims = same.y.lims, adjust = adjust, cols = cols, pt.size = pt.size, log = log, fill.by = fill.by, flip = flip )) } plots <- lapply( X = features, FUN = function(x) { return(SingleExIPlot( type = type, data = data[, x, drop = FALSE], idents = idents, split = split, sort = sort, y.max = y.max, adjust = adjust, cols = cols, pt.size = pt.size, log = log, raster = raster )) } ) label.fxn <- switch( EXPR = type, 'violin' = if (stack) { xlab } else { ylab }, "splitViolin" = if (stack) { xlab } else { ylab }, 'ridge' = xlab, stop("Unknown ExIPlot type ", type, call. = FALSE) ) for (i in 1:length(x = plots)) { key <- paste0(unlist(x = strsplit(x = features[i], split = '_'))[1], '_') obj <- names(x = which(x = Key(object = object) == key)) if (length(x = obj) == 1) { if (inherits(x = object[[obj]], what = 'DimReduc')) { plots[[i]] <- plots[[i]] + label.fxn(label = 'Embeddings Value') } else if (inherits(x = object[[obj]], what = 'Assay')) { next } else { warning("Unknown object type ", class(x = object), immediate. = TRUE, call. = FALSE) plots[[i]] <- plots[[i]] + label.fxn(label = NULL) } } else if (!features[i] %in% rownames(x = object)) { plots[[i]] <- plots[[i]] + label.fxn(label = NULL) } } if (combine) { plots <- wrap_plots(plots, ncol = ncol) if (length(x = features) > 1) { plots <- plots & NoLegend() } } return(plots) } # Make a theme for facet plots # # @inheritParams SeuratTheme # @export # # @rdname SeuratTheme # @aliases FacetTheme # FacetTheme <- function(...) { return(theme( strip.background = element_blank(), strip.text = element_text(face = 'bold'), # Validate the theme validate = TRUE, ... 
)) } #' @importFrom RColorBrewer brewer.pal #' @importFrom grDevices colorRampPalette #' #' SpatialColors <- colorRampPalette(colors = rev(x = brewer.pal(n = 11, name = "Spectral"))) # Feature plot palettes # FeaturePalettes <- list( 'Spatial' = SpatialColors(n = 100), 'Seurat' = c('lightgrey', 'blue') ) # Splits features into groups based on log expression levels # # @param object Seurat object # @param assay Assay for expression data # @param min.cells Only compute for features in at least this many cells # @param ngroups Number of groups to split into # # @return A Seurat object with the feature group stored as a factor in # metafeatures # #' @importFrom Matrix rowMeans rowSums # GetFeatureGroups <- function(object, assay, min.cells = 5, ngroups = 6) { cm <- GetAssayData(object = object[[assay]], slot = "counts") # subset to keep only genes detected in at least min.cells cells cm <- cm[rowSums(cm > 0) >= min.cells, ] # use the geometric mean of the features to group them # (using the arithmetic mean would usually not change things much) # could use sctransform:::row_gmean here but not exported feature.gmean <- exp(x = rowMeans(log1p(x = cm))) - 1 feature.grp.breaks <- seq( from = min(log10(x = feature.gmean)) - 10*.Machine$double.eps, to = max(log10(x = feature.gmean)), length.out = ngroups + 1 ) feature.grp <- cut( x = log10(x = feature.gmean), breaks = feature.grp.breaks, ordered_result = TRUE ) feature.grp <- factor( x = feature.grp, levels = rev(x = levels(x = feature.grp)), ordered = TRUE ) names(x = feature.grp) <- names(x = feature.gmean) return(feature.grp) } # Get X and Y aesthetics from a plot for a certain geom # # @param plot A ggplot2 object # @param geom Geom class to filter to # @param plot.first Use plot-wide X/Y aesthetics before geom-specific aesthetics # # @return A named list with values 'x' for the name of the x aesthetic and 'y' for the y aesthetic # #' @importFrom rlang as_label # GetXYAesthetics <- function(plot, geom = 'GeomPoint', plot.first = TRUE) { geoms <- sapply( X = plot$layers, FUN = function(layer) { return(class(x = layer$geom)[1]) } ) # handle case where raster is set to True if (geom == "GeomPoint" && "GeomScattermore" %in% geoms){ geom <- "GeomScattermore" } geoms <- which(x = geoms == geom) if (length(x = geoms) == 0) { stop("Cannot find a geom of class ", geom) } geoms <- min(geoms) if (plot.first) { # x <- as.character(x = plot$mapping$x %||% plot$layers[[geoms]]$mapping$x)[2] x <- as_label(x = plot$mapping$x %||% plot$layers[[geoms]]$mapping$x) # y <- as.character(x = plot$mapping$y %||% plot$layers[[geoms]]$mapping$y)[2] y <- as_label(x = plot$mapping$y %||% plot$layers[[geoms]]$mapping$y) } else { x <- as_label(x = plot$layers[[geoms]]$mapping$x %||% plot$mapping$x) y <- as_label(x = plot$layers[[geoms]]$mapping$y %||% plot$mapping$y) } return(list('x' = x, 'y' = y)) } # For plotting the tissue image #' @importFrom ggplot2 ggproto Geom aes ggproto_parent alpha draw_key_point #' @importFrom grid unit gpar editGrob pointsGrob viewport gTree addGrob grobName #' GeomSpatial <- ggproto( "GeomSpatial", Geom, required_aes = c("x", "y"), extra_params = c("na.rm", "image", "image.alpha", "crop"), default_aes = aes( shape = 21, colour = "black", point.size.factor = 1.0, fill = NA, alpha = NA, stroke = 0.25 ), setup_data = function(self, data, params) { data <- ggproto_parent(Geom, self)$setup_data(data, params) # We need to flip the image as the Y coordinates are reversed data$y = max(data$y) - data$y + min(data$y) data }, draw_key = draw_key_point, 
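  # draw_panel (below) renders the tissue image inside a grid viewport matched
  # to the panel, flips the Y axis so the image and the spot coordinates agree,
  # optionally dims the image via image.alpha, and finally overlays the spots as
  # a pointsGrob whose size comes from Radius(object = image) scaled by the
  # point.size.factor aesthetic.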
draw_panel = function(data, panel_scales, coord, image, image.alpha, crop) { # This should be in native units, where # Locations and sizes are relative to the x- and yscales for the current viewport. if (!crop) { y.transform <- c(0, nrow(x = image)) - panel_scales$y.range data$y <- data$y + sum(y.transform) panel_scales$x$continuous_range <- c(0, ncol(x = image)) panel_scales$y$continuous_range <- c(0, nrow(x = image)) panel_scales$y.range <- c(0, nrow(x = image)) panel_scales$x.range <- c(0, ncol(x = image)) } z <- coord$transform( data.frame(x = c(0, ncol(x = image)), y = c(0, nrow(x = image))), panel_scales ) # Flip Y axis for image z$y <- -rev(z$y) + 1 wdth <- z$x[2] - z$x[1] hgth <- z$y[2] - z$y[1] vp <- viewport( x = unit(x = z$x[1], units = "npc"), y = unit(x = z$y[1], units = "npc"), width = unit(x = wdth, units = "npc"), height = unit(x = hgth, units = "npc"), just = c("left", "bottom") ) img.grob <- GetImage(object = image) img <- editGrob(grob = img.grob, vp = vp) # spot.size <- slot(object = image, name = "spot.radius") spot.size <- Radius(object = image) coords <- coord$transform(data, panel_scales) pts <- pointsGrob( x = coords$x, y = coords$y, pch = data$shape, size = unit(spot.size, "npc") * data$point.size.factor, gp = gpar( col = alpha(colour = coords$colour, alpha = coords$alpha), fill = alpha(colour = coords$fill, alpha = coords$alpha), lwd = coords$stroke) ) vp <- viewport() gt <- gTree(vp = vp) if (image.alpha > 0) { if (image.alpha != 1) { img$raster = as.raster( x = matrix( data = alpha(colour = img$raster, alpha = image.alpha), nrow = nrow(x = img$raster), ncol = ncol(x = img$raster), byrow = TRUE) ) } gt <- addGrob(gTree = gt, child = img) } gt <- addGrob(gTree = gt, child = pts) # Replacement for ggname gt$name <- grobName(grob = gt, prefix = 'geom_spatial') return(gt) # ggplot2:::ggname("geom_spatial", gt) } ) # influenced by: https://stackoverflow.com/questions/49475201/adding-tables-to-ggplot2-with-facet-wrap-in-r # https://ggplot2.tidyverse.org/articles/extending-ggplot2.html #' @importFrom ggplot2 layer #' #' geom_spatial <- function( mapping = NULL, data = NULL, image = image, image.alpha = image.alpha, crop = crop, stat = "identity", position = "identity", na.rm = FALSE, show.legend = NA, inherit.aes = TRUE, ... ) { layer( geom = GeomSpatial, mapping = mapping, data = data, stat = stat, position = position, show.legend = show.legend, inherit.aes = inherit.aes, params = list(na.rm = na.rm, image = image, image.alpha = image.alpha, crop = crop, ...) ) } #' @importFrom grid viewport editGrob grobName #' @importFrom ggplot2 ggproto Geom ggproto_parent # GeomSpatialInteractive <- ggproto( "GeomSpatialInteractive", Geom, setup_data = function(self, data, params) { data <- ggproto_parent(parent = Geom, self = self)$setup_data(data, params) data }, draw_group = function(data, panel_scales, coord) { vp <- viewport(x = data$x, y = data$y) g <- editGrob(grob = data$grob[[1]], vp = vp) # Replacement for ggname g$name <- grobName(grob = g, prefix = 'geom_spatial_interactive') return(g) # return(ggname(prefix = "geom_spatial", grob = g)) }, required_aes = c("grob","x","y") ) #' @importFrom ggplot2 layer # geom_spatial_interactive <- function( mapping = NULL, data = NULL, stat = "identity", position = "identity", na.rm = FALSE, show.legend = NA, inherit.aes = FALSE, ... ) { layer( geom = GeomSpatialInteractive, mapping = mapping, data = data, stat = stat, position = position, show.legend = show.legend, inherit.aes = inherit.aes, params = list(na.rm = na.rm, ...) 
) } # A split violin plot geom # #' @importFrom scales zero_range #' @importFrom ggplot2 GeomPolygon #' @importFrom grid grobTree grobName # # @author jan-glx on StackOverflow # @references \url{https://stackoverflow.com/questions/35717353/split-violin-plot-with-ggplot2} # @seealso \code{\link[ggplot2]{geom_violin}} # GeomSplitViolin <- ggproto( "GeomSplitViolin", GeomViolin, # setup_data = function(data, params) { # data$width <- data$width %||% params$width %||% (resolution(data$x, FALSE) * 0.9) # data <- plyr::ddply(data, "group", transform, xmin = x - width/2, xmax = x + width/2) # e <- globalenv() # name <- paste(sample(x = letters, size = 5), collapse = '') # message("Saving initial data to ", name) # e[[name]] <- data # return(data) # }, draw_group = function(self, data, ..., draw_quantiles = NULL) { data$xminv <- data$x - data$violinwidth * (data$x - data$xmin) data$xmaxv <- data$x + data$violinwidth * (data$xmax - data$x) grp <- data[1, 'group'] if (grp %% 2 == 1) { data$x <- data$xminv data.order <- data$y } else { data$x <- data$xmaxv data.order <- -data$y } newdata <- data[order(data.order), , drop = FALSE] newdata <- rbind( newdata[1, ], newdata, newdata[nrow(x = newdata), ], newdata[1, ] ) newdata[c(1, nrow(x = newdata) - 1, nrow(x = newdata)), 'x'] <- round(x = newdata[1, 'x']) grob <- if (length(x = draw_quantiles) > 0 & !zero_range(x = range(data$y))) { stopifnot(all(draw_quantiles >= 0), all(draw_quantiles <= 1)) quantiles <- QuantileSegments(data = data, draw.quantiles = draw_quantiles) aesthetics <- data[rep.int(x = 1, times = nrow(x = quantiles)), setdiff(x = names(x = data), y = c("x", "y")), drop = FALSE] aesthetics$alpha <- rep.int(x = 1, nrow(x = quantiles)) both <- cbind(quantiles, aesthetics) quantile.grob <- GeomPath$draw_panel(both, ...) grobTree(GeomPolygon$draw_panel(newdata, ...), name = quantile.grob) } else { GeomPolygon$draw_panel(newdata, ...) } grob$name <- grobName(grob = grob, prefix = 'geom_split_violin') return(grob) } ) # Create a split violin plot geom # # @inheritParams ggplot2::geom_violin # #' @importFrom ggplot2 layer # # @author jan-glx on StackOverflow # @references \url{https://stackoverflow.com/questions/35717353/split-violin-plot-with-ggplot2} # @seealso \code{\link[ggplot2]{geom_violin}} # geom_split_violin <- function( mapping = NULL, data = NULL, stat = 'ydensity', position = 'identity', ..., draw_quantiles = NULL, trim = TRUE, scale = 'area', na.rm = FALSE, show.legend = NA, inherit.aes = TRUE ) { return(layer( data = data, mapping = mapping, stat = stat, geom = GeomSplitViolin, position = position, show.legend = show.legend, inherit.aes = inherit.aes, params = list( trim = trim, scale = scale, draw_quantiles = draw_quantiles, na.rm = na.rm, ... ) )) } # Convert a ggplot2 scatterplot to base R graphics # # @param plot A ggplot2 scatterplot # @param do.plot Create the plot with base R graphics # @param cols A named vector of column names to pull. Vector names must be 'x', # 'y', 'colour', 'shape', and/or 'size'; vector values must be the names of # columns in plot data that correspond to these values. May pass only values that # differ from the default (eg. \code{cols = c('size' = 'point.size.factor')}) # @param ... Extra parameters passed to PlotBuild # # @return A dataframe with the data that created the ggplot2 scatterplot # #' @importFrom ggplot2 ggplot_build # GGpointToBase <- function( plot, do.plot = TRUE, cols = c( 'x' = 'x', 'y' = 'y', 'colour' = 'colour', 'shape' = 'shape', 'size' = 'size' ), ... 
) { plot.build <- ggplot_build(plot = plot) default.cols <- c( 'x' = 'x', 'y' = 'y', 'colour' = 'colour', 'shape' = 'shape', 'size' = 'size' ) cols <- cols %||% default.cols if (is.null(x = names(x = cols))) { if (length(x = cols) > length(x = default.cols)) { warning( "Too many columns provided, selecting only first ", length(x = default.cols), call. = FALSE, immediate. = TRUE ) cols <- cols[1:length(x = default.cols)] } names(x = cols) <- names(x = default.cols)[1:length(x = cols)] } cols <- c( cols[intersect(x = names(x = default.cols), y = names(x = cols))], default.cols[setdiff(x = names(x = default.cols), y = names(x = cols))] ) cols <- cols[names(x = default.cols)] build.use <- which(x = vapply( X = plot.build$data, FUN = function(dat) { return(all(cols %in% colnames(x = dat))) }, FUN.VALUE = logical(length = 1L) )) if (length(x = build.use) == 0) { stop("GGpointToBase only works on geom_point ggplot objects") } build.data <- plot.build$data[[min(build.use)]] plot.data <- build.data[, cols] names(x = plot.data) <- c( plot.build$plot$labels$x, plot.build$plot$labels$y, 'color', 'pch', 'cex' ) if (do.plot) { PlotBuild(data = plot.data, ...) } return(plot.data) } # Convert a ggplot2 scatterplot to plotly graphics # # @inheritParams GGpointToBase # @param information Extra information for hovering # @param ... Ignored # # @return A dataframe with the data that greated the ggplot2 scatterplot #' @importFrom ggplot2 ggplot_build # GGpointToPlotlyBuild <- function( plot, information = NULL, cols = eval(expr = formals(fun = GGpointToBase)$cols), ... ) { CheckDots(...) plot.build <- GGpointToBase(plot = plot, do.plot = FALSE, cols = cols) data <- ggplot_build(plot = plot)$plot$data rownames(x = plot.build) <- rownames(data) # Reset the names to 'x' and 'y' names(x = plot.build) <- c( 'x', 'y', names(x = plot.build)[3:length(x = plot.build)] ) # Add the hover information we're looking for if (is.null(x = information)) { plot.build$feature <- rownames(x = data) } else { info <- apply( X = information, MARGIN = 1, FUN = function(x, names) { return(paste0(names, ': ', x, collapse = '
')) }, names = colnames(x = information) ) data.info <- data.frame( feature = paste(rownames(x = information), info, sep = '
'), row.names = rownames(x = information) ) plot.build <- merge(x = plot.build, y = data.info, by = 0) rownames(x = plot.build) <- plot.build$Row.names plot.build <- plot.build[, which(x = colnames(x = plot.build) != 'Row.names'), drop = FALSE] } return(plot.build) } #' @importFrom stats quantile #' InvertCoordinate <- function(x, MARGIN = 2) { if (!is.null(x = x)) { switch( EXPR = MARGIN, '1' = { rmin <- 'left' rmax <- 'right' cmin <- 'xmin' cmax <- 'xmax' }, '2' = { rmin <- 'bottom' rmax <- 'top' cmin <- 'ymin' cmax <- 'ymax' }, stop("'MARGIN' must be either 1 or 2", call. = FALSE) ) # Fix the range so that rmin becomes rmax and vice versa # Needed for both points and brushes range <- x$range x$range[[rmin]] <- range[[rmax]] x$range[[rmax]] <- range[[rmin]] # Fix the cmin and cmax values, if provided # These are used for brush boundaries coords <- c(x[[cmin]], x[[cmax]]) if (all(!is.null(x = coords))) { names(x = coords) <- c(cmin, cmax) x[[cmin]] <- quantile( x = x$range[[rmin]]:x$range[[rmax]], probs = 1 - (coords[cmax] / x$range[[rmax]]), names = FALSE ) x[[cmax]] <- quantile( x = x$range[[rmin]]:x$range[[rmax]], probs = 1 - (coords[cmin] / x$range[[rmax]]), names = FALSE ) } } return(x) } # Invert a Hexadecimal color # # @param hexadecimal A character vector of hexadecimal colors # # @return Hexadecimal representations of the inverted color # # @author Matt Lagrandeur # @references \url{http://www.mattlag.com/scripting/hexcolorinverter.php} # InvertHex <- function(hexadecimal) { return(vapply( X = toupper(x = hexadecimal), FUN = function(hex) { hex <- unlist(x = strsplit( x = gsub(pattern = '#', replacement = '', x = hex), split = '' )) key <- toupper(x = as.hexmode(x = 15:0)) if (!all(hex %in% key)) { stop('All hexadecimal colors must be valid hexidecimal numbers from 0-9 and A-F') } if (length(x = hex) == 8) { alpha <- hex[7:8] hex <- hex[1:6] } else if (length(x = hex) == 6) { alpha <- NULL } else { stop("All hexidecimal colors must be either 6 or 8 characters in length, excluding the '#'") } value <- rev(x = key) inv.hex <- vapply( X = hex, FUN = function(x) { return(value[grep(pattern = x, x = key)]) }, FUN.VALUE = character(length = 1L) ) inv.hex <- paste(inv.hex, collapse = '') return(paste0('#', inv.hex, paste(alpha, collapse = ''))) }, FUN.VALUE = character(length = 1L), USE.NAMES = FALSE )) } # Make label information for ggplot2-based scatter plots # # @param data A three-column data frame (accessed with \code{plot$data}) # The first column should be the X axis, the second the Y, and the third should be grouping information # # @return A dataframe with three columns: centers along the X axis, centers along the Y axis, and group information # #' @importFrom stats median na.omit # MakeLabels <- function(data) { groups <- as.character(x = na.omit(object = unique(x = data[, 3]))) labels <- lapply( X = groups, FUN = function(group) { data.use <- data[data[, 3] == group, 1:2] return(apply(X = data.use, MARGIN = 2, FUN = median, na.rm = TRUE)) } ) names(x = labels) <- groups labels <- as.data.frame(x = t(x = as.data.frame(x = labels))) labels[, colnames(x = data)[3]] <- groups return(labels) } # Plot expression of multiple features by identity on a plot # # @param data Data to plot # @param idents Idents to use # @param type Make either a 'ridge' or 'violin' plot # @param sort Sort identity classes and features based on hierarchical clustering # @param same.y.lims Indicates whether to use the same ylim for each feature # @param adjust Adjust parameter for geom_violin # @param cols 
Colors to use for plotting # @param log plot Y axis on log scale # @param fill.by Color violins/ridges based on either 'feature' or 'ident' # @param seed.use Random seed to use. If NULL, don't set a seed # @param flip flip plot orientation (identities on x-axis) # # @return A ggplot-based Expression-by-Identity plot # #' @importFrom cowplot theme_cowplot #' @importFrom utils globalVariables #' @importFrom stats rnorm dist hclust #' @importFrom ggridges geom_density_ridges theme_ridges #' @importFrom ggplot2 ggplot aes_string facet_grid theme labs geom_rect #' geom_violin geom_jitter ylim position_jitterdodge scale_fill_manual #' scale_y_log10 scale_x_log10 scale_y_discrete scale_x_continuous #' scale_y_continuous waiver #' MultiExIPlot <- function( data, idents, split = NULL, type = 'violin', sort = FALSE, same.y.lims = same.y.lims, adjust = 1, pt.size = 0, cols = NULL, seed.use = 42, log = FALSE, fill.by = NULL, flip = NULL ) { if (!(fill.by %in% c("feature", "ident"))) { stop("`fill.by` must be either `feature` or `ident`") } if (!is.null(x = seed.use)) { set.seed(seed = seed.use) } if (!is.data.frame(x = data) || ncol(x = data) < 2) { stop("MultiExIPlot requires a data frame with >1 column") } data <- Melt(x = data) data <- data.frame( feature = data$cols, expression = data$vals, ident = rep_len(x = idents, length.out = nrow(x = data)) ) if ((is.character(x = sort) && nchar(x = sort) > 0) || sort) { data$feature <- as.vector(x = data$feature) data$ident <- as.vector(x = data$ident) # build matrix of average expression (#-features by #-idents), lexical ordering avgs.matrix <- sapply( X = split(x = data, f = data$ident), FUN = function(df) { return(tapply( X = df$expression, INDEX = df$feature, FUN = mean )) } ) idents.order <- hclust(d = dist(x = t(x = L2Norm(mat = avgs.matrix, MARGIN = 2))))$order avgs.matrix <- avgs.matrix[,idents.order] avgs.matrix <- L2Norm(mat = avgs.matrix, MARGIN = 1) # order feature clusters by position of their "rank-1 idents" position <- apply(X = avgs.matrix, MARGIN = 1, FUN = which.max) mat <- hclust(d = dist(x = avgs.matrix))$merge orderings <- list() for (i in 1:nrow(mat)) { x <- if (mat[i,1] < 0) -mat[i,1] else orderings[[mat[i,1]]] y <- if (mat[i,2] < 0) -mat[i,2] else orderings[[mat[i,2]]] x.pos <- min(x = position[x]) y.pos <- min(x = position[y]) orderings[[i]] <- if (x.pos < y.pos) { c(x, y) } else { c(y, x) } } features.order <- orderings[[length(x = orderings)]] data$feature <- factor( x = data$feature, levels = unique(x = sort(x = data$feature))[features.order] ) data$ident <- factor( x = data$ident, levels = unique(x = sort(x = data$ident))[rev(x = idents.order)] ) } else { data$feature <- factor(x = data$feature, levels = unique(x = data$feature)) } if (log) { noise <- rnorm(n = nrow(x = data)) / 200 data$expression <- data$expression + 1 } else { noise <- rnorm(n = nrow(x = data)) / 100000 } for (f in unique(x = data$feature)) { if (all(data$expression[(data$feature == f)] == data$expression[(data$feature == f)][1])) { warning( "All cells have the same value of ", f, call. = FALSE, immediate. 
= TRUE ) } else { data$expression[(data$feature == f)] <- data$expression[(data$feature == f)] + noise[(data$feature == f)] } } if (type == 'violin' && !is.null(x = split)) { data$split <- rep_len(x = split, length.out = nrow(data)) vln.geom <- geom_violin fill.by <- 'split' } else if (type == 'splitViolin' && !is.null(x = split)) { data$split <- rep_len(x = split, length.out = nrow(data)) vln.geom <- geom_split_violin fill.by <- 'split' type <- 'violin' } else { vln.geom <- geom_violin } switch( EXPR = type, 'violin' = { geom <- list(vln.geom(scale = 'width', adjust = adjust, trim = TRUE)) }, 'ridge' = { geom <- list( geom_density_ridges(scale = 4), theme_ridges(), scale_y_discrete(expand = c(0.01, 0)) ) }, stop("Unknown plot type: ", type) ) if (flip) { x <- 'ident' x.label <- 'Identity' y <- 'expression' y.label <- 'Expression Level' } else { y <- 'ident' y.label <- 'Identity' x <- 'expression' x.label <- 'Expression Level' } plot <- ggplot( data = data, mapping = aes_string(x = x, y = y, fill = fill.by)[c(2, 3, 1)] ) + labs(x = x.label, y = y.label, fill = NULL) + theme_cowplot() plot <- do.call(what = '+', args = list(plot, geom)) if (flip) { plot <- plot + scale_y_continuous( expand = c(0, 0), labels = function(x) c(rep(x = '', times = length(x)-2), x[length(x) - 1], '')) + facet_grid(feature ~ ., scales = (if (same.y.lims) 'fixed' else 'free')) + FacetTheme( panel.spacing = unit(0, 'lines'), panel.background = element_rect(fill = NA, color = "black"), axis.text.y = element_text(size = 7), axis.text.x = element_text(angle = 45, hjust = 1), strip.text.y.right = element_text(angle = 0)) } else { plot <- plot + scale_x_continuous( expand = c(0, 0), labels = function(x) c(rep(x = '', times = length(x)-2), x[length(x) - 1], '')) + facet_grid(. ~ feature, scales = (if (same.y.lims) 'fixed' else 'free')) + FacetTheme( panel.spacing = unit(0, 'lines'), panel.background = element_rect(fill = NA, color = "black"), axis.text.x = element_text(size = 7), strip.text.x = element_text(angle = -90)) } if (log) { plot <- plot + scale_x_log10() } if (!is.null(x = cols)) { if (!is.null(x = split)) { idents <- unique(x = as.vector(x = data$ident)) splits <- unique(x = as.vector(x = data$split)) labels <- if (length(x = splits) == 2) { splits } else { unlist(x = lapply( X = idents, FUN = function(pattern, x) { x.mod <- gsub( pattern = paste0(pattern, '.'), replacement = paste0(pattern, ': '), x = x, fixed = TRUE ) x.keep <- grep(pattern = ': ', x = x.mod, fixed = TRUE) x.return <- x.mod[x.keep] names(x = x.return) <- x[x.keep] return(x.return) }, x = unique(x = as.vector(x = data$split)) )) } if (is.null(x = names(x = labels))) { names(x = labels) <- labels } } else { labels <- levels(x = droplevels(data$ident)) } plot <- plot + scale_fill_manual(values = cols, labels = labels) } return(plot) } # Create a scatterplot with data from a ggplot2 scatterplot # # @param plot.data The original ggplot2 scatterplot data # This is taken from ggplot2::ggplot_build # @param dark.theme Plot using a dark theme # @param smooth Use a smooth scatterplot instead of a standard scatterplot # @param ... Extra parameters passed to graphics::plot or graphics::smoothScatter # #' @importFrom graphics axis plot smoothScatter # PlotBuild <- function(data, dark.theme = FALSE, smooth = FALSE, ...) { # Do we use a smooth scatterplot? 
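  # Overall flow of this helper: bind `myplot` to either graphics::plot or
  # graphics::smoothScatter up front, draw the points with per-point colour,
  # shape, and size taken from the ggplot build data, and, when dark.theme is
  # TRUE, suppress the default axes and redraw them in white afterwards so they
  # stay visible on the black background set via par(bg = 'black').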
# Take advantage of functions as first class objects # to dynamically choose normal vs smooth scatterplot myplot <- ifelse(test = smooth, yes = smoothScatter, no = plot) CheckDots(..., fxns = myplot) if (dark.theme) { par(bg = 'black') axes = FALSE col.lab = 'white' } else { axes = 'TRUE' col.lab = 'black' } myplot( data[, c(1, 2)], col = data$color, pch = data$pch, cex = vapply( X = data$cex, FUN = function(x) { return(max(x / 2, 0.5)) }, FUN.VALUE = numeric(1) ), axes = axes, col.lab = col.lab, col.main = col.lab, ... ) if (dark.theme) { axis( side = 1, at = NULL, labels = TRUE, col.axis = col.lab, col = col.lab ) axis( side = 2, at = NULL, labels = TRUE, col.axis = col.lab, col = col.lab ) } } # Locate points on a plot and return them # # @param plot A ggplot2 plot # @param recolor Do we recolor the plot to highlight selected points? # @param dark.theme Plot using a dark theme # @param ... Exptra parameters to PlotBuild # # @return A dataframe of x and y coordinates for points selected # #' @importFrom graphics locator # @importFrom SDMTools pnt.in.poly # PointLocator <- function(plot, recolor = TRUE, dark.theme = FALSE, ...) { .Defunct(new = "CellSelector") # # Convert the ggplot object to a data.frame # PackageCheck('SDMTools') # plot.data <- GGpointToBase(plot = plot, dark.theme = dark.theme, ...) # npoints <- nrow(x = plot.data) # cat("Click around the cluster of points you wish to select\n") # cat("ie. select the vertecies of a shape around the cluster you\n") # cat("are interested in. Press when finished (right click for R-terminal users)\n\n") # polygon <- locator(n = npoints, type = 'l') # polygon <- data.frame(polygon) # # pnt.in.poly returns a data.frame of points # points.all <- SDMTools::pnt.in.poly( # pnts = plot.data[, c(1, 2)], # poly.pnts = polygon # ) # # Find the located points # points.located <- points.all[which(x = points.all$pip == 1), ] # # If we're recoloring, do the recolor # if (recolor) { # no <- ifelse(test = dark.theme, yes = 'white', no = '#C3C3C3') # points.all$color <- ifelse(test = points.all$pip == 1, yes = '#DE2D26', no = no) # plot.data$color <- points.all$color # PlotBuild(data = plot.data, dark.theme = dark.theme, ...) 
# } # return(points.located[, c(1, 2)]) } # Create quantile segments for quantiles on violin plots in ggplot2 # # @param data Data being plotted # @param draw.quantiles Quantiles to draw # #' @importFrom stats approxfun # # @author Hadley Wickham (I presume) # @seealso \code{\link[ggplot2]{geom_violin}} # QuantileSegments <- function(data, draw.quantiles) { densities <- cumsum(x = data$density) / sum(data$density) ecdf <- approxfun(x = densities, y = data$y) ys <- ecdf(v = draw.quantiles) violin.xminvs <- approxfun(x = data$y, y = data$xminv)(v = ys) violin.xmaxvs <- approxfun(x = data$y, y = data$xmaxv)(v = ys) return(data.frame( x = as.vector(x = t(x = data.frame(violin.xminvs, violin.xmaxvs))), y = rep(x = ys, each = 2), group = rep(x = ys, each = 2) )) } # Scale vector to min and max cutoff values # # @param vec a vector # @param cutoffs A two-length vector of cutoffs to be passed to \code{\link{SetQuantile}} # # @return Returns a vector # ScaleColumn <- function(vec, cutoffs) { if (!length(x = cutoffs) == 2) { stop("Two cutoffs (a low and high) are needed") } cutoffs <- sapply( X = cutoffs, FUN = SetQuantile, data = vec ) vec[vec < min(cutoffs)] <- min(cutoffs) vec[vec > max(cutoffs)] <- max(cutoffs) return(vec) } # Set highlight information # # @param cells.highlight Cells to highlight # @param cells.all A character vector of all cell names # @param sizes.highlight Sizes of cells to highlight # @param cols.highlight Colors to highlight cells as # @param col.base Base color to use for unselected cells # @param pt.size Size of unselected cells # # @return A list will cell highlight information # \describe{ # \item{plot.order}{An order to plot cells in} # \item{highlight}{A vector giving group information for each cell} # \item{size}{A vector giving size information for each cell} # \item{color}{Colors for highlighting in the order of plot.order} # } # SetHighlight <- function( cells.highlight, cells.all, sizes.highlight, cols.highlight, col.base = 'black', pt.size = 1 ) { if (is.character(x = cells.highlight)) { cells.highlight <- list(cells.highlight) } else if (is.data.frame(x = cells.highlight) || !is.list(x = cells.highlight)) { cells.highlight <- as.list(x = cells.highlight) } cells.highlight <- lapply( X = cells.highlight, FUN = function(cells) { cells.return <- if (is.character(x = cells)) { cells[cells %in% cells.all] } else { cells <- as.numeric(x = cells) cells <- cells[cells <= length(x = cells.all)] cells.all[cells] } return(cells.return) } ) cells.highlight <- Filter(f = length, x = cells.highlight) names.highlight <- if (is.null(x = names(x = cells.highlight))) { paste0('Group_', 1L:length(x = cells.highlight)) } else { names(x = cells.highlight) } sizes.highlight <- rep_len( x = sizes.highlight, length.out = length(x = cells.highlight) ) cols.highlight <- c( col.base, rep_len(x = cols.highlight, length.out = length(x = cells.highlight)) ) size <- rep_len(x = pt.size, length.out = length(x = cells.all)) highlight <- rep_len(x = NA_character_, length.out = length(x = cells.all)) if (length(x = cells.highlight) > 0) { for (i in 1:length(x = cells.highlight)) { cells.check <- cells.highlight[[i]] index.check <- match(x = cells.check, cells.all) highlight[index.check] <- names.highlight[i] size[index.check] <- sizes.highlight[i] } } plot.order <- sort(x = unique(x = highlight), na.last = TRUE) plot.order[is.na(x = plot.order)] <- 'Unselected' highlight[is.na(x = highlight)] <- 'Unselected' highlight <- factor(x = highlight, levels = plot.order) return(list( plot.order = 
plot.order, highlight = highlight, size = size, color = cols.highlight )) } #' @importFrom shiny brushedPoints # ShinyBrush <- function(plot.data, brush, outputs, inverts = character(length = 0L)) {#}, selected = NULL) { selected <- NULL if (!is.null(x = brush)) { if (brush$outputId %in% outputs) { selected <- rownames(x = brushedPoints(df = plot.data, brush = brush)) } else if (brush$outputId %in% inverts) { selected <- rownames(x = brushedPoints( df = plot.data, brush = InvertCoordinate(x = brush) )) } } return(selected) } globalVariables(names = '..density..', package = 'Seurat') #' A single correlation plot #' #' @param data A data frame with two columns to be plotted #' @param col.by A vector or factor of values to color the plot by #' @param cols An optional vector of colors to use #' @param pt.size Point size for the plot #' @param smooth Make a smoothed scatter plot #' @param rows.highlight A vector of rows to highlight (like cells.highlight in #' \code{\link{SingleDimPlot}}) #' @param legend.title Optional legend title #' @param raster Convert points to raster format, default is \code{NULL} #' which will automatically use raster if the number of points plotted is #' greater than 100,000 #' @param raster.dpi The pixel resolution for rastered plots, passed to geom_scattermore(). #' Default is c(512, 512) #' @param plot.cor ... #' @param jitter Jitter for easier visualization of crowded points #' #' @return A ggplot2 object #' #' @importFrom stats cor #' @importFrom cowplot theme_cowplot #' @importFrom RColorBrewer brewer.pal.info #' @importFrom ggplot2 ggplot aes_string geom_point labs scale_color_brewer #' scale_color_manual guides stat_density2d aes scale_fill_continuous #' @importFrom scattermore geom_scattermore #' #' @keywords internal #' #' @export #' SingleCorPlot <- function( data, col.by = NULL, cols = NULL, pt.size = NULL, smooth = FALSE, rows.highlight = NULL, legend.title = NULL, na.value = 'grey50', span = NULL, raster = NULL, raster.dpi = NULL, plot.cor = TRUE, jitter = TRUE ) { pt.size <- pt.size %||% AutoPointSize(data = data, raster = raster) if ((nrow(x = data) > 1e5) & !isFALSE(raster)){ message("Rasterizing points since number of points exceeds 100,000.", "\nTo disable this behavior set `raster=FALSE`") } raster <- raster %||% (nrow(x = data) > 1e5) if (!is.null(x = raster.dpi)) { if (!is.numeric(x = raster.dpi) || length(x = raster.dpi) != 2) stop("'raster.dpi' must be a two-length numeric vector") } orig.names <- colnames(x = data) names.plot <- colnames(x = data) <- gsub( pattern = '-', replacement = '.', x = colnames(x = data), fixed = TRUE ) names.plot <- colnames(x = data) <- gsub( pattern = ':', replacement = '.', x = colnames(x = data), fixed = TRUE ) if (ncol(x = data) < 2) { msg <- "Too few variables passed" if (ncol(x = data) == 1) { msg <- paste0(msg, ', only have ', colnames(x = data)[1]) } stop(msg, call.
= FALSE) } plot.cor <- if (isTRUE(x = plot.cor)) { round(x = cor(x = data[, 1], y = data[, 2]), digits = 2) } else( "" ) if (!is.null(x = rows.highlight)) { highlight.info <- SetHighlight( cells.highlight = rows.highlight, cells.all = rownames(x = data), sizes.highlight = pt.size, cols.highlight = 'red', col.base = 'black', pt.size = pt.size ) cols <- highlight.info$color col.by <- factor( x = highlight.info$highlight, levels = rev(x = highlight.info$plot.order) ) plot.order <- order(col.by) data <- data[plot.order, ] col.by <- col.by[plot.order] } if (!is.null(x = col.by)) { data$colors <- col.by } plot <- ggplot( data = data, mapping = aes_string(x = names.plot[1], y = names.plot[2]) ) + labs( x = orig.names[1], y = orig.names[2], title = plot.cor, color = legend.title ) if (smooth) { # density <- kde2d(x = data[, names.plot[1]], y = data[, names.plot[2]], h = Bandwidth(data = data[, names.plot]), n = 200) # density <- data.frame( # expand.grid( # x = density$x, # y = density$y # ), # density = as.vector(x = density$z) # ) plot <- plot + stat_density2d( mapping = aes(fill = ..density.. ^ 0.25), geom = 'tile', contour = FALSE, n = 200, h = Bandwidth(data = data[, names.plot]) ) + # geom_tile( # mapping = aes_string( # x = 'x', # y = 'y', # fill = 'density' # ), # data = density # ) + scale_fill_continuous(low = 'white', high = 'dodgerblue4') + guides(fill = FALSE) } position <- NULL if (jitter) { position <- 'jitter' } else { position <- 'identity' } if (!is.null(x = col.by)) { if (raster) { plot <- plot + geom_scattermore( mapping = aes_string(color = 'colors'), position = position, pointsize = pt.size, pixels = raster.dpi ) } else { plot <- plot + geom_point( mapping = aes_string(color = 'colors'), position = position, size = pt.size ) } } else { if (raster) { plot <- plot + geom_scattermore(position = position, pointsize = pt.size, pixels = raster.dpi) } else { plot <- plot + geom_point(position = position, size = pt.size) } } if (!is.null(x = cols)) { cols.scale <- if (length(x = cols) == 1 && cols %in% rownames(x = brewer.pal.info)) { scale_color_brewer(palette = cols) } else { scale_color_manual(values = cols, na.value = na.value) } plot <- plot + cols.scale if (!is.null(x = rows.highlight)) { plot <- plot + guides(color = FALSE) } } plot <- plot + theme_cowplot() + theme(plot.title = element_text(hjust = 0.5)) if (!is.null(x = span)) { plot <- plot + geom_smooth( mapping = aes_string(x = names.plot[1], y = names.plot[2]), method = 'loess', span = span ) } return(plot) } #' Plot a single dimension #' #' @param data Data to plot #' @param dims A two-length numeric vector with dimensions to use #' @param col.by ... #' @param cols Vector of colors, each color corresponds to an identity class. #' This may also be a single character or numeric value corresponding to a #' palette as specified by \code{\link[RColorBrewer]{brewer.pal.info}}.By #' default, ggplot2 assigns colors #' @param pt.size Adjust point size for plotting #' @param shape.by If NULL, all points are circles (default). You can specify #' any cell attribute (that can be pulled with \code{\link{FetchData}}) #' allowing for both different colors and different shapes on cells. #' @param alpha.by Mapping variable for the point alpha value #' @param order Specify the order of plotting for the idents. This can be #' useful for crowded plots if points of interest are being buried. Provide #' either a full list of valid idents or a subset to be plotted last (on top). 
#' @param label Whether to label the clusters #' @param repel Repel labels #' @param label.size Sets size of labels #' @param cells.highlight A list of character or numeric vectors of cells to #' highlight. If only one group of cells desired, can simply #' pass a vector instead of a list. If set, colors selected cells to the color(s) #' in \code{cols.highlight} and other cells black (white if dark.theme = TRUE); #' will also resize to the size(s) passed to \code{sizes.highlight} #' @param cols.highlight A vector of colors to highlight the cells as; will #' repeat to the length groups in cells.highlight #' @param sizes.highlight Size of highlighted cells; will repeat to the length #' groups in cells.highlight #' @param na.value Color value for NA points when using custom scale. #' @param raster Convert points to raster format, default is \code{NULL} #' which will automatically use raster if the number of points plotted is #' greater than 100,000 #' @param raster.dpi the pixel resolution for rastered plots, passed to geom_scattermore(). #' Default is c(512, 512) #' #' @return A ggplot2 object #' #' @importFrom cowplot theme_cowplot #' @importFrom RColorBrewer brewer.pal.info #' @importFrom ggplot2 ggplot aes_string geom_point labs guides scale_color_brewer #' scale_color_manual element_rect guide_legend discrete_scale #' #' @keywords internal #' #' @export #' SingleDimPlot <- function( data, dims, col.by = NULL, cols = NULL, pt.size = NULL, shape.by = NULL, alpha.by = NULL, order = NULL, label = FALSE, repel = FALSE, label.size = 4, cells.highlight = NULL, cols.highlight = '#DE2D26', sizes.highlight = 1, na.value = 'grey50', raster = NULL, raster.dpi = NULL ) { pt.size <- pt.size %||% AutoPointSize(data = data, raster = raster) if ((nrow(x = data) > 1e5) & !isFALSE(raster)){ message("Rasterizing points since number of points exceeds 100,000.", "\nTo disable this behavior set `raster=FALSE`") } raster <- raster %||% (nrow(x = data) > 1e5) if (!is.null(x = raster.dpi)) { if (!is.numeric(x = raster.dpi) || length(x = raster.dpi) != 2) stop("'raster.dpi' must be a two-length numeric vector") } if (length(x = dims) != 2) { stop("'dims' must be a two-length vector") } if (!is.data.frame(x = data)) { data <- as.data.frame(x = data) } if (is.character(x = dims) && !all(dims %in% colnames(x = data))) { stop("Cannot find dimensions to plot in data") } else if (is.numeric(x = dims)) { dims <- colnames(x = data)[dims] } if (!is.null(x = cells.highlight)) { highlight.info <- SetHighlight( cells.highlight = cells.highlight, cells.all = rownames(x = data), sizes.highlight = sizes.highlight %||% pt.size, cols.highlight = cols.highlight, col.base = cols[1] %||% '#C3C3C3', pt.size = pt.size ) order <- highlight.info$plot.order data$highlight <- highlight.info$highlight col.by <- 'highlight' pt.size <- highlight.info$size cols <- highlight.info$color } if (!is.null(x = order) && !is.null(x = col.by)) { if (typeof(x = order) == "logical") { if (order) { data <- data[order(!is.na(x = data[, col.by]), data[, col.by]), ] } } else { order <- rev(x = c( order, setdiff(x = unique(x = data[, col.by]), y = order) )) data[, col.by] <- factor(x = data[, col.by], levels = order) new.order <- order(x = data[, col.by]) data <- data[new.order, ] if (length(x = pt.size) == length(x = new.order)) { pt.size <- pt.size[new.order] } } } if (!is.null(x = col.by) && !col.by %in% colnames(x = data)) { warning("Cannot find ", col.by, " in plotting data, not coloring plot") col.by <- NULL } else { # col.index <- grep(pattern = col.by, 
x = colnames(x = data), fixed = TRUE) col.index <- match(x = col.by, table = colnames(x = data)) if (grepl(pattern = '^\\d', x = col.by)) { # Do something for numbers col.by <- paste0('x', col.by) } else if (grepl(pattern = '-', x = col.by)) { # Do something for dashes col.by <- gsub(pattern = '-', replacement = '.', x = col.by) } colnames(x = data)[col.index] <- col.by } if (!is.null(x = shape.by) && !shape.by %in% colnames(x = data)) { warning("Cannot find ", shape.by, " in plotting data, not shaping plot") } if (!is.null(x = alpha.by) && !alpha.by %in% colnames(x = data)) { warning( "Cannot find alpha variable ", alpha.by, " in data, setting to NULL", call. = FALSE, immediate. = TRUE ) alpha.by <- NULL } plot <- ggplot(data = data) plot <- if (isTRUE(x = raster)) { plot + geom_scattermore( mapping = aes_string( x = dims[1], y = dims[2], color = paste0("`", col.by, "`"), shape = shape.by, alpha = alpha.by ), pointsize = pt.size, pixels = raster.dpi ) } else { plot + geom_point( mapping = aes_string( x = dims[1], y = dims[2], color = paste0("`", col.by, "`"), shape = shape.by, alpha = alpha.by ), size = pt.size ) } plot <- plot + guides(color = guide_legend(override.aes = list(size = 3))) + labs(color = NULL, title = col.by) + CenterTitle() if (label && !is.null(x = col.by)) { plot <- LabelClusters( plot = plot, id = col.by, repel = repel, size = label.size ) } if (!is.null(x = cols)) { if (length(x = cols) == 1 && (is.numeric(x = cols) || cols %in% rownames(x = brewer.pal.info))) { scale <- scale_color_brewer(palette = cols, na.value = na.value) } else if (length(x = cols) == 1 && (cols %in% c('alphabet', 'alphabet2', 'glasbey', 'polychrome', 'stepped'))) { colors <- DiscretePalette(length(unique(data[[col.by]])), palette = cols) scale <- scale_color_manual(values = colors, na.value = na.value) } else { scale <- scale_color_manual(values = cols, na.value = na.value) } plot <- plot + scale } plot <- plot + theme_cowplot() return(plot) } #' Plot a single expression by identity on a plot #' #' @param data Data to plot #' @param idents Idents to use #' @param split Use a split violin plot #' @param type Make either a \dQuote{ridge} or \dQuote{violin} plot #' @param sort Sort identity classes (on the x-axis) by the average #' expression of the attribute being plotted #' @param y.max Maximum Y value to plot #' @param adjust Adjust parameter for geom_violin #' @param pt.size Size of points for violin plots #' @param cols Colors to use for plotting #' @param seed.use Random seed to use. If NULL, don't set a seed #' @param log Plot Y axis on log scale #' @param raster Convert points to raster format. Requires 'ggrastr' to be installed. #' Default is \code{NULL}, which automatically rasterizes if ggrastr is installed and #' the number of points exceeds 100,000.
#' #' @return A ggplot-based Expression-by-Identity plot #' #' @importFrom stats rnorm #' @importFrom utils globalVariables #' @importFrom ggridges geom_density_ridges theme_ridges #' @importFrom ggplot2 ggplot aes_string theme labs geom_violin geom_jitter #' ylim position_jitterdodge scale_fill_manual scale_y_log10 scale_x_log10 #' scale_y_discrete scale_x_continuous waiver #' @importFrom cowplot theme_cowplot #' #' @keywords internal #' @export #' SingleExIPlot <- function( data, idents, split = NULL, type = 'violin', sort = FALSE, y.max = NULL, adjust = 1, pt.size = 0, cols = NULL, seed.use = 42, log = FALSE, raster = NULL ) { if (!is.null(x = raster) && isTRUE(x = raster)){ if (!PackageCheck('ggrastr', error = FALSE)) { stop("Please install ggrastr from CRAN to enable rasterization.") } } if (PackageCheck('ggrastr', error = FALSE)) { # Set rasterization to true if ggrastr is installed and # number of points exceeds 100,000 if ((nrow(x = data) > 1e5) & !isFALSE(raster)){ message("Rasterizing points since number of points exceeds 100,000.", "\nTo disable this behavior set `raster=FALSE`") } raster <- TRUE } if (!is.null(x = seed.use)) { set.seed(seed = seed.use) } if (!is.data.frame(x = data) || ncol(x = data) != 1) { stop("'SingleExIPlot requires a data frame with 1 column") } feature <- colnames(x = data) data$ident <- idents if ((is.character(x = sort) && nchar(x = sort) > 0) || sort) { data$ident <- factor( x = data$ident, levels = names(x = rev(x = sort( x = tapply( X = data[, feature], INDEX = data$ident, FUN = mean ), decreasing = grepl(pattern = paste0('^', tolower(x = sort)), x = 'decreasing') ))) ) } if (log) { noise <- rnorm(n = length(x = data[, feature])) / 200 data[, feature] <- data[, feature] + 1 } else { noise <- rnorm(n = length(x = data[, feature])) / 100000 } if (all(data[, feature] == data[, feature][1])) { warning(paste0("All cells have the same value of ", feature, ".")) } else{ data[, feature] <- data[, feature] + noise } axis.label <- 'Expression Level' y.max <- y.max %||% max(data[, feature][is.finite(x = data[, feature])]) if (type == 'violin' && !is.null(x = split)) { data$split <- split vln.geom <- geom_violin fill <- 'split' } else if (type == 'splitViolin' && !is.null(x = split )) { data$split <- split vln.geom <- geom_split_violin fill <- 'split' type <- 'violin' } else { vln.geom <- geom_violin fill <- 'ident' } switch( EXPR = type, 'violin' = { x <- 'ident' y <- paste0("`", feature, "`") xlab <- 'Identity' ylab <- axis.label geom <- list( vln.geom(scale = 'width', adjust = adjust, trim = TRUE), theme(axis.text.x = element_text(angle = 45, hjust = 1)) ) if (is.null(x = split)) { if (isTRUE(x = raster)) { jitter <- ggrastr::rasterize(geom_jitter(height = 0, size = pt.size, show.legend = FALSE)) } else { jitter <- geom_jitter(height = 0, size = pt.size, show.legend = FALSE) } } else { if (isTRUE(x = raster)) { jitter <- ggrastr::rasterize(geom_jitter( position = position_jitterdodge(jitter.width = 0.4, dodge.width = 0.9), size = pt.size, show.legend = FALSE )) } else { jitter <- geom_jitter( position = position_jitterdodge(jitter.width = 0.4, dodge.width = 0.9), size = pt.size, show.legend = FALSE ) } } log.scale <- scale_y_log10() axis.scale <- ylim }, 'ridge' = { x <- paste0("`", feature, "`") y <- 'ident' xlab <- axis.label ylab <- 'Identity' geom <- list( geom_density_ridges(scale = 4), theme_ridges(), scale_y_discrete(expand = c(0.01, 0)), scale_x_continuous(expand = c(0, 0)) ) jitter <- geom_jitter(width = 0, size = pt.size, show.legend = FALSE) 
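# In the ridge case the expression values are mapped to the x-axis, so the log
# scale below is scale_x_log10(); axis.scale becomes a no-op, meaning y.max is
# only enforced for violin plots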
log.scale <- scale_x_log10() axis.scale <- function(...) { invisible(x = NULL) } }, stop("Unknown plot type: ", type) ) plot <- ggplot( data = data, mapping = aes_string(x = x, y = y, fill = fill)[c(2, 3, 1)] ) + labs(x = xlab, y = ylab, title = feature, fill = NULL) + theme_cowplot() + theme(plot.title = element_text(hjust = 0.5)) plot <- do.call(what = '+', args = list(plot, geom)) plot <- plot + if (log) { log.scale } else { axis.scale(min(data[, feature]), y.max) } if (pt.size > 0) { plot <- plot + jitter } if (!is.null(x = cols)) { if (!is.null(x = split)) { idents <- unique(x = as.vector(x = data$ident)) splits <- unique(x = as.vector(x = data$split)) labels <- if (length(x = splits) == 2) { splits } else { unlist(x = lapply( X = idents, FUN = function(pattern, x) { x.mod <- gsub( pattern = paste0(pattern, '.'), replacement = paste0(pattern, ': '), x = x, fixed = TRUE ) x.keep <- grep(pattern = ': ', x = x.mod, fixed = TRUE) x.return <- x.mod[x.keep] names(x = x.return) <- x[x.keep] return(x.return) }, x = unique(x = as.vector(x = data$split)) )) } if (is.null(x = names(x = labels))) { names(x = labels) <- labels } } else { labels <- levels(x = droplevels(data$ident)) } plot <- plot + scale_fill_manual(values = cols, labels = labels) } return(plot) } #' A single heatmap from base R using \code{\link[graphics]{image}} #' #' @param data matrix of data to plot #' @param order optional vector of cell names to specify order in plot #' @param title Title for plot #' #' @return No return, generates a base-R heatmap using \code{\link[graphics]{image}} #' #' @importFrom graphics axis image par plot.new title #' #' @keywords internal #' #' @export #' SingleImageMap <- function(data, order = NULL, title = NULL) { if (!is.null(x = order)) { data <- data[order, ] } par(mar = c(1, 1, 3, 3)) plot.new() image( x = as.matrix(x = data), axes = FALSE, add = TRUE, col = PurpleAndYellow() ) axis( side = 4, at = seq(from = 0, to = 1, length = ncol(x = data)), labels = colnames(x = data), las = 1, tick = FALSE, mgp = c(0, -0.5, 0), cex.axis = 0.75 ) title(main = title) } # A single polygon plot # # @param data Data to plot # @param group.by Grouping variable # @param ... Extra parameters passed to \code{\link[cowplot]{theme_cowplot}} # # @return A ggplot-based plot # #' @importFrom cowplot theme_cowplot #' @importFrom ggplot2 ggplot aes_string geom_polygon # # @seealso \code{\link[cowplot]{theme_cowplot}} # SinglePolyPlot <- function(data, group.by, ...) { plot <- ggplot(data = data, mapping = aes_string(x = 'x', y = 'y')) + geom_polygon(mapping = aes_string(fill = group.by, group = 'cell')) + coord_fixed() + theme_cowplot(...) return(plot) } #' A single heatmap from ggplot2 using geom_raster #' #' @param data A matrix or data frame with data to plot #' @param raster switch between geom_raster and geom_tile #' @param cell.order ... #' @param feature.order ... 
#' @param colors A vector of colors to use #' @param disp.min Minimum display value (all values below are clipped) #' @param disp.max Maximum display value (all values above are clipped) #' @param limits A two-length numeric vector with the limits for colors on the plot #' @param group.by A vector to group cells by, should be one grouping identity per cell #' #' @return A ggplot2 object # #' @importFrom ggplot2 ggplot aes_string geom_raster scale_fill_gradient #' scale_fill_gradientn theme element_blank labs geom_point guides #' guide_legend geom_tile #' #' @keywords internal #' #' @export # SingleRasterMap <- function( data, raster = TRUE, cell.order = NULL, feature.order = NULL, colors = PurpleAndYellow(), disp.min = -2.5, disp.max = 2.5, limits = NULL, group.by = NULL ) { data <- MinMax(data = data, min = disp.min, max = disp.max) data <- Melt(x = t(x = data)) colnames(x = data) <- c('Feature', 'Cell', 'Expression') if (!is.null(x = feature.order)) { data$Feature <- factor(x = data$Feature, levels = unique(x = feature.order)) } if (!is.null(x = cell.order)) { data$Cell <- factor(x = data$Cell, levels = unique(x = cell.order)) } if (!is.null(x = group.by)) { data$Identity <- group.by[data$Cell] } limits <- limits %||% c(min(data$Expression), max(data$Expression)) if (length(x = limits) != 2 || !is.numeric(x = limits)) { stop("'limits' must be a two-length numeric vector") } my_geom <- ifelse(test = raster, yes = geom_raster, no = geom_tile) plot <- ggplot(data = data) + my_geom(mapping = aes_string(x = 'Cell', y = 'Feature', fill = 'Expression')) + theme(axis.text.x = element_blank(), axis.ticks.x = element_blank()) + scale_fill_gradientn(limits = limits, colors = colors, na.value = "white") + labs(x = NULL, y = NULL, fill = group.by %iff% 'Expression') + WhiteBackground() + NoAxes(keep.text = TRUE) if (!is.null(x = group.by)) { plot <- plot + geom_point( mapping = aes_string(x = 'Cell', y = 'Feature', color = 'Identity'), alpha = 0 ) + guides(color = guide_legend(override.aes = list(alpha = 1))) } return(plot) } #' Base plotting function for all Spatial plots #' #' @param data Data.frame with info to be plotted #' @param image \code{SpatialImage} object to be plotted #' @param cols Vector of colors, each color corresponds to an identity class. #' This may also be a single character #' or numeric value corresponding to a palette as specified by #' \code{\link[RColorBrewer]{brewer.pal.info}}. By default, ggplot2 assigns #' colors #' @param image.alpha Adjust the opacity of the background images. Set to 0 to #' remove. #' @param pt.alpha Adjust the opacity of the points if plotting a #' \code{SpatialDimPlot} #' @param crop Crop the plot in to focus on points plotted. Set to \code{FALSE} #' to show entire background image. #' @param pt.size.factor Sets the size of the points relative to spot.radius #' @param stroke Control the width of the border around the spots #' @param col.by Mapping variable for the point color #' @param alpha.by Mapping variable for the point alpha value #' @param cells.highlight A list of character or numeric vectors of cells to #' highlight. If only one group of cells desired, can simply pass a vector #' instead of a list. If set, colors selected cells to the color(s) in #' cols.highlight #' @param cols.highlight A vector of colors to highlight the cells as; ordered #' the same as the groups in cells.highlight; last color corresponds to #' unselected cells.
#' @param geom Switch between normal spatial geom and geom to enable hover #' functionality #' @param na.value Color for spots with NA values #' #' @return A ggplot2 object #' #' @importFrom tibble tibble #' @importFrom ggplot2 ggplot aes_string coord_fixed geom_point xlim ylim #' coord_cartesian labs theme_void theme scale_fill_brewer #' #' @keywords internal #' #' @export #' SingleSpatialPlot <- function( data, image, cols = NULL, image.alpha = 1, pt.alpha = NULL, crop = TRUE, pt.size.factor = NULL, stroke = 0.25, col.by = NULL, alpha.by = NULL, cells.highlight = NULL, cols.highlight = c('#DE2D26', 'grey50'), geom = c('spatial', 'interactive', 'poly'), na.value = 'grey50' ) { geom <- match.arg(arg = geom) if (!is.null(x = col.by) && !col.by %in% colnames(x = data)) { warning("Cannot find '", col.by, "' in data, not coloring", call. = FALSE, immediate. = TRUE) col.by <- NULL } col.by <- col.by %iff% paste0("`", col.by, "`") alpha.by <- alpha.by %iff% paste0("`", alpha.by, "`") if (!is.null(x = cells.highlight)) { highlight.info <- SetHighlight( cells.highlight = cells.highlight, cells.all = rownames(x = data), sizes.highlight = pt.size.factor, cols.highlight = cols.highlight[1], col.base = cols.highlight[2] ) order <- highlight.info$plot.order data$highlight <- highlight.info$highlight col.by <- 'highlight' levels(x = data$ident) <- c(order, setdiff(x = levels(x = data$ident), y = order)) data <- data[order(data$ident), ] } plot <- ggplot(data = data, aes_string( x = colnames(x = data)[2], y = colnames(x = data)[1], fill = col.by, alpha = alpha.by )) plot <- switch( EXPR = geom, 'spatial' = { if (is.null(x = pt.alpha)) { plot <- plot + geom_spatial( point.size.factor = pt.size.factor, data = data, image = image, image.alpha = image.alpha, crop = crop, stroke = stroke, ) } else { plot <- plot + geom_spatial( point.size.factor = pt.size.factor, data = data, image = image, image.alpha = image.alpha, crop = crop, stroke = stroke, alpha = pt.alpha ) } plot + coord_fixed() + theme(aspect.ratio = 1) }, 'interactive' = { plot + geom_spatial_interactive( data = tibble(grob = list(GetImage(object = image, mode = 'grob'))), mapping = aes_string(grob = 'grob'), x = 0.5, y = 0.5 ) + geom_point(mapping = aes_string(color = col.by)) + xlim(0, ncol(x = image)) + ylim(nrow(x = image), 0) + coord_cartesian(expand = FALSE) }, 'poly' = { data$cell <- rownames(x = data) data[, c('x', 'y')] <- NULL data <- merge( x = data, y = GetTissueCoordinates(object = image, qhulls = TRUE), by = "cell" ) plot + geom_polygon( data = data, mapping = aes_string(fill = col.by, group = 'cell') ) + coord_fixed() + theme_cowplot() }, stop("Unknown geom, choose from 'spatial' or 'interactive'", call. 
= FALSE) ) if (!is.null(x = cells.highlight)) { plot <- plot + scale_fill_manual(values = cols.highlight) } if (!is.null(x = cols) && is.null(x = cells.highlight)) { if (length(x = cols) == 1 && (is.numeric(x = cols) || cols %in% rownames(x = brewer.pal.info))) { scale <- scale_fill_brewer(palette = cols, na.value = na.value) } else if (length(x = cols) == 1 && (cols %in% c('alphabet', 'alphabet2', 'glasbey', 'polychrome', 'stepped'))) { colors <- DiscretePalette(length(unique(data[[col.by]])), palette = cols) scale <- scale_fill_manual(values = colors, na.value = na.value) } else { scale <- scale_fill_manual(values = cols, na.value = na.value) } plot <- plot + scale } plot <- plot + NoAxes() + theme(panel.background = element_blank()) return(plot) } # Reimplementation of ggplot2 coord$transform # # @param data A data frame with x-coordinates in the first column and y-coordinates # in the second # @param xlim,ylim X- and Y-limits for the transformation, must be two-length # numeric vectors # # @return \code{data} with transformed coordinates # #' @importFrom ggplot2 transform_position #' @importFrom scales rescale squish_infinite # Transform <- function(data, xlim = c(-Inf, Inf), ylim = c(-Inf, Inf)) { # Quick input argument checking if (!all(sapply(X = list(xlim, ylim), FUN = length) == 2)) { stop("'xlim' and 'ylim' must be two-length numeric vectors", call. = FALSE) } # Save original names df.names <- colnames(x = data) colnames(x = data)[1:2] <- c('x', 'y') # Rescale the X and Y values data <- transform_position( df = data, trans_x = function(df) { return(rescale(x = df, from = xlim)) }, trans_y = function(df) { return(rescale(x = df, from = ylim)) } ) # Something that ggplot2 does data <- transform_position( df = data, trans_x = squish_infinite, trans_y = squish_infinite ) # Restore original names colnames(x = data) <- df.names return(data) } Seurat/R/preprocessing.R0000644000176200001440000035321414170106500014726 0ustar liggesusers#' @include generics.R #' NULL #%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% # Functions #%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% #' Calculate the Barcode Distribution Inflection #' #' This function calculates an adaptive inflection point ("knee") of the barcode distribution #' for each sample group. This is useful for determining a threshold for removing #' low-quality samples. #' #' The function operates by calculating the slope of the barcode number vs. rank #' distribution, and then finding the point at which the distribution changes most #' steeply (the "knee"). Of note, this calculation often must be restricted as to the #' range at which it performs, so `threshold` parameters are provided to restrict the #' range of the calculation based on the rank of the barcodes. [BarcodeInflectionsPlot()] #' is provided as a convenience function to visualize and test different thresholds and #' thus provide more sensical end results. #' #' See [BarcodeInflectionsPlot()] to visualize the calculated inflection points and #' [SubsetByBarcodeInflections()] to subsequently subset the Seurat object. 
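#'
#' The stored results can be retrieved afterwards from the \code{tools} slot,
#' e.g. \code{Tool(object = object, slot = "CalculateBarcodeInflections")}.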
#' #' @param object Seurat object #' @param barcode.column Column to use as proxy for barcodes ("nCount_RNA" by default) #' @param group.column Column to group by ("orig.ident" by default) #' @param threshold.high Ignore barcodes of rank above this threshold in inflection calculation #' @param threshold.low Ignore barcodes of rank below this threshold in inflection calculation #' #' @return Returns Seurat object with a new list in the `tools` slot, `CalculateBarcodeInflections` with values: #' #' * `barcode_distribution` - contains the full barcode distribution across the entire dataset #' * `inflection_points` - the calculated inflection points within the thresholds #' * `threshold_values` - the provided (or default) threshold values to search within for inflections #' * `cells_pass` - the cells that pass the inflection point calculation #' #' @importFrom methods slot #' @importFrom stats ave aggregate #' #' @export #' @concept preprocessing #' #' @author Robert A. Amezquita, \email{robert.amezquita@fredhutch.org} #' @seealso \code{\link{BarcodeInflectionsPlot}} \code{\link{SubsetByBarcodeInflections}} #' #' @examples #' data("pbmc_small") #' CalculateBarcodeInflections(pbmc_small, group.column = 'groups') #' CalculateBarcodeInflections <- function( object, barcode.column = "nCount_RNA", group.column = "orig.ident", threshold.low = NULL, threshold.high = NULL ) { ## Check that barcode.column exists in meta.data if (!(barcode.column %in% colnames(x = object[[]]))) { stop("`barcode.column` specified not present in Seurat object provided") } # Calculation of barcode distribution ## Append rank by grouping x umi column # barcode_dist <- as.data.frame(object@meta.data)[, c(group.column, barcode.column)] barcode_dist <- object[[c(group.column, barcode.column)]] barcode_dist <- barcode_dist[do.call(what = order, args = barcode_dist), ] # order by columns left to right barcode_dist$rank <- ave( x = barcode_dist[, barcode.column], barcode_dist[, group.column], FUN = function(x) { return(rev(x = order(x))) } ) barcode_dist <- barcode_dist[order(barcode_dist[, group.column], barcode_dist[, 'rank']), ] ## calculate rawdiff and append per group top <- aggregate( x = barcode_dist[, barcode.column], by = list(barcode_dist[, group.column]), FUN = function(x) { return(c(0, diff(x = log10(x = x + 1)))) })$x bot <- aggregate( x = barcode_dist[, 'rank'], by = list(barcode_dist[, group.column]), FUN = function(x) { return(c(0, diff(x = x))) } )$x barcode_dist$rawdiff <- unlist(x = mapply( FUN = function(x, y) { return(ifelse(test = is.na(x = x / y), yes = 0, no = x / y)) }, x = top, y = bot )) # Calculation of inflection points ## Set thresholds for rank of barcodes to ignore threshold.low <- threshold.low %||% 1 threshold.high <- threshold.high %||% max(barcode_dist$rank) ## Subset the barcode distribution by thresholds barcode_dist_sub <- barcode_dist[barcode_dist$rank > threshold.low & barcode_dist$rank < threshold.high, ] ## Calculate inflection points ## note: if the thresholds are such that each group produces the same length, ## aggregate will create a data.frame with x.* columns, where * is the length; ## using the same combine approach will yield non-symmetrical results!
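## A toy illustration of that aggregate() behavior (hypothetical values, not
## part of the calculation): with equal-sized groups the result column is a
## matrix, with unequal group sizes it is a list -- hence the
## is.list()/is.matrix() handling below
##   aggregate(x = c(1, 2, 3, 4), by = list(c('a', 'a', 'b', 'b')),
##             FUN = function(x) x == min(x))$x   # a 2 x 2 logical matrix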
whichmin_list <- aggregate( x = barcode_dist_sub[, 'rawdiff'], by = list(barcode_dist_sub[, group.column]), FUN = function(x) { return(x == min(x)) } )$x ## workaround for aggregate behavior noted above if (is.list(x = whichmin_list)) { # uneven lengths is_inflection <- unlist(x = whichmin_list) } else if (is.matrix(x = whichmin_list)) { # even lengths is_inflection <- as.vector(x = t(x = whichmin_list)) } tmp <- cbind(barcode_dist_sub, is_inflection) # inflections <- tmp[tmp$is_inflection == TRUE, c(group.column, barcode.column, "rank")] inflections <- tmp[which(x = tmp$is_inflection), c(group.column, barcode.column, 'rank')] # Use inflection point for what cells to keep ## use the inflection points to cut the subsetted dist to what to keep ## keep only the barcodes above the inflection points keep <- unlist(x = lapply( X = whichmin_list, FUN = function(x) { keep <- !x if (sum(keep) == length(x = keep)) { return(keep) # prevents bug in case of keeping all cells } # toss <- which(keep == FALSE):length(x = keep) # the end cells below knee toss <- which(x = !keep):length(x = keep) keep[toss] <- FALSE return(keep) } )) barcode_dist_sub_keep <- barcode_dist_sub[keep, ] cells_keep <- rownames(x = barcode_dist_sub_keep) # Bind thresholds to keep track of where they are placed thresholds <- data.frame( threshold = c('threshold.low', 'threshold.high'), rank = c(threshold.low, threshold.high) ) # Combine relevant info together ## Combine Barcode dist, inflection point, and cells to keep into list info <- list( barcode_distribution = barcode_dist, inflection_points = inflections, threshold_values = thresholds, cells_pass = cells_keep ) # save results into object Tool(object = object) <- info return(object) } #' Demultiplex samples based on data from cell 'hashing' #' #' Assign sample-of-origin for each cell, annotate doublets. #' #' @param object Seurat object. Assumes that the hash tag oligo (HTO) data has been added and normalized. #' @param assay Name of the Hashtag assay (HTO by default) #' @param positive.quantile The quantile of inferred 'negative' distribution for each hashtag - over which the cell is considered 'positive'. Default is 0.99 #' @param init Initial number of clusters for hashtags. Default is the # of hashtag oligo names + 1 (to account for negatives) #' @param kfunc Clustering function for initial hashtag grouping. Default is "clara" for fast k-medoids clustering on large applications, also support "kmeans" for kmeans clustering #' @param nsamples Number of samples to be drawn from the dataset used for clustering, for kfunc = "clara" #' @param nstarts nstarts value for k-means clustering (for kfunc = "kmeans"). 100 by default #' @param seed Sets the random seed. 
If NULL, seed is not set #' @param verbose Prints the output #' #' @return The Seurat object with the following demultiplexed information stored in the meta data: #' \describe{ #' \item{hash.maxID}{Name of hashtag with the highest signal} #' \item{hash.secondID}{Name of hashtag with the second highest signal} #' \item{hash.margin}{The difference between signals for hash.maxID and hash.secondID} #' \item{classification}{Classification result, with doublets/multiplets named by the top two highest hashtags} #' \item{classification.global}{Global classification result (singlet, doublet or negative)} #' \item{hash.ID}{Classification result where doublet IDs are collapsed} #' } #' #' @importFrom cluster clara #' @importFrom Matrix colSums #' @importFrom fitdistrplus fitdist #' @importFrom stats pnbinom kmeans #' #' @export #' @concept preprocessing #' #' @seealso \code{\link{HTOHeatmap}} #' #' @examples #' \dontrun{ #' object <- HTODemux(object) #' } #' HTODemux <- function( object, assay = "HTO", positive.quantile = 0.99, init = NULL, nstarts = 100, kfunc = "clara", nsamples = 100, seed = 42, verbose = TRUE ) { if (!is.null(x = seed)) { set.seed(seed = seed) } #initial clustering assay <- assay %||% DefaultAssay(object = object) data <- GetAssayData(object = object, assay = assay) counts <- GetAssayData( object = object, assay = assay, slot = 'counts' )[, colnames(x = object)] counts <- as.matrix(x = counts) ncenters <- init %||% (nrow(x = data) + 1) switch( EXPR = kfunc, 'kmeans' = { init.clusters <- kmeans( x = t(x = GetAssayData(object = object, assay = assay)), centers = ncenters, nstart = nstarts ) #identify positive and negative signals for all HTO Idents(object = object, cells = names(x = init.clusters$cluster)) <- init.clusters$cluster }, 'clara' = { #use fast k-medoid clustering init.clusters <- clara( x = t(x = GetAssayData(object = object, assay = assay)), k = ncenters, samples = nsamples ) #identify positive and negative signals for all HTO Idents(object = object, cells = names(x = init.clusters$clustering), drop = TRUE) <- init.clusters$clustering }, stop("Unknown k-means function ", kfunc, ", please choose from 'kmeans' or 'clara'") ) #average hto signals per cluster #work around so we don't average all the RNA levels which takes time average.expression <- AverageExpression( object = object, assays = assay, verbose = FALSE )[[assay]] #checking for any cluster with all zero counts for any barcode if (sum(average.expression == 0) > 0) { stop("Cells with zero counts exist as a cluster.") } #create a matrix to store classification result discrete <- GetAssayData(object = object, assay = assay) discrete[discrete > 0] <- 0 # for each HTO, we will use the minimum cluster for fitting for (iter in rownames(x = data)) { values <- counts[iter, colnames(object)] #commented out if we take all but the top cluster as background #values_negative=values[setdiff(object@cell.names,WhichCells(object,which.max(average.expression[iter,])))] values.use <- values[WhichCells( object = object, idents = levels(x = Idents(object = object))[[which.min(x = average.expression[iter, ])]] )] fit <- suppressWarnings(expr = fitdist(data = values.use, distr = "nbinom")) cutoff <- as.numeric(x = quantile(x = fit, probs = positive.quantile)$quantiles[1]) discrete[iter, names(x = which(x = values > cutoff))] <- 1 if (verbose) { message(paste0("Cutoff for ", iter, " : ", cutoff, " reads")) } } # now assign cells to HTO based on discretized values npositive <- colSums(x = discrete) classification.global <- npositive 
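# npositive is the number of HTOs above their cutoff in each cell:
# 0 -> Negative, 1 -> Singlet, >1 -> Doublet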
classification.global[npositive == 0] <- "Negative" classification.global[npositive == 1] <- "Singlet" classification.global[npositive > 1] <- "Doublet" donor.id = rownames(x = data) hash.max <- apply(X = data, MARGIN = 2, FUN = max) hash.maxID <- apply(X = data, MARGIN = 2, FUN = which.max) hash.second <- apply(X = data, MARGIN = 2, FUN = MaxN, N = 2) hash.maxID <- as.character(x = donor.id[sapply( X = 1:ncol(x = data), FUN = function(x) { return(which(x = data[, x] == hash.max[x])[1]) } )]) hash.secondID <- as.character(x = donor.id[sapply( X = 1:ncol(x = data), FUN = function(x) { return(which(x = data[, x] == hash.second[x])[1]) } )]) hash.margin <- hash.max - hash.second doublet_id <- sapply( X = 1:length(x = hash.maxID), FUN = function(x) { return(paste(sort(x = c(hash.maxID[x], hash.secondID[x])), collapse = "_")) } ) # doublet_names <- names(x = table(doublet_id))[-1] # Not used classification <- classification.global classification[classification.global == "Negative"] <- "Negative" classification[classification.global == "Singlet"] <- hash.maxID[which(x = classification.global == "Singlet")] classification[classification.global == "Doublet"] <- doublet_id[which(x = classification.global == "Doublet")] classification.metadata <- data.frame( hash.maxID, hash.secondID, hash.margin, classification, classification.global ) colnames(x = classification.metadata) <- paste( assay, c('maxID', 'secondID', 'margin', 'classification', 'classification.global'), sep = '_' ) object <- AddMetaData(object = object, metadata = classification.metadata) Idents(object) <- paste0(assay, '_classification') # Idents(object, cells = rownames(object@meta.data[object@meta.data$classification.global == "Doublet", ])) <- "Doublet" doublets <- rownames(x = object[[]])[which(object[[paste0(assay, "_classification.global")]] == "Doublet")] Idents(object = object, cells = doublets) <- 'Doublet' # object@meta.data$hash.ID <- Idents(object) object$hash.ID <- Idents(object = object) return(object) } #' Calculate pearson residuals of features not in the scale.data #' #' This function calls sctransform::get_residuals. #' #' @param object A seurat object #' @param features Name of features to add into the scale.data #' @param assay Name of the assay of the seurat object generated by SCTransform #' @param umi.assay Name of the assay of the seurat object containing UMI matrix #' and the default is RNA #' @param clip.range Numeric of length two specifying the min and max values the #' Pearson residual will be clipped to #' @param replace.value Recalculate residuals for all features, even if they are #' already present. Useful if you want to change the clip.range. #' @param na.rm For features where there is no feature model stored, return NA #' for residual value in scale.data when na.rm = FALSE. When na.rm is TRUE, only #' return residuals for features with a model stored for all cells. 
#' @param verbose Whether to print messages and progress bars #' #' @return Returns a Seurat object containing Pearson residuals of added #' features in its scale.data #' #' @importFrom sctransform get_residuals #' @importFrom matrixStats rowAnyNAs #' #' @export #' @concept preprocessing #' #' @seealso \code{\link[sctransform]{get_residuals}} #' #' @examples #' data("pbmc_small") #' pbmc_small <- SCTransform(object = pbmc_small, variable.features.n = 20) #' pbmc_small <- GetResidual(object = pbmc_small, features = c('MS4A1', 'TCL1A')) #' GetResidual <- function( object, features, assay = NULL, umi.assay = NULL, clip.range = NULL, replace.value = FALSE, na.rm = TRUE, verbose = TRUE ) { assay <- assay %||% DefaultAssay(object = object) if (IsSCT(assay = object[[assay]])) { object[[assay]] <- as(object[[assay]], 'SCTAssay') } if (!inherits(x = object[[assay]], what = "SCTAssay")) { stop(assay, " assay was not generated by SCTransform") } sct.models <- levels(x = object[[assay]]) if (length(x = sct.models) == 0) { warning("SCT model not present in assay", call. = FALSE, immediate. = TRUE) return(object) } possible.features <- unique(x = unlist(x = lapply(X = sct.models, FUN = function(x) { rownames(x = SCTResults(object = object[[assay]], slot = "feature.attributes", model = x)) } ))) bad.features <- setdiff(x = features, y = possible.features) if (length(x = bad.features) > 0) { warning("The following requested features are not present in any models: ", paste(bad.features, collapse = ", "), call. = FALSE) features <- intersect(x = features, y = possible.features) } features.orig <- features if (na.rm) { # only compute residuals when feature model info is present in all features <- names(x = which(x = table(unlist(x = lapply( X = sct.models, FUN = function(x) { rownames(x = SCTResults(object = object[[assay]], slot = "feature.attributes", model = x)) } ))) == length(x = sct.models))) if (length(x = features) == 0) { return(object) } } features <- intersect(x = features.orig, y = features) if (length(x = sct.models) > 1 && verbose) { message( "This SCTAssay contains multiple SCT models. Computing residuals for cells using different models" ) } new.residuals <- lapply( X = sct.models, FUN = function(x) { GetResidualSCTModel( object = object, assay = assay, SCTModel = x, new_features = features, replace.value = replace.value, clip.range = clip.range, verbose = verbose ) } ) existing.data <- GetAssayData(object = object, slot = 'scale.data', assay = assay) all.features <- union(x = rownames(x = existing.data), y = features) new.scale <- matrix( data = NA, nrow = length(x = all.features), ncol = ncol(x = object), dimnames = list(all.features, Cells(x = object)) ) if (nrow(x = existing.data) > 0){ new.scale[1:nrow(x = existing.data), ] <- existing.data } if (length(x = new.residuals) == 1 & is.list(x = new.residuals)) { new.residuals <- new.residuals[[1]] } else { new.residuals <- Reduce(cbind, new.residuals) } new.scale[rownames(x = new.residuals), colnames(x = new.residuals)] <- new.residuals if (na.rm) { new.scale <- new.scale[!rowAnyNAs(x = new.scale), ] } object <- SetAssayData( object = object, assay = assay, slot = "scale.data", new.data = new.scale ) if (any(!features.orig %in% rownames(x = new.scale))) { bad.features <- features.orig[which(!features.orig %in% rownames(x = new.scale))] warning("Residuals not computed for the following requested features: ", paste(bad.features, collapse = ", "), call. 
= FALSE) } return(object) } #' Load a 10x Genomics Visium Spatial Experiment into a \code{Seurat} object #' #' @inheritParams Read10X #' @inheritParams SeuratObject::CreateSeuratObject #' @param data.dir Directory containing the H5 file specified by \code{filename} #' and the image data in a subdirectory called \code{spatial} #' @param filename Name of H5 file containing the feature barcode matrix #' @param slice Name for the stored image of the tissue slice #' @param filter.matrix Only keep spots that have been determined to be over #' tissue #' @param to.upper Converts all feature names to upper case. This can provide an #' approximate conversion of mouse to human gene names which can be useful in an #' explorative analysis. For cross-species comparisons, orthologous genes should #' be identified across species and used instead. #' @param image An object of class VisiumV1. Typically, an output from \code{\link{Read10X_Image}} #' @param ... Arguments passed to \code{\link{Read10X_h5}} #' #' @return A \code{Seurat} object #' #' @importFrom png readPNG #' @importFrom grid rasterGrob #' @importFrom jsonlite fromJSON #' #' @export #' @concept preprocessing #' #' @examples #' \dontrun{ #' data_dir <- 'path/to/data/directory' #' list.files(data_dir) # Should show filtered_feature_bc_matrix.h5 #' Load10X_Spatial(data.dir = data_dir) #' } #' Load10X_Spatial <- function( data.dir, filename = 'filtered_feature_bc_matrix.h5', assay = 'Spatial', slice = 'slice1', filter.matrix = TRUE, to.upper = FALSE, image = NULL, ... ) { if (length(x = data.dir) > 1) { warning("'Load10X_Spatial' accepts only one 'data.dir'", immediate. = TRUE) data.dir <- data.dir[1] } data <- Read10X_h5(filename = file.path(data.dir, filename), ...) if (to.upper) { rownames(x = data) <- toupper(x = rownames(x = data)) } object <- CreateSeuratObject(counts = data, assay = assay) if (is.null(x = image)) { image <- Read10X_Image( image.dir = file.path(data.dir, 'spatial'), filter.matrix = filter.matrix ) } else { if (!inherits(x = image, what = "VisiumV1")) stop("Image must be an object of class 'VisiumV1'.") } image <- image[Cells(x = object)] DefaultAssay(object = image) <- assay object[[slice]] <- image return(object) } #' Load STARmap data #' #' @param data.dir location of data directory that contains the counts matrix, #' gene name, qhull, and centroid files. #' @param counts.file name of file containing the counts matrix (csv) #' @param gene.file name of file containing the gene names (csv) #' @param qhull.file name of file containing the hull coordinates (tsv) #' @param centroid.file name of file containing the centroid positions (tsv) #' @param assay Name of assay to associate spatial data to #' @param image Name of "image" object storing spatial coordinates #' #' @return A \code{\link{Seurat}} object #' #' @importFrom methods new #' @importFrom utils read.csv read.table #' #' @seealso \code{\link{STARmap}} #' #' @export #' @concept preprocessing #' LoadSTARmap <- function( data.dir, counts.file = "cell_barcode_count.csv", gene.file = "genes.csv", qhull.file = "qhulls.tsv", centroid.file = "centroids.tsv", assay = "Spatial", image = "image" ) { if (!dir.exists(paths = data.dir)) { stop("Cannot find directory ", data.dir, call. 
= FALSE) } counts <- read.csv( file = file.path(data.dir, counts.file), as.is = TRUE, header = FALSE ) gene.names <- read.csv( file = file.path(data.dir, gene.file), as.is = TRUE, header = FALSE ) qhulls <- read.table( file = file.path(data.dir, qhull.file), sep = '\t', col.names = c('cell', 'y', 'x'), as.is = TRUE ) centroids <- read.table( file = file.path(data.dir, centroid.file), sep = '\t', as.is = TRUE, col.names = c('y', 'x') ) colnames(x = counts) <- gene.names[, 1] rownames(x = counts) <- paste0('starmap', seq(1:nrow(x = counts))) counts <- as.matrix(x = counts) rownames(x = centroids) <- rownames(x = counts) qhulls$cell <- paste0('starmap', qhulls$cell) centroids <- as.matrix(x = centroids) starmap <- CreateSeuratObject(counts = t(x = counts), assay = assay) starmap[[image]] <- new( Class = 'STARmap', assay = assay, coordinates = as.data.frame(x = centroids), qhulls = qhulls ) return(starmap) } #' Normalize raw data #' #' Normalize count data per cell and transform to log scale #' #' @param data Matrix with the raw count data #' @param scale.factor Scale the data. Default is 1e4 #' @param verbose Print progress #' #' @return Returns a matrix with the normalized and log-transformed data #' #' @importFrom methods as #' #' @export #' @concept preprocessing #' #' @examples #' mat <- matrix(data = rbinom(n = 25, size = 5, prob = 0.2), nrow = 5) #' mat #' mat_norm <- LogNormalize(data = mat) #' mat_norm #' LogNormalize <- function(data, scale.factor = 1e4, verbose = TRUE) { if (is.data.frame(x = data)) { data <- as.matrix(x = data) } if (!inherits(x = data, what = 'dgCMatrix')) { data <- as(object = data, Class = "dgCMatrix") } # call Rcpp function to normalize if (verbose) { cat("Performing log-normalization\n", file = stderr()) } norm.data <- LogNorm(data, scale_factor = scale.factor, display_progress = verbose) colnames(x = norm.data) <- colnames(x = data) rownames(x = norm.data) <- rownames(x = data) return(norm.data) } #' Demultiplex samples based on classification method from MULTI-seq (McGinnis et al., bioRxiv 2018) #' #' Identify singlets, doublets and negative cells from multiplexing experiments. Annotate singlets by tags. #' #' @param object Seurat object. Assumes that the specified assay data has been added #' @param assay Name of the multiplexing assay (HTO by default) #' @param quantile The quantile to use for classification #' @param autoThresh Whether to perform automated threshold finding to define the best quantile. Default is FALSE #' @param maxiter Maximum number of iterations if autoThresh = TRUE.
Default is 5 #' @param qrange A range of possible quantile values to try if autoThresh = TRUE #' @param verbose Prints the output #' #' @return A Seurat object with demultiplexing results stored at \code{object$MULTI_ID} #' #' @export #' @concept preprocessing #' #' @references \url{https://www.biorxiv.org/content/10.1101/387241v1} #' #' @examples #' \dontrun{ #' object <- MULTIseqDemux(object) #' } #' MULTIseqDemux <- function( object, assay = "HTO", quantile = 0.7, autoThresh = FALSE, maxiter = 5, qrange = seq(from = 0.1, to = 0.9, by = 0.05), verbose = TRUE ) { assay <- assay %||% DefaultAssay(object = object) multi_data_norm <- t(x = GetAssayData( object = object, slot = "data", assay = assay )) if (autoThresh) { iter <- 1 negatives <- c() neg.vector <- c() while (iter <= maxiter) { # Iterate over q values to find ideal barcode thresholding results by maximizing singlet classifications bar.table_sweep.list <- list() n <- 0 for (q in qrange) { n <- n + 1 # Generate list of singlet/doublet/negative classifications across q sweep bar.table_sweep.list[[n]] <- ClassifyCells(data = multi_data_norm, q = q) names(x = bar.table_sweep.list)[n] <- paste0("q=" , q) } # Determine which q values results in the highest pSinglet res_round <- FindThresh(call.list = bar.table_sweep.list)$res res.use <- res_round[res_round$Subset == "pSinglet", ] q.use <- res.use[which.max(res.use$Proportion),"q"] if (verbose) { message("Iteration ", iter) message("Using quantile ", q.use) } round.calls <- ClassifyCells(data = multi_data_norm, q = q.use) #remove negative cells neg.cells <- names(x = round.calls)[which(x = round.calls == "Negative")] neg.vector <- c(neg.vector, rep(x = "Negative", length(x = neg.cells))) negatives <- c(negatives, neg.cells) if (length(x = neg.cells) == 0) { break } multi_data_norm <- multi_data_norm[-which(x = rownames(x = multi_data_norm) %in% neg.cells), ] iter <- iter + 1 } names(x = neg.vector) <- negatives demux_result <- c(round.calls,neg.vector) demux_result <- demux_result[rownames(x = object[[]])] } else{ demux_result <- ClassifyCells(data = multi_data_norm, q = quantile) } demux_result <- demux_result[rownames(x = object[[]])] object[['MULTI_ID']] <- factor(x = demux_result) Idents(object = object) <- "MULTI_ID" bcs <- colnames(x = multi_data_norm) bc.max <- bcs[apply(X = multi_data_norm, MARGIN = 1, FUN = which.max)] bc.second <- bcs[unlist(x = apply( X = multi_data_norm, MARGIN = 1, FUN = function(x) { return(which(x == MaxN(x))) } ))] doublet.names <- unlist(x = lapply( X = 1:length(x = bc.max), FUN = function(x) { return(paste(sort(x = c(bc.max[x], bc.second[x])), collapse = "_")) } )) doublet.id <- which(x = demux_result == "Doublet") MULTI_classification <- as.character(object$MULTI_ID) MULTI_classification[doublet.id] <- doublet.names[doublet.id] object$MULTI_classification <- factor(x = MULTI_classification) return(object) } #' Load in data from 10X #' #' Enables easy loading of sparse data matrices provided by 10X genomics. #' #' @param data.dir Directory containing the matrix.mtx, genes.tsv (or features.tsv), and barcodes.tsv #' files provided by 10X. A vector or named vector can be given in order to load #' several data directories. If a named vector is given, the cell barcode names #' will be prefixed with the name. 
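#' For example, \code{Read10X(data.dir = c(A = "path/A", B = "path/B"))} (paths
#' are placeholders) would prefix the barcodes from the two runs with
#' \code{A_} and \code{B_}.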
#' @param gene.column Specify which column of genes.tsv or features.tsv to use for gene names; default is 2 #' @param cell.column Specify which column of barcodes.tsv to use for cell names; default is 1 #' @param unique.features Make feature names unique (default TRUE) #' @param strip.suffix Remove trailing "-1" if present in all cell barcodes. #' #' @return If features.csv indicates the data has multiple data types, a list #' containing a sparse matrix of the data from each type will be returned. #' Otherwise a sparse matrix containing the expression data will be returned. #' #' @importFrom Matrix readMM #' @importFrom utils read.delim #' #' @export #' @concept preprocessing #' #' @examples #' \dontrun{ #' # For output from CellRanger < 3.0 #' data_dir <- 'path/to/data/directory' #' list.files(data_dir) # Should show barcodes.tsv, genes.tsv, and matrix.mtx #' expression_matrix <- Read10X(data.dir = data_dir) #' seurat_object = CreateSeuratObject(counts = expression_matrix) #' #' # For output from CellRanger >= 3.0 with multiple data types #' data_dir <- 'path/to/data/directory' #' list.files(data_dir) # Should show barcodes.tsv.gz, features.tsv.gz, and matrix.mtx.gz #' data <- Read10X(data.dir = data_dir) #' seurat_object = CreateSeuratObject(counts = data$`Gene Expression`) #' seurat_object[['Protein']] = CreateAssayObject(counts = data$`Antibody Capture`) #' } #' Read10X <- function( data.dir, gene.column = 2, cell.column = 1, unique.features = TRUE, strip.suffix = FALSE ) { full.data <- list() for (i in seq_along(along.with = data.dir)) { run <- data.dir[i] if (!dir.exists(paths = run)) { stop("Directory provided does not exist") } barcode.loc <- file.path(run, 'barcodes.tsv') gene.loc <- file.path(run, 'genes.tsv') features.loc <- file.path(run, 'features.tsv.gz') matrix.loc <- file.path(run, 'matrix.mtx') # Flag to indicate if this data is from CellRanger >= 3.0 pre_ver_3 <- file.exists(gene.loc) if (!pre_ver_3) { addgz <- function(s) { return(paste0(s, ".gz")) } barcode.loc <- addgz(s = barcode.loc) matrix.loc <- addgz(s = matrix.loc) } if (!file.exists(barcode.loc)) { stop("Barcode file missing. Expecting ", basename(path = barcode.loc)) } if (!pre_ver_3 && !file.exists(features.loc) ) { stop("Gene name or features file missing. Expecting ", basename(path = features.loc)) } if (!file.exists(matrix.loc)) { stop("Expression matrix file missing. Expecting ", basename(path = matrix.loc)) } data <- readMM(file = matrix.loc) cell.barcodes <- read.table(file = barcode.loc, header = FALSE, sep = '\t', row.names = NULL) if (ncol(x = cell.barcodes) > 1) { cell.names <- cell.barcodes[, cell.column] } else { cell.names <- readLines(con = barcode.loc) } if (all(grepl(pattern = "\\-1$", x = cell.names)) & strip.suffix) { cell.names <- as.vector(x = as.character(x = sapply( X = cell.names, FUN = ExtractField, field = 1, delim = "-" ))) } if (is.null(x = names(x = data.dir))) { if (length(x = data.dir) < 2) { colnames(x = data) <- cell.names } else { colnames(x = data) <- paste0(i, "_", cell.names) } } else { colnames(x = data) <- paste0(names(x = data.dir)[i], "_", cell.names) } feature.names <- read.delim( file = ifelse(test = pre_ver_3, yes = gene.loc, no = features.loc), header = FALSE, stringsAsFactors = FALSE ) if (any(is.na(x = feature.names[, gene.column]))) { warning( 'Some features names are NA. Replacing NA names with ID from the opposite column requested', call. = FALSE, immediate. 
= TRUE ) na.features <- which(x = is.na(x = feature.names[, gene.column])) replacement.column <- ifelse(test = gene.column == 2, yes = 1, no = 2) feature.names[na.features, gene.column] <- feature.names[na.features, replacement.column] } if (unique.features) { fcols = ncol(x = feature.names) if (fcols < gene.column) { stop(paste0("gene.column was set to ", gene.column, " but feature.tsv.gz (or genes.tsv) only has ", fcols, " columns.", " Try setting the gene.column argument to a value <= to ", fcols, ".")) } rownames(x = data) <- make.unique(names = feature.names[, gene.column]) } # In cell ranger 3.0, a third column specifying the type of data was added # and we will return each type of data as a separate matrix if (ncol(x = feature.names) > 2) { data_types <- factor(x = feature.names$V3) lvls <- levels(x = data_types) if (length(x = lvls) > 1 && length(x = full.data) == 0) { message("10X data contains more than one type and is being returned as a list containing matrices of each type.") } expr_name <- "Gene Expression" if (expr_name %in% lvls) { # Return Gene Expression first lvls <- c(expr_name, lvls[-which(x = lvls == expr_name)]) } data <- lapply( X = lvls, FUN = function(l) { return(data[data_types == l, , drop = FALSE]) } ) names(x = data) <- lvls } else{ data <- list(data) } full.data[[length(x = full.data) + 1]] <- data } # Combine all the data from different directories into one big matrix, note this # assumes that all data directories essentially have the same features files list_of_data <- list() for (j in 1:length(x = full.data[[1]])) { list_of_data[[j]] <- do.call(cbind, lapply(X = full.data, FUN = `[[`, j)) # Fix for Issue #913 list_of_data[[j]] <- as(object = list_of_data[[j]], Class = "dgCMatrix") } names(x = list_of_data) <- names(x = full.data[[1]]) # If multiple features, will return a list, otherwise # a matrix. if (length(x = list_of_data) == 1) { return(list_of_data[[1]]) } else { return(list_of_data) } } #' Read 10X hdf5 file #' #' Read count matrix from 10X CellRanger hdf5 file. #' This can be used to read both scATAC-seq and scRNA-seq matrices. #' #' @param filename Path to h5 file #' @param use.names Label row names with feature names rather than ID numbers. #' @param unique.features Make feature names unique (default TRUE) #' #' @return Returns a sparse matrix with rows and columns labeled. If multiple #' genomes are present, returns a list of sparse matrices (one per genome). 
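#'
#' @examples
#' \dontrun{
#' # A minimal usage sketch; the filename below is a placeholder for a
#' # CellRanger hdf5 output such as filtered_feature_bc_matrix.h5
#' counts <- Read10X_h5(filename = "path/to/filtered_feature_bc_matrix.h5")
#' seurat_object <- CreateSeuratObject(counts = counts)
#' }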
#' #' @export #' @concept preprocessing #' Read10X_h5 <- function(filename, use.names = TRUE, unique.features = TRUE) { if (!requireNamespace('hdf5r', quietly = TRUE)) { stop("Please install hdf5r to read HDF5 files") } if (!file.exists(filename)) { stop("File not found") } infile <- hdf5r::H5File$new(filename = filename, mode = 'r') genomes <- names(x = infile) output <- list() if (hdf5r::existsGroup(infile, 'matrix')) { # cellranger version 3 if (use.names) { feature_slot <- 'features/name' } else { feature_slot <- 'features/id' } } else { if (use.names) { feature_slot <- 'gene_names' } else { feature_slot <- 'genes' } } for (genome in genomes) { counts <- infile[[paste0(genome, '/data')]] indices <- infile[[paste0(genome, '/indices')]] indptr <- infile[[paste0(genome, '/indptr')]] shp <- infile[[paste0(genome, '/shape')]] features <- infile[[paste0(genome, '/', feature_slot)]][] barcodes <- infile[[paste0(genome, '/barcodes')]] sparse.mat <- sparseMatrix( i = indices[] + 1, p = indptr[], x = as.numeric(x = counts[]), dims = shp[], giveCsparse = FALSE ) if (unique.features) { features <- make.unique(names = features) } rownames(x = sparse.mat) <- features colnames(x = sparse.mat) <- barcodes[] sparse.mat <- as(object = sparse.mat, Class = 'dgCMatrix') # Split v3 multimodal if (infile$exists(name = paste0(genome, '/features'))) { types <- infile[[paste0(genome, '/features/feature_type')]][] types.unique <- unique(x = types) if (length(x = types.unique) > 1) { message("Genome ", genome, " has multiple modalities, returning a list of matrices for this genome") sparse.mat <- sapply( X = types.unique, FUN = function(x) { return(sparse.mat[which(x = types == x), ]) }, simplify = FALSE, USE.NAMES = TRUE ) } } output[[genome]] <- sparse.mat } infile$close_all() if (length(x = output) == 1) { return(output[[genome]]) } else{ return(output) } } #' Load a 10X Genomics Visium Image #' #' @param image.dir Path to directory with 10X Genomics visium image data; #' should include files \code{tissue_lowres_image.png}, #' \code{scalefactors_json.json} and \code{tissue_positions_list.csv} #' @param image.name The file name of the image. Defaults to tissue_lowres_image.png. #' @param filter.matrix Filter spot/feature matrix to only include spots that #' have been determined to be over tissue. #' @param ... Ignored for now #' #' @return A \code{\link{VisiumV1}} object #' #' @importFrom png readPNG #' @importFrom jsonlite fromJSON #' #' @seealso \code{\link{VisiumV1}} \code{\link{Load10X_Spatial}} #' #' @export #' @concept preprocessing #' Read10X_Image <- function(image.dir, image.name = "tissue_lowres_image.png", filter.matrix = TRUE, ...)
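# The body below reads the low-resolution tissue image, the scale factors JSON,
# and the tissue position list from a Space Ranger 'spatial/' output folder and
# wraps them in a VisiumV1 object. Illustrative call, with a placeholder path:
#   image.obj <- Read10X_Image(image.dir = "path/to/outs/spatial")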
{ image <- readPNG(source = file.path(image.dir, image.name)) scale.factors <- fromJSON(txt = file.path(image.dir, 'scalefactors_json.json')) tissue.positions <- read.csv( file = file.path(image.dir, 'tissue_positions_list.csv'), col.names = c('barcodes', 'tissue', 'row', 'col', 'imagerow', 'imagecol'), header = FALSE, as.is = TRUE, row.names = 1 ) if (filter.matrix) { tissue.positions <- tissue.positions[which(x = tissue.positions$tissue == 1), , drop = FALSE] } unnormalized.radius <- scale.factors$fiducial_diameter_fullres * scale.factors$tissue_lowres_scalef spot.radius <- unnormalized.radius / max(dim(x = image)) return(new( Class = 'VisiumV1', image = image, scale.factors = scalefactors( spot = scale.factors$tissue_hires_scalef, fiducial = scale.factors$fiducial_diameter_fullres, hires = scale.factors$tissue_hires_scalef, scale.factors$tissue_lowres_scalef ), coordinates = tissue.positions, spot.radius = spot.radius )) } #' Load in data from remote or local mtx files #' #' Enables easy loading of sparse data matrices #' #' @param mtx Name or remote URL of the mtx file #' @param cells Name or remote URL of the cells/barcodes file #' @param features Name or remote URL of the features/genes file #' @param cell.column Specify which column of the cells file to use for cell names; default is 1 #' @param feature.column Specify which column of the features file to use for feature/gene names; default is 2 #' @param cell.sep Specify the delimiter in the cell name file #' @param feature.sep Specify the delimiter in the feature name file #' @param skip.cell Number of lines to skip in the cells file before beginning to read cell names #' @param skip.feature Number of lines to skip in the features file before beginning to read gene names #' @param mtx.transpose Transpose the matrix after reading in #' @param unique.features Make feature names unique (default TRUE) #' @param strip.suffix Remove trailing "-1" if present in all cell barcodes. #' #' @return A sparse matrix containing the expression data. #' #' @importFrom Matrix readMM #' @importFrom utils read.delim #' @importFrom httr build_url parse_url #' @importFrom tools file_ext #' #' #' @export #' @concept preprocessing #' #' @examples #' \dontrun{ #' # For local files: #' #' expression_matrix <- ReadMtx( #' mtx = "count_matrix.mtx.gz", features = "features.tsv.gz", #' cells = "barcodes.tsv.gz" #' ) #' seurat_object <- CreateSeuratObject(counts = expression_matrix) #' #' # For remote files: #' #' expression_matrix <- ReadMtx(mtx = "http://localhost/matrix.mtx", #' cells = "http://localhost/barcodes.tsv", #' features = "http://localhost/genes.tsv") #' seurat_object <- CreateSeuratObject(counts = expression_matrix) #' } #' ReadMtx <- function( mtx, cells, features, cell.column = 1, feature.column = 2, cell.sep = "\t", feature.sep = "\t", skip.cell = 0, skip.feature = 0, mtx.transpose = FALSE, unique.features = TRUE, strip.suffix = FALSE ) { all.files <- list( "expression matrix" = mtx, "barcode list" = cells, "feature list" = features ) for (i in seq_along(along.with = all.files)) { uri <- normalizePath(all.files[[i]], mustWork = FALSE) err <- paste("Cannot find", names(x = all.files)[i], "at", uri) uri <- build_url(url = parse_url(url = uri)) if (grepl(pattern = '^:///', x = uri)) { uri <- gsub(pattern = '^://', replacement = '', x = uri) if (!file.exists(uri)) { stop(err, call. = FALSE) } } else { if (!Online(url = uri, seconds = 2L)) { stop(err, call.
= FALSE) } if (file_ext(uri) == 'gz') { con <- url(description = uri) uri <- gzcon(con = con, text = TRUE) } } all.files[[i]] <- uri } cell.barcodes <- read.table( file = all.files[['barcode list']], header = FALSE, sep = cell.sep, row.names = NULL, skip = skip.cell ) feature.names <- read.table( file = all.files[['feature list']], header = FALSE, sep = feature.sep, row.names = NULL, skip = skip.feature ) # read barcodes bcols <- ncol(x = cell.barcodes) if (bcols < cell.column) { stop( "cell.column was set to ", cell.column, " but ", cells, " only has ", bcols, " columns.", " Try setting the cell.column argument to a value <= to ", bcols, "." ) } cell.names <- cell.barcodes[, cell.column] if (all(grepl(pattern = "\\-1$", x = cell.names)) & strip.suffix) { cell.names <- as.vector(x = as.character(x = sapply( X = cell.names, FUN = ExtractField, field = 1, delim = "-" ))) } # read features fcols <- ncol(x = feature.names) if (fcols < feature.column) { stop( "feature.column was set to ", feature.column, " but ", features, " only has ", fcols, " column(s).", " Try setting the feature.column argument to a value <= to ", fcols, "." ) } if (any(is.na(x = feature.names[, feature.column]))) { na.features <- which(x = is.na(x = feature.names[, feature.column])) replacement.column <- ifelse(test = feature.column == 2, yes = 1, no = 2) if (replacement.column > fcols) { stop( "Some features names are NA in column ", feature.column, ". Try specifiying a different column.", call. = FALSE ) } else { warning( "Some features names are NA in column ", feature.column, ". Replacing NA names with ID from column ", replacement.column, ".", call. = FALSE ) } feature.names[na.features, feature.column] <- feature.names[na.features, replacement.column] } feature.names <- feature.names[, feature.column] if (unique.features) { feature.names <- make.unique(names = feature.names) } data <- readMM(file = all.files[['expression matrix']]) if (mtx.transpose) { data <- t(x = data) } if (length(x = cell.names) != ncol(x = data)) { stop( "Matrix has ", ncol(data), " columns but found ", length(cell.names), " barcodes. ", ifelse( test = length(x = cell.names) > ncol(x = data), yes = "Try increasing `skip.cell`. ", no = "" ), call. = FALSE ) } if (length(x = feature.names) != nrow(x = data)) { stop( "Matrix has ", nrow(data), " rows but found ", length(feature.names), " features. ", ifelse( test = length(x = feature.names) > nrow(x = data), yes = "Try increasing `skip.feature`. ", no = "" ), call. = FALSE ) } colnames(x = data) <- cell.names rownames(x = data) <- feature.names data <- as(data, Class = "dgCMatrix") return(data) } #' Load Slide-seq spatial data #' #' @param coord.file Path to csv file containing bead coordinate positions #' @param assay Name of assay to associate image to #' #' @return A \code{\link{SlideSeq}} object #' #' @importFrom utils read.csv #' #' @seealso \code{\link{SlideSeq}} #' #' @export #' @concept preprocessing #' ReadSlideSeq <- function(coord.file, assay = 'Spatial') { if (!file.exists(paths = coord.file)) { stop("Cannot find coord file ", coord.file, call. = FALSE) } slide.seq <- new( Class = 'SlideSeq', assay = assay, coordinates = read.csv( file = coord.file, header = TRUE, as.is = TRUE, row.names = 1 ) ) return(slide.seq) } #' Normalize raw data to fractions #' #' Normalize count data to relative counts per cell by dividing by the total #' per cell. Optionally use a scale factor, e.g. for counts per million (CPM) #' use \code{scale.factor = 1e6}. 
#' #' @param data Matrix with the raw count data #' @param scale.factor Scale the result. Default is 1 #' @param verbose Print progress #' @return Returns a matrix with the relative counts #' #' @importFrom methods as #' @importFrom Matrix colSums #' #' @export #' @concept preprocessing #' #' @examples #' mat <- matrix(data = rbinom(n = 25, size = 5, prob = 0.2), nrow = 5) #' mat #' mat_norm <- RelativeCounts(data = mat) #' mat_norm #' RelativeCounts <- function(data, scale.factor = 1, verbose = TRUE) { if (is.data.frame(x = data)) { data <- as.matrix(x = data) } if (!inherits(x = data, what = 'dgCMatrix')) { data <- as(object = data, Class = "dgCMatrix") } if (verbose) { cat("Performing relative-counts-normalization\n", file = stderr()) } norm.data <- data norm.data@x <- norm.data@x / rep.int(Matrix::colSums(norm.data), diff(norm.data@p)) * scale.factor return(norm.data) } #' Run the mark variogram computation on a given position matrix and expression #' matrix. #' #' Wraps the functionality of markvario from the spatstat package. #' #' @param spatial.location A 2 column matrix giving the spatial locations of #' each of the data points also in data #' @param data Matrix containing the data used as "marks" (e.g. gene expression) #' @param ... Arguments passed to markvario #' #' @importFrom spatstat.core markvario #' @importFrom spatstat.geom ppp #' #' @export #' @concept preprocessing #' RunMarkVario <- function( spatial.location, data, ... ) { pp <- ppp( x = spatial.location[, 1], y = spatial.location[, 2], xrange = range(spatial.location[, 1]), yrange = range(spatial.location[, 2]) ) if (nbrOfWorkers() > 1) { chunks <- nbrOfWorkers() features <- rownames(x = data) features <- split( x = features, f = ceiling(x = seq_along(along.with = features) / (length(x = features) / chunks)) ) mv <- future_lapply(X = features, FUN = function(x) { pp[["marks"]] <- as.data.frame(x = t(x = data[x, ])) markvario(X = pp, normalise = TRUE, ...) }) mv <- unlist(x = mv, recursive = FALSE) names(x = mv) <- rownames(x = data) } else { pp[["marks"]] <- as.data.frame(x = t(x = data)) mv <- markvario(X = pp, normalise = TRUE, ...) } return(mv) } #' Compute Moran's I value. #' #' Wraps the functionality of the Moran.I function from the ape package. #' Weights are computed as 1/distance. #' #' @param data Expression matrix #' @param pos Position matrix #' @param verbose Display messages/progress #' #' @importFrom stats dist #' #' @export #' @concept preprocessing #' RunMoransI <- function(data, pos, verbose = TRUE) { mysapply <- sapply if (verbose) { message("Computing Moran's I") mysapply <- pbsapply } Rfast2.installed <- PackageCheck("Rfast2", error = FALSE) if (Rfast2.installed) { MyMoran <- Rfast2::moranI } else if (!PackageCheck('ape', error = FALSE)) { stop( "'RunMoransI' requires either Rfast2 or ape to be installed", call. 
= FALSE ) } else { MyMoran <- ape::Moran.I if (getOption('Seurat.Rfast2.msg', TRUE)) { message( "For a more efficient implementation of the Morans I calculation,", "\n(selection.method = 'moransi') please install the Rfast2 package", "\n--------------------------------------------", "\ninstall.packages('Rfast2')", "\n--------------------------------------------", "\nAfter installation of Rfast2, Seurat will automatically use the more ", "\nefficient implementation (no further action necessary).", "\nThis message will be shown once per session" ) options(Seurat.Rfast2.msg = FALSE) } } pos.dist <- dist(x = pos) pos.dist.mat <- as.matrix(x = pos.dist) # weights as 1/dist^2 weights <- 1/pos.dist.mat^2 diag(x = weights) <- 0 results <- mysapply(X = 1:nrow(x = data), FUN = function(x) { tryCatch( expr = MyMoran(data[x, ], weights), error = function(x) c(1,1,1,1) ) }) pcol <- ifelse(test = Rfast2.installed, yes = 2, no = 4) results <- data.frame( observed = unlist(x = results[1, ]), p.value = unlist(x = results[pcol, ]) ) rownames(x = results) <- rownames(x = data) return(results) } #' Sample UMI #' #' Downsample each cell to a specified number of UMIs. Includes #' an option to upsample cells below specified UMI as well. #' #' @param data Matrix with the raw count data #' @param max.umi Number of UMIs to sample to #' @param upsample Upsamples all cells with fewer than max.umi #' @param verbose Display the progress bar #' #' @importFrom methods as #' #' @return Matrix with downsampled data #' #' @export #' @concept preprocessing #' #' @examples #' data("pbmc_small") #' counts = as.matrix(x = GetAssayData(object = pbmc_small, assay = "RNA", slot = "counts")) #' downsampled = SampleUMI(data = counts) #' head(x = downsampled) #' SampleUMI <- function( data, max.umi = 1000, upsample = FALSE, verbose = FALSE ) { data <- as(object = data, Class = "dgCMatrix") if (length(x = max.umi) == 1) { new_data <- RunUMISampling( data = data, sample_val = max.umi, upsample = upsample, display_progress = verbose ) } else if (length(x = max.umi) != ncol(x = data)) { stop("max.umi vector not equal to number of cells") } else { new_data <- RunUMISamplingPerCell( data = data, sample_val = max.umi, upsample = upsample, display_progress = verbose ) } dimnames(x = new_data) <- dimnames(x = data) return(new_data) } #' Use regularized negative binomial regression to normalize UMI count data #' #' This function calls sctransform::vst. The sctransform package is available at #' https://github.com/ChristophH/sctransform. #' Use this function as an alternative to the NormalizeData, #' FindVariableFeatures, ScaleData workflow. Results are saved in a new assay #' (named SCT by default) with counts being (corrected) counts, data being log1p(counts), #' scale.data being pearson residuals; sctransform::vst intermediate results are saved #' in misc slot of new assay. #' #' @param object A seurat object #' @param assay Name of assay to pull the count data from; default is 'RNA' #' @param new.assay.name Name for the new assay containing the normalized data #' @param reference.SCT.model If not NULL, compute residuals for the object #' using the provided SCT model; supports only log_umi as the latent variable. #' If residual.features are not specified, compute for the top variable.features.n #' specified in the model which are also present in the object. If #' residual.features are specified, the variable features of the resulting SCT #' assay are set to the top variable.features.n in the model. 
#' @param do.correct.umi Place corrected UMI matrix in assay counts slot; default is TRUE #' @param ncells Number of subsampling cells used to build NB regression; default is 5000 #' @param residual.features Genes to calculate residual features for; default is NULL (all genes). #' If specified, will be set to VariableFeatures of the returned object. #' @param variable.features.n Use this many features as variable features after #' ranking by residual variance; default is 3000. Only applied if residual.features is not set. #' @param variable.features.rv.th Instead of setting a fixed number of variable features, #' use this residual variance cutoff; this is only used when \code{variable.features.n} #' is set to NULL; default is 1.3. Only applied if residual.features is not set. #' @param vars.to.regress Variables to regress out in a second non-regularized linear #' regression. For example, percent.mito. Default is NULL #' @param do.scale Whether to scale residuals to have unit variance; default is FALSE #' @param do.center Whether to center residuals to have mean zero; default is TRUE #' @param clip.range Range to clip the residuals to; default is \code{c(-sqrt(n/30), sqrt(n/30))}, #' where n is the number of cells #' @param conserve.memory If set to TRUE the residual matrix for all genes is never #' created in full; useful for large data sets, but will take longer to run; #' this will also set return.only.var.genes to TRUE; default is FALSE #' @param return.only.var.genes If set to TRUE the scale.data matrices in output assay are #' subset to contain only the variable genes; default is TRUE #' @param seed.use Set a random seed. By default, sets the seed to 1448145. Setting #' NULL will not set a seed. #' @param verbose Whether to print messages and progress bars #' @param ... Additional parameters passed to \code{sctransform::vst} #' #' @return Returns a Seurat object with a new assay (named SCT by default) with #' counts being (corrected) counts, data being log1p(counts), scale.data being #' pearson residuals; sctransform::vst intermediate results are saved in misc #' slot of the new assay. #' #' @importFrom stats setNames #' @importFrom sctransform vst get_residual_var get_residuals correct_counts #' #' @seealso \code{\link[sctransform]{correct_counts}} \code{\link[sctransform]{get_residuals}} #' @export #' @concept preprocessing #' #' @examples #' data("pbmc_small") #' SCTransform(object = pbmc_small) #' SCTransform <- function( object, assay = 'RNA', new.assay.name = 'SCT', reference.SCT.model = NULL, do.correct.umi = TRUE, ncells = 5000, residual.features = NULL, variable.features.n = 3000, variable.features.rv.th = 1.3, vars.to.regress = NULL, do.scale = FALSE, do.center = TRUE, clip.range = c(-sqrt(x = ncol(x = object[[assay]]) / 30), sqrt(x = ncol(x = object[[assay]]) / 30)), conserve.memory = FALSE, return.only.var.genes = TRUE, seed.use = 1448145, verbose = TRUE, ... ) { if (!is.null(x = seed.use)) { set.seed(seed = seed.use) } assay <- assay %||% DefaultAssay(object = object) assay.obj <- GetAssay(object = object, assay = assay) umi <- GetAssayData(object = assay.obj, slot = 'counts') cell.attr <- slot(object = object, name = 'meta.data') vst.args <- list(...) 
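  # The checks that follow validate `vst.args` (batch_var, latent_var,
  # vars.to.regress, and any reference SCT model) against the object's
  # meta.data before everything is handed to sctransform::vst().
  # Minimal usage sketch, mirroring the example above (assumes the bundled
  # `pbmc_small` dataset; not run here):
  #   pbmc_small <- SCTransform(object = pbmc_small, verbose = FALSE)
  #   head(x = VariableFeatures(object = pbmc_small))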
# check for batch_var in meta data if ('batch_var' %in% names(x = vst.args)) { if (!(vst.args[['batch_var']] %in% colnames(x = cell.attr))) { stop('batch_var not found in seurat object meta data') } } # parameter checking when reference.SCT.model is set if (!is.null(x = reference.SCT.model) ) { if (inherits(x = reference.SCT.model, what = "SCTModel")) { reference.SCT.model <- SCTModel_to_vst(SCTModel = reference.SCT.model) } if (is.list(x = reference.SCT.model) & inherits(x = reference.SCT.model[[1]], what = "SCTModel")) { stop("reference.SCT.model must be one SCTModel rather than a list of SCTModel") } if ('latent_var' %in% names(x = vst.args)) { stop('custom latent variables are not supported when reference.SCT.model is given') } if (reference.SCT.model$model_str != 'y ~ log_umi') { stop('reference.SCT.model must be derived using default SCT regression formula, `y ~ log_umi`') } } # check for latent_var in meta data if ('latent_var' %in% names(x = vst.args)) { known.attr <- c('umi', 'gene', 'log_umi', 'log_gene', 'umi_per_gene', 'log_umi_per_gene') if (!all(vst.args[['latent_var']] %in% c(colnames(x = cell.attr), known.attr))) { stop('latent_var values are not from the set of cell attributes sctransform calculates by default and cannot be found in seurat object meta data') } } # check for vars.to.regress in meta data if (any(!vars.to.regress %in% colnames(x = cell.attr))) { stop('problem with second non-regularized linear regression; not all variables found in seurat object meta data; check vars.to.regress parameter') } if (any(c('cell_attr', 'verbosity', 'return_cell_attr', 'return_gene_attr', 'return_corrected_umi') %in% names(x = vst.args))) { warning( 'the following arguments will be ignored because they are set within this function:', paste( c( 'cell_attr', 'verbosity', 'return_cell_attr', 'return_gene_attr', 'return_corrected_umi' ), collapse = ', ' ), call. = FALSE, immediate. 
= TRUE ) } vst.args[['umi']] <- umi vst.args[['cell_attr']] <- cell.attr vst.args[['verbosity']] <- as.numeric(x = verbose) * 2 vst.args[['return_cell_attr']] <- TRUE vst.args[['return_gene_attr']] <- TRUE vst.args[['return_corrected_umi']] <- do.correct.umi vst.args[['n_cells']] <- min(ncells, ncol(x = umi)) residual.type <- vst.args[['residual_type']] %||% 'pearson' res.clip.range <- vst.args[['res_clip_range']] %||% c(-sqrt(x = ncol(x = umi)), sqrt(x = ncol(x = umi))) # set sct normalization method if (!is.null( reference.SCT.model)) { sct.method <- "reference.model" } else if (!is.null(x = residual.features)) { sct.method <- "residual.features" } else if (conserve.memory) { sct.method <- "conserve.memory" } else { sct.method <- "default" } # set vst model vst.out <- switch( EXPR = sct.method, 'reference.model' = { if (verbose) { message("Using reference SCTModel to calculate pearson residuals") } do.center <- FALSE do.correct.umi <- FALSE vst.out <- reference.SCT.model clip.range <- vst.out$arguments$sct.clip.range umi.field <- paste0("nCount_", assay) vst.out$cell_attr <- if (umi.field %in% colnames(x = object[[]])) { data.frame(log_umi = log10(x = object[[umi.field, drop = T]])) } else { data.frame(log_umi = log10(x = CalcN(object = object[[assay]])$nCount)) } all.features <- intersect( x = rownames(x = vst.out$gene_attr), y = rownames(x = umi) ) vst.out$gene_attr <- vst.out$gene_attr[all.features ,] vst.out$model_pars_fit <- vst.out$model_pars_fit[all.features,] vst.out }, 'residual.features' = { if (verbose) { message("Computing residuals for the ", length(x = residual.features), " specified features") } return.only.var.genes <- TRUE do.correct.umi <- FALSE vst.args[['return_corrected_umi']] <- FALSE vst.args[['residual_type']] <- 'none' vst.out <- do.call(what = 'vst', args = vst.args) vst.out$gene_attr$residual_variance <- NA_real_ vst.out }, 'conserve.memory' = { return.only.var.genes <- TRUE vst.args[['residual_type']] <- 'none' vst.out <- do.call(what = 'vst', args = vst.args) feature.variance <- get_residual_var( vst_out = vst.out, umi = umi, residual_type = residual.type, res_clip_range = res.clip.range ) vst.out$gene_attr$residual_variance <- NA_real_ vst.out$gene_attr[names(x = feature.variance), 'residual_variance'] <- feature.variance vst.out }, 'default' = { vst.out <- do.call(what = 'vst', args = vst.args) vst.out }) feature.variance <- vst.out$gene_attr[,"residual_variance"] names(x = feature.variance) <- rownames(x = vst.out$gene_attr) if (verbose) { message('Determine variable features') } feature.variance <- sort(x = feature.variance, decreasing = TRUE) if (!is.null(x = variable.features.n)) { top.features <- names(x = feature.variance)[1:min(variable.features.n, length(x = feature.variance))] } else { top.features <- names(x = feature.variance)[feature.variance >= variable.features.rv.th] } # get residuals vst.out <- switch( EXPR = sct.method, 'reference.model' = { if (is.null(x = residual.features)) { residual.features <- top.features } residual.features <- Reduce( f = intersect, x = list(residual.features, rownames(x = umi), rownames(x = vst.out$model_pars_fit)) ) residual.feature.mat <- get_residuals( vst_out = vst.out, umi = umi[residual.features, , drop = FALSE], verbosity = as.numeric(x = verbose)*2 ) vst.out$gene_attr <- vst.out$gene_attr[residual.features ,] ref.residuals.mean <- vst.out$gene_attr[,"residual_mean"] vst.out$y <- sweep( x = residual.feature.mat, MARGIN = 1, STATS = ref.residuals.mean, FUN = "-" ) vst.out }, 'residual.features' = { 
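      # residual.features mode: Pearson residuals are computed only for the
      # user-supplied features (intersected with the fitted genes); their
      # per-gene residual mean/variance are then written back into gene_attr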
residual.features <- intersect( x = residual.features, y = rownames(x = vst.out$gene_attr) ) residual.feature.mat <- get_residuals( vst_out = vst.out, umi = umi[residual.features, , drop = FALSE], verbosity = as.numeric(x = verbose)*2 ) vst.out$y <- residual.feature.mat vst.out$gene_attr$residual_mean <- NA_real_ vst.out$gene_attr$residual_variance <- NA_real_ vst.out$gene_attr[residual.features, "residual_mean"] <- rowMeans2(x = vst.out$y) vst.out$gene_attr[residual.features, "residual_variance"] <- RowVar(x = vst.out$y) vst.out }, 'conserve.memory' = { vst.out$y <- get_residuals( vst_out = vst.out, umi = umi[top.features, ], residual_type = residual.type, res_clip_range = res.clip.range, verbosity = as.numeric(x = verbose)*2 ) vst.out$gene_attr$residual_mean <- NA_real_ vst.out$gene_attr[top.features, "residual_mean"] = rowMeans2(x = vst.out$y) if (do.correct.umi & residual.type == 'pearson') { vst.out$umi_corrected <- correct_counts( x = vst.out, umi = umi, verbosity = as.numeric(x = verbose) * 2 ) } vst.out }, 'default' = { if (return.only.var.genes) { vst.out$y <- vst.out$y[top.features, ] } vst.out }) # create output assay and put (corrected) umi counts in count slot if (do.correct.umi & residual.type == 'pearson') { if (verbose) { message('Place corrected count matrix in counts slot') } # TODO: restore once check.matrix is in SeuratObject # assay.out <- CreateAssayObject(counts = vst.out$umi_corrected, check.matrix = FALSE) assay.out <- CreateAssayObject(counts = vst.out$umi_corrected,) vst.out$umi_corrected <- NULL } else { # TODO: restore once check.matrix is in SeuratObject # assay.out <- CreateAssayObject(counts = umi, check.matrix = FALSE) assay.out <- CreateAssayObject(counts = umi) } # set the variable genes VariableFeatures(object = assay.out) <- residual.features %||% top.features # put log1p transformed counts in data assay.out <- SetAssayData( object = assay.out, slot = 'data', new.data = log1p(x = GetAssayData(object = assay.out, slot = 'counts')) ) scale.data <- vst.out$y # clip the residuals scale.data[scale.data < clip.range[1]] <- clip.range[1] scale.data[scale.data > clip.range[2]] <- clip.range[2] # 2nd regression scale.data <- ScaleData( scale.data, features = NULL, vars.to.regress = vars.to.regress, latent.data = cell.attr[, vars.to.regress, drop = FALSE], model.use = 'linear', use.umi = FALSE, do.scale = do.scale, do.center = do.center, scale.max = Inf, block.size = 750, min.cells.to.block = 3000, verbose = verbose ) assay.out <- SetAssayData( object = assay.out, slot = 'scale.data', new.data = scale.data ) # save vst output (except y) in @misc slot vst.out$y <- NULL # save clip.range into vst model vst.out$arguments$sct.clip.range <- clip.range vst.out$arguments$sct.method <- sct.method Misc(object = assay.out, slot = 'vst.out') <- vst.out assay.out <- as(object = assay.out, Class = "SCTAssay") assay.out <- SCTAssay(assay.out, assay.orig = assay) slot(object = slot(object = assay.out, name = "SCTModel.list")[[1]], name = "umi.assay") <- assay object[[new.assay.name]] <- assay.out if (verbose) { message(paste("Set default assay to", new.assay.name)) } DefaultAssay(object = object) <- new.assay.name object <- LogSeuratCommand(object = object) return(object) } #' Subset a Seurat Object based on the Barcode Distribution Inflection Points #' #' This convenience function subsets a Seurat object based on calculated inflection points. 
#' #' See [CalculateBarcodeInflections()] to calculate inflection points and #' [BarcodeInflectionsPlot()] to visualize and test inflection point calculations. #' #' @param object Seurat object #' #' @return Returns a subsetted Seurat object. #' #' @export #' @concept preprocessing #' #' @author Robert A. Amezquita, \email{robert.amezquita@fredhutch.org} #' @seealso \code{\link{CalculateBarcodeInflections}} \code{\link{BarcodeInflectionsPlot}} #' #' @examples #' data("pbmc_small") #' pbmc_small <- CalculateBarcodeInflections( #' object = pbmc_small, #' group.column = 'groups', #' threshold.low = 20, #' threshold.high = 30 #' ) #' SubsetByBarcodeInflections(object = pbmc_small) #' SubsetByBarcodeInflections <- function(object) { cbi.data <- Tool(object = object, slot = 'CalculateBarcodeInflections') if (is.null(x = cbi.data)) { stop("Barcode inflections not calculated, please run CalculateBarcodeInflections") } return(object[, cbi.data$cells_pass]) } #%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% # Methods for Seurat-defined generics #%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% #' @param selection.method How to choose top variable features. Choose one of: #' \itemize{ #' \item{vst:}{ First, fits a line to the relationship of log(variance) and #' log(mean) using local polynomial regression (loess). Then standardizes the #' feature values using the observed mean and expected variance (given by the #' fitted line). Feature variance is then calculated on the standardized values #' after clipping to a maximum (see clip.max parameter).} #' \item{mean.var.plot (mvp):}{ First, uses a function to calculate average #' expression (mean.function) and dispersion (dispersion.function) for each #' feature. Next, divides features into num.bin (default 20) bins based on #' their average expression, and calculates z-scores for dispersion within #' each bin. The purpose of this is to identify variable features while #' controlling for the strong relationship between variability and average #' expression.} #' \item{dispersion (disp):}{ Selects the genes with the highest dispersion values} #' } #' @param loess.span (vst method) Loess span parameter used when fitting the #' variance-mean relationship #' @param clip.max (vst method) After standardization values larger than #' clip.max will be set to clip.max; default is 'auto' which sets this value to #' the square root of the number of cells #' @param mean.function Function to compute x-axis value (average expression). #' Default is to take the mean of the detected (i.e. non-zero) values #' @param dispersion.function Function to compute y-axis value (dispersion). #' Default is to take the standard deviation of all values #' @param num.bin Total number of bins to use in the scaled analysis (default #' is 20) #' @param binning.method Specifies how the bins should be computed.
Available #' methods are: #' \itemize{ #' \item{equal_width:}{ each bin is of equal width along the x-axis [default]} #' \item{equal_frequency:}{ each bin contains an equal number of features (can #' increase statistical power to detect overdispersed features at high #' expression values, at the cost of reduced resolution along the x-axis)} #' } #' @param verbose show progress bar for calculations #' #' @rdname FindVariableFeatures #' @concept preprocessing #' @export #' FindVariableFeatures.default <- function( object, selection.method = "vst", loess.span = 0.3, clip.max = 'auto', mean.function = FastExpMean, dispersion.function = FastLogVMR, num.bin = 20, binning.method = "equal_width", verbose = TRUE, ... ) { CheckDots(...) if (!inherits(x = object, 'Matrix')) { object <- as(object = as.matrix(x = object), Class = 'Matrix') } if (!inherits(x = object, what = 'dgCMatrix')) { object <- as(object = object, Class = 'dgCMatrix') } if (selection.method == "vst") { if (clip.max == 'auto') { clip.max <- sqrt(x = ncol(x = object)) } hvf.info <- data.frame(mean = rowMeans(x = object)) hvf.info$variance <- SparseRowVar2( mat = object, mu = hvf.info$mean, display_progress = verbose ) hvf.info$variance.expected <- 0 hvf.info$variance.standardized <- 0 not.const <- hvf.info$variance > 0 fit <- loess( formula = log10(x = variance) ~ log10(x = mean), data = hvf.info[not.const, ], span = loess.span ) hvf.info$variance.expected[not.const] <- 10 ^ fit$fitted # use c function to get variance after feature standardization hvf.info$variance.standardized <- SparseRowVarStd( mat = object, mu = hvf.info$mean, sd = sqrt(hvf.info$variance.expected), vmax = clip.max, display_progress = verbose ) colnames(x = hvf.info) <- paste0('vst.', colnames(x = hvf.info)) } else { if (!inherits(x = mean.function, what = 'function')) { stop("'mean.function' must be a function") } if (!inherits(x = dispersion.function, what = 'function')) { stop("'dispersion.function' must be a function") } feature.mean <- mean.function(object, verbose) feature.dispersion <- dispersion.function(object, verbose) names(x = feature.mean) <- names(x = feature.dispersion) <- rownames(x = object) feature.dispersion[is.na(x = feature.dispersion)] <- 0 feature.mean[is.na(x = feature.mean)] <- 0 data.x.breaks <- switch( EXPR = binning.method, 'equal_width' = num.bin, 'equal_frequency' = c( quantile( x = feature.mean[feature.mean > 0], probs = seq.int(from = 0, to = 1, length.out = num.bin) ) ), stop("Unknown binning method: ", binning.method) ) data.x.bin <- cut(x = feature.mean, breaks = data.x.breaks, include.lowest = TRUE) names(x = data.x.bin) <- names(x = feature.mean) mean.y <- tapply(X = feature.dispersion, INDEX = data.x.bin, FUN = mean) sd.y <- tapply(X = feature.dispersion, INDEX = data.x.bin, FUN = sd) feature.dispersion.scaled <- (feature.dispersion - mean.y[as.numeric(x = data.x.bin)]) / sd.y[as.numeric(x = data.x.bin)] names(x = feature.dispersion.scaled) <- names(x = feature.mean) hvf.info <- data.frame(feature.mean, feature.dispersion, feature.dispersion.scaled) rownames(x = hvf.info) <- rownames(x = object) colnames(x = hvf.info) <- paste0('mvp.', c('mean', 'dispersion', 'dispersion.scaled')) } return(hvf.info) } #' @param nfeatures Number of features to select as top variable features; #' only used when \code{selection.method} is set to \code{'dispersion'} or #' \code{'vst'} #' @param mean.cutoff A two-length numeric vector with low- and high-cutoffs for #' feature means #' @param dispersion.cutoff A two-length numeric vector with low- 
and high-cutoffs for #' feature dispersions #' #' @rdname FindVariableFeatures #' @concept preprocessing #' #' @importFrom utils head #' @export #' @method FindVariableFeatures Assay #' FindVariableFeatures.Assay <- function( object, selection.method = "vst", loess.span = 0.3, clip.max = 'auto', mean.function = FastExpMean, dispersion.function = FastLogVMR, num.bin = 20, binning.method = "equal_width", nfeatures = 2000, mean.cutoff = c(0.1, 8), dispersion.cutoff = c(1, Inf), verbose = TRUE, ... ) { if (length(x = mean.cutoff) != 2 || length(x = dispersion.cutoff) != 2) { stop("Both 'mean.cutoff' and 'dispersion.cutoff' must be two numbers") } if (selection.method == "vst") { data <- GetAssayData(object = object, slot = "counts") # if (ncol(x = data) < 1 || nrow(x = data) < 1) { if (IsMatrixEmpty(x = data)) { warning("selection.method set to 'vst' but count slot is empty; will use data slot instead") data <- GetAssayData(object = object, slot = "data") } } else { data <- GetAssayData(object = object, slot = "data") } hvf.info <- FindVariableFeatures( object = data, selection.method = selection.method, loess.span = loess.span, clip.max = clip.max, mean.function = mean.function, dispersion.function = dispersion.function, num.bin = num.bin, binning.method = binning.method, verbose = verbose, ... ) object[[names(x = hvf.info)]] <- hvf.info hvf.info <- hvf.info[which(x = hvf.info[, 1, drop = TRUE] != 0), ] if (selection.method == "vst") { hvf.info <- hvf.info[order(hvf.info$vst.variance.standardized, decreasing = TRUE), , drop = FALSE] } else { hvf.info <- hvf.info[order(hvf.info$mvp.dispersion, decreasing = TRUE), , drop = FALSE] } selection.method <- switch( EXPR = selection.method, 'mvp' = 'mean.var.plot', 'disp' = 'dispersion', selection.method ) top.features <- switch( EXPR = selection.method, 'mean.var.plot' = { means.use <- (hvf.info[, 1] > mean.cutoff[1]) & (hvf.info[, 1] < mean.cutoff[2]) dispersions.use <- (hvf.info[, 3] > dispersion.cutoff[1]) & (hvf.info[, 3] < dispersion.cutoff[2]) rownames(x = hvf.info)[which(x = means.use & dispersions.use)] }, 'dispersion' = head(x = rownames(x = hvf.info), n = nfeatures), 'vst' = head(x = rownames(x = hvf.info), n = nfeatures), stop("Unknown selection method: ", selection.method) ) VariableFeatures(object = object) <- top.features vf.name <- ifelse( test = selection.method == 'vst', yes = 'vst', no = 'mvp' ) vf.name <- paste0(vf.name, '.variable') object[[vf.name]] <- rownames(x = object[[]]) %in% top.features return(object) } #' @rdname FindVariableFeatures #' @export #' @method FindVariableFeatures SCTAssay #' FindVariableFeatures.SCTAssay <- function( object, nfeatures = 2000, ... ) { if (length(x = slot(object = object, name = "SCTModel.list")) > 1) { stop("SCT assay is comprised of multiple SCT models. To change the variable features, please set manually with VariableFeatures<-", call. 
= FALSE) } feature.attr <- SCTResults(object = object, slot = "feature.attributes") nfeatures <- min(nfeatures, nrow(x = feature.attr)) top.features <- rownames(x = feature.attr)[order(feature.attr$residual_variance, decreasing = TRUE)[1:nfeatures]] VariableFeatures(object = object) <- top.features return(object) } #' @param assay Assay to use #' #' @rdname FindVariableFeatures #' @concept preprocessing #' @export #' @method FindVariableFeatures Seurat #' FindVariableFeatures.Seurat <- function( object, assay = NULL, selection.method = "vst", loess.span = 0.3, clip.max = 'auto', mean.function = FastExpMean, dispersion.function = FastLogVMR, num.bin = 20, binning.method = "equal_width", nfeatures = 2000, mean.cutoff = c(0.1, 8), dispersion.cutoff = c(1, Inf), verbose = TRUE, ... ) { assay <- assay %||% DefaultAssay(object = object) assay.data <- GetAssay(object = object, assay = assay) assay.data <- FindVariableFeatures( object = assay.data, selection.method = selection.method, loess.span = loess.span, clip.max = clip.max, mean.function = mean.function, dispersion.function = dispersion.function, num.bin = num.bin, binning.method = binning.method, nfeatures = nfeatures, mean.cutoff = mean.cutoff, dispersion.cutoff = dispersion.cutoff, verbose = verbose, ... ) object[[assay]] <- assay.data if (inherits(x = object[[assay]], what = "SCTAssay")) { object <- GetResidual( object = object, assay = assay, features = VariableFeatures(object = assay.data), verbose = FALSE ) } object <- LogSeuratCommand(object = object) return(object) } #' @param object A Seurat object, assay, or expression matrix #' @param spatial.location Coordinates for each cell/spot/bead #' @param selection.method Method for selecting spatially variable features. #' \itemize{ #' \item \code{markvariogram}: See \code{\link{RunMarkVario}} for details #' \item \code{moransi}: See \code{\link{RunMoransI}} for details. #' } #' #' @param r.metric r value at which to report the "trans" value of the mark #' variogram #' @param x.cuts Number of divisions to make in the x direction, helps define #' the grid over which binning is performed #' @param y.cuts Number of divisions to make in the y direction, helps define #' the grid over which binning is performed #' @param verbose Print messages and progress #' #' @method FindSpatiallyVariableFeatures default #' @rdname FindSpatiallyVariableFeatures #' @concept preprocessing #' @concept spatial #' @export #' #' FindSpatiallyVariableFeatures.default <- function( object, spatial.location, selection.method = c('markvariogram', 'moransi'), r.metric = 5, x.cuts = NULL, y.cuts = NULL, verbose = TRUE, ... ) { # error check dimensions if (ncol(x = object) != nrow(x = spatial.location)) { stop("Please provide the same number of observations as spatial locations.") } if (!is.null(x = x.cuts) & !is.null(x = y.cuts)) { binned.data <- BinData( data = object, pos = spatial.location, x.cuts = x.cuts, y.cuts = y.cuts, verbose = verbose ) object <- binned.data$data spatial.location <- binned.data$pos } svf.info <- switch( EXPR = selection.method, 'markvariogram' = RunMarkVario( spatial.location = spatial.location, data = object ), 'moransi' = RunMoransI( data = object, pos = spatial.location, verbose = verbose ), stop("Invalid selection method. Please choose one of: markvariogram, moransi.") ) return(svf.info) } #' @param slot Slot in the Assay to pull data from #' @param features If provided, only compute on given features. Otherwise, #' compute for all features. 
#' @param nfeatures Number of features to mark as the top spatially variable. #' #' @method FindSpatiallyVariableFeatures Assay #' @rdname FindSpatiallyVariableFeatures #' @concept preprocessing #' @concept spatial #' @export #' FindSpatiallyVariableFeatures.Assay <- function( object, slot = "scale.data", spatial.location, selection.method = c('markvariogram', 'moransi'), features = NULL, r.metric = 5, x.cuts = NULL, y.cuts = NULL, nfeatures = 2000, verbose = TRUE, ... ) { features <- features %||% rownames(x = object) if (selection.method == "markvariogram" && "markvariogram" %in% names(x = Misc(object = object))) { features.computed <- names(x = Misc(object = object, slot = "markvariogram")) features <- features[! features %in% features.computed] } data <- GetAssayData(object = object, slot = slot) missing.features <- which(x = ! features %in% rownames(x = data)) if (length(x = missing.features) > 0) { remaining.features <- length(x = features) - length(x = missing.features) if (remaining.features > 0) { warning("Not all requested features are present in the requested slot (", slot, "). Removing ", length(x = missing.features), " missing features and continuing with ", remaining.features, " remaining features.", immediate. = TRUE, call. = FALSE) features <- features[features %in% rownames(x = data)] } else { stop("None of the requested features are present in the requested slot (", slot, ").", call. = FALSE) } } image.cells <- rownames(x = spatial.location) data <- as.matrix(x = data[features, image.cells, drop = FALSE]) rv <- RowVar(x = data) rv.small <- which(x = rv < 1e-16) rv.remove <- c() if (length(x = rv.small) > 0) { for (i in rv.small) { if (var(x = data[i, ]) == 0) { rv.remove <- c(rv.remove, i) } } } if (length(x = rv.remove) > 0) { data <- data[-c(rv.remove), , drop = FALSE] } if (nrow(x = data) != 0) { svf.info <- FindSpatiallyVariableFeatures( object = data, spatial.location = spatial.location, selection.method = selection.method, r.metric = r.metric, x.cuts = x.cuts, y.cuts = y.cuts, verbose = verbose, ... ) } else { svf.info <- c() } if (selection.method == "markvariogram") { if ("markvariogram" %in% names(x = Misc(object = object))) { svf.info <- c(svf.info, Misc(object = object, slot = "markvariogram")) } suppressWarnings(expr = Misc(object = object, slot = "markvariogram") <- svf.info) svf.info <- ComputeRMetric(mv = svf.info, r.metric) svf.info <- svf.info[order(svf.info[, 1]), , drop = FALSE] } if (selection.method == "moransi") { colnames(x = svf.info) <- paste0("MoransI_", colnames(x = svf.info)) svf.info <- svf.info[order(svf.info[, 2], -abs(svf.info[, 1])), , drop = FALSE] } var.name <- paste0(selection.method, ".spatially.variable") var.name.rank <- paste0(var.name, ".rank") svf.info[[var.name]] <- FALSE svf.info[[var.name]][1:(min(nrow(x = svf.info), nfeatures))] <- TRUE svf.info[[var.name.rank]] <- 1:nrow(x = svf.info) object[[names(x = svf.info)]] <- svf.info return(object) } #' @param assay Assay to pull the features (marks) from #' @param image Name of image to pull the coordinates from #' #' @method FindSpatiallyVariableFeatures Seurat #' @rdname FindSpatiallyVariableFeatures #' @concept preprocessing #' @concept spatial #' @export #' FindSpatiallyVariableFeatures.Seurat <- function( object, assay = NULL, slot = "scale.data", features = NULL, image = NULL, selection.method = c('markvariogram', 'moransi'), r.metric = 5, x.cuts = NULL, y.cuts = NULL, nfeatures = 2000, verbose = TRUE, ...
) { assay <- assay %||% DefaultAssay(object = object) features <- features %||% rownames(x = object[[assay]]) image <- image %||% DefaultImage(object = object) tc <- GetTissueCoordinates(object = object[[image]]) # check if markvariogram has been run on necessary features # only run for new ones object[[assay]] <- FindSpatiallyVariableFeatures( object = object[[assay]], slot = slot, features = features, spatial.location = tc, selection.method = selection.method, r.metric = r.metric, x.cuts = x.cuts, y.cuts = y.cuts, nfeatures = nfeatures, verbose = verbose, ... ) object <- LogSeuratCommand(object = object) return(object) } #' @importFrom future.apply future_lapply #' @importFrom future nbrOfWorkers #' #' @param normalization.method Method for normalization. #' \itemize{ #' \item{LogNormalize: }{Feature counts for each cell are divided by the total #' counts for that cell and multiplied by the scale.factor. This is then #' natural-log transformed using log1p.} #' \item{CLR: }{Applies a centered log ratio transformation} #' \item{RC: }{Relative counts. Feature counts for each cell are divided by the total #' counts for that cell and multiplied by the scale.factor. No log-transformation is applied. #' For counts per million (CPM) set \code{scale.factor = 1e6}} #' } #' @param scale.factor Sets the scale factor for cell-level normalization #' @param margin If performing CLR normalization, normalize across features (1) or cells (2) # @param across If performing CLR normalization, normalize across either "features" or "cells". #' @param block.size How many cells should be run in each chunk, will try to split evenly across threads #' @param verbose Display progress bar for normalization procedure #' #' @rdname NormalizeData #' @concept preprocessing #' @export #' NormalizeData.default <- function( object, normalization.method = "LogNormalize", scale.factor = 1e4, margin = 1, block.size = NULL, verbose = TRUE, ... ) { CheckDots(...)
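  # What follows either normalizes in parallel (future workers > 1), chunking the
  # matrix with ChunkPoints() and future_lapply(), or serially via LogNormalize(),
  # the CLR transform (CustomNormalize()), or RelativeCounts().
  # Illustrative sketch of the default path (`counts.matrix` is a stand-in for
  # any raw genes x cells matrix, not an object defined here):
  #   normed <- NormalizeData(counts.matrix, normalization.method = "LogNormalize",
  #                           scale.factor = 1e4, verbose = FALSE)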
if (is.null(x = normalization.method)) { return(object) } normalized.data <- if (nbrOfWorkers() > 1) { norm.function <- switch( EXPR = normalization.method, 'LogNormalize' = LogNormalize, 'CLR' = CustomNormalize, 'RC' = RelativeCounts, stop("Unknown normalization method: ", normalization.method) ) if (normalization.method != 'CLR') { margin <- 2 } tryCatch( expr = Parenting(parent.find = 'Seurat', margin = margin), error = function(e) { invisible(x = NULL) } ) dsize <- switch( EXPR = margin, '1' = nrow(x = object), '2' = ncol(x = object), stop("'margin' must be 1 or 2") ) chunk.points <- ChunkPoints( dsize = dsize, csize = block.size %||% ceiling(x = dsize / nbrOfWorkers()) ) normalized.data <- future_lapply( X = 1:ncol(x = chunk.points), FUN = function(i) { block <- chunk.points[, i] data <- if (margin == 1) { object[block[1]:block[2], , drop = FALSE] } else { object[, block[1]:block[2], drop = FALSE] } clr_function <- function(x) { return(log1p(x = x / (exp(x = sum(log1p(x = x[x > 0]), na.rm = TRUE) / length(x = x))))) } args <- list( data = data, scale.factor = scale.factor, verbose = FALSE, custom_function = clr_function, margin = margin ) args <- args[names(x = formals(fun = norm.function))] return(do.call( what = norm.function, args = args )) } ) do.call( what = switch( EXPR = margin, '1' = 'rbind', '2' = 'cbind', stop("'margin' must be 1 or 2") ), args = normalized.data ) } else { switch( EXPR = normalization.method, 'LogNormalize' = LogNormalize( data = object, scale.factor = scale.factor, verbose = verbose ), 'CLR' = CustomNormalize( data = object, custom_function = function(x) { return(log1p(x = x / (exp(x = sum(log1p(x = x[x > 0]), na.rm = TRUE) / length(x = x))))) }, margin = margin, verbose = verbose # across = across ), 'RC' = RelativeCounts( data = object, scale.factor = scale.factor, verbose = verbose ), stop("Unknown normalization method: ", normalization.method) ) } return(normalized.data) } #' @rdname NormalizeData #' @concept preprocessing #' @export #' @method NormalizeData Assay #' NormalizeData.Assay <- function( object, normalization.method = "LogNormalize", scale.factor = 1e4, margin = 1, verbose = TRUE, ... ) { object <- SetAssayData( object = object, slot = 'data', new.data = NormalizeData( object = GetAssayData(object = object, slot = 'counts'), normalization.method = normalization.method, scale.factor = scale.factor, verbose = verbose, margin = margin, ... ) ) return(object) } #' @param assay Name of assay to use #' #' @rdname NormalizeData #' @concept preprocessing #' @export #' @method NormalizeData Seurat #' #' @examples #' \dontrun{ #' data("pbmc_small") #' pbmc_small #' pmbc_small <- NormalizeData(object = pbmc_small) #' } #' NormalizeData.Seurat <- function( object, assay = NULL, normalization.method = "LogNormalize", scale.factor = 1e4, margin = 1, verbose = TRUE, ... ) { assay <- assay %||% DefaultAssay(object = object) assay.data <- GetAssay(object = object, assay = assay) assay.data <- NormalizeData( object = assay.data, normalization.method = normalization.method, scale.factor = scale.factor, verbose = verbose, margin = margin, ... ) object[[assay]] <- assay.data object <- LogSeuratCommand(object = object) return(object) } #' @importFrom future nbrOfWorkers #' #' @param features Vector of features names to scale/center. Default is variable features. #' @param vars.to.regress Variables to regress out (previously latent.vars in #' RegressOut). For example, nUMI, or percent.mito. 
#' @param latent.data Extra data to regress out, should be cells x latent data #' @param split.by Name of variable in object metadata or a vector or factor defining #' grouping of cells. See argument \code{f} in \code{\link[base]{split}} for more details #' @param model.use Use a linear model or generalized linear model #' (poisson, negative binomial) for the regression. Options are 'linear' #' (default), 'poisson', and 'negbinom' #' @param use.umi Regress on UMI count data. Default is FALSE for linear #' modeling, but automatically set to TRUE if model.use is 'negbinom' or 'poisson' #' @param do.scale Whether to scale the data. #' @param do.center Whether to center the data. #' @param scale.max Max value to return for scaled data. The default is 10. #' Setting this can help reduce the effects of features that are only expressed in #' a very small number of cells. If regressing out latent variables and using a #' non-linear model, the default is 50. #' @param block.size Default size for number of features to scale at in a single #' computation. Increasing block.size may speed up calculations but at an #' additional memory cost. #' @param min.cells.to.block If object contains fewer than this number of cells, #' don't block for scaling calculations. #' @param verbose Displays a progress bar for scaling procedure #' #' @importFrom future.apply future_lapply #' #' @rdname ScaleData #' @concept preprocessing #' @export #' ScaleData.default <- function( object, features = NULL, vars.to.regress = NULL, latent.data = NULL, split.by = NULL, model.use = 'linear', use.umi = FALSE, do.scale = TRUE, do.center = TRUE, scale.max = 10, block.size = 1000, min.cells.to.block = 3000, verbose = TRUE, ... ) { CheckDots(...) features <- features %||% rownames(x = object) features <- as.vector(x = intersect(x = features, y = rownames(x = object))) object <- object[features, , drop = FALSE] object.names <- dimnames(x = object) min.cells.to.block <- min(min.cells.to.block, ncol(x = object)) suppressWarnings(expr = Parenting( parent.find = "ScaleData.Assay", features = features, min.cells.to.block = min.cells.to.block )) split.by <- split.by %||% TRUE split.cells <- split(x = colnames(x = object), f = split.by) CheckGC() if (!is.null(x = vars.to.regress)) { if (is.null(x = latent.data)) { latent.data <- data.frame(row.names = colnames(x = object)) } else { latent.data <- latent.data[colnames(x = object), , drop = FALSE] rownames(x = latent.data) <- colnames(x = object) } if (any(vars.to.regress %in% rownames(x = object))) { latent.data <- cbind( latent.data, t(x = object[vars.to.regress[vars.to.regress %in% rownames(x = object)], ]) ) } # Currently, RegressOutMatrix will do nothing if latent.data = NULL notfound <- setdiff(x = vars.to.regress, y = colnames(x = latent.data)) if (length(x = notfound) == length(x = vars.to.regress)) { stop( "None of the requested variables to regress are present in the object.", call. = FALSE ) } else if (length(x = notfound) > 0) { warning( "Requested variables to regress not in object: ", paste(notfound, collapse = ", "), call. = FALSE, immediate. 
= TRUE ) vars.to.regress <- colnames(x = latent.data) } if (verbose) { message("Regressing out ", paste(vars.to.regress, collapse = ', ')) } chunk.points <- ChunkPoints(dsize = nrow(x = object), csize = block.size) if (nbrOfWorkers() > 1) { # TODO: lapply chunks <- expand.grid( names(x = split.cells), 1:ncol(x = chunk.points), stringsAsFactors = FALSE ) object <- future_lapply( X = 1:nrow(x = chunks), FUN = function(i) { row <- chunks[i, ] group <- row[[1]] index <- as.numeric(x = row[[2]]) return(RegressOutMatrix( data.expr = object[chunk.points[1, index]:chunk.points[2, index], split.cells[[group]], drop = FALSE], latent.data = latent.data[split.cells[[group]], , drop = FALSE], features.regress = features, model.use = model.use, use.umi = use.umi, verbose = FALSE )) } ) if (length(x = split.cells) > 1) { merge.indices <- lapply( X = 1:length(x = split.cells), FUN = seq.int, to = length(x = object), by = length(x = split.cells) ) object <- lapply( X = merge.indices, FUN = function(x) { return(do.call(what = 'rbind', args = object[x])) } ) object <- do.call(what = 'cbind', args = object) } else { object <- do.call(what = 'rbind', args = object) } } else { object <- lapply( X = names(x = split.cells), FUN = function(x) { if (verbose && length(x = split.cells) > 1) { message("Regressing out variables from split ", x) } return(RegressOutMatrix( data.expr = object[, split.cells[[x]], drop = FALSE], latent.data = latent.data[split.cells[[x]], , drop = FALSE], features.regress = features, model.use = model.use, use.umi = use.umi, verbose = verbose )) } ) object <- do.call(what = 'cbind', args = object) } dimnames(x = object) <- object.names CheckGC() } if (verbose && (do.scale || do.center)) { msg <- paste( na.omit(object = c( ifelse(test = do.center, yes = 'centering', no = NA_character_), ifelse(test = do.scale, yes = 'scaling', no = NA_character_) )), collapse = ' and ' ) msg <- paste0( toupper(x = substr(x = msg, start = 1, stop = 1)), substr(x = msg, start = 2, stop = nchar(x = msg)), ' data matrix' ) message(msg) } if (inherits(x = object, what = c('dgCMatrix', 'dgTMatrix'))) { scale.function <- FastSparseRowScale } else { object <- as.matrix(x = object) scale.function <- FastRowScale } if (nbrOfWorkers() > 1) { blocks <- ChunkPoints(dsize = length(x = features), csize = block.size) chunks <- expand.grid( names(x = split.cells), 1:ncol(x = blocks), stringsAsFactors = FALSE ) scaled.data <- future_lapply( X = 1:nrow(x = chunks), FUN = function(index) { row <- chunks[index, ] group <- row[[1]] block <- as.vector(x = blocks[, as.numeric(x = row[[2]])]) arg.list <- list( mat = object[features[block[1]:block[2]], split.cells[[group]], drop = FALSE], scale = do.scale, center = do.center, scale_max = scale.max, display_progress = FALSE ) arg.list <- arg.list[intersect(x = names(x = arg.list), y = names(x = formals(fun = scale.function)))] data.scale <- do.call(what = scale.function, args = arg.list) dimnames(x = data.scale) <- dimnames(x = object[features[block[1]:block[2]], split.cells[[group]]]) suppressWarnings(expr = data.scale[is.na(x = data.scale)] <- 0) CheckGC() return(data.scale) } ) if (length(x = split.cells) > 1) { merge.indices <- lapply( X = 1:length(x = split.cells), FUN = seq.int, to = length(x = scaled.data), by = length(x = split.cells) ) scaled.data <- lapply( X = merge.indices, FUN = function(x) { return(suppressWarnings(expr = do.call(what = 'rbind', args = scaled.data[x]))) } ) scaled.data <- suppressWarnings(expr = do.call(what = 'cbind', args = scaled.data)) } else { 
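# Only a single cell grouping here: the feature blocks that were scaled in parallel are simply row-bound back into one matrix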
suppressWarnings(expr = scaled.data <- do.call(what = 'rbind', args = scaled.data)) } } else { scaled.data <- matrix( data = NA_real_, nrow = nrow(x = object), ncol = ncol(x = object), dimnames = object.names ) max.block <- ceiling(x = length(x = features) / block.size) for (x in names(x = split.cells)) { if (verbose) { if (length(x = split.cells) > 1 && (do.scale || do.center)) { message(gsub(pattern = 'matrix', replacement = 'from split ', x = msg), x) } pb <- txtProgressBar(min = 0, max = max.block, style = 3, file = stderr()) } for (i in 1:max.block) { my.inds <- ((block.size * (i - 1)):(block.size * i - 1)) + 1 my.inds <- my.inds[my.inds <= length(x = features)] arg.list <- list( mat = object[features[my.inds], split.cells[[x]], drop = FALSE], scale = do.scale, center = do.center, scale_max = scale.max, display_progress = FALSE ) arg.list <- arg.list[intersect(x = names(x = arg.list), y = names(x = formals(fun = scale.function)))] data.scale <- do.call(what = scale.function, args = arg.list) dimnames(x = data.scale) <- dimnames(x = object[features[my.inds], split.cells[[x]]]) scaled.data[features[my.inds], split.cells[[x]]] <- data.scale rm(data.scale) CheckGC() if (verbose) { setTxtProgressBar(pb = pb, value = i) } } if (verbose) { close(con = pb) } } } dimnames(x = scaled.data) <- object.names scaled.data[is.na(x = scaled.data)] <- 0 CheckGC() return(scaled.data) } #' @rdname ScaleData #' @concept preprocessing #' @export #' @method ScaleData Assay #' ScaleData.Assay <- function( object, features = NULL, vars.to.regress = NULL, latent.data = NULL, split.by = NULL, model.use = 'linear', use.umi = FALSE, do.scale = TRUE, do.center = TRUE, scale.max = 10, block.size = 1000, min.cells.to.block = 3000, verbose = TRUE, ... ) { use.umi <- ifelse(test = model.use != 'linear', yes = TRUE, no = use.umi) slot.use <- ifelse(test = use.umi, yes = 'counts', no = 'data') features <- features %||% VariableFeatures(object) if (length(x = features) == 0) { features <- rownames(x = GetAssayData(object = object, slot = slot.use)) } object <- SetAssayData( object = object, slot = 'scale.data', new.data = ScaleData( object = GetAssayData(object = object, slot = slot.use), features = features, vars.to.regress = vars.to.regress, latent.data = latent.data, split.by = split.by, model.use = model.use, use.umi = use.umi, do.scale = do.scale, do.center = do.center, scale.max = scale.max, block.size = block.size, min.cells.to.block = min.cells.to.block, verbose = verbose, ... ) ) suppressWarnings(expr = Parenting( parent.find = "ScaleData.Seurat", features = features, min.cells.to.block = min.cells.to.block, use.umi = use.umi )) return(object) } #' @param assay Name of Assay to scale #' #' @rdname ScaleData #' @concept preprocessing #' @export #' @method ScaleData Seurat #' ScaleData.Seurat <- function( object, features = NULL, assay = NULL, vars.to.regress = NULL, split.by = NULL, model.use = 'linear', use.umi = FALSE, do.scale = TRUE, do.center = TRUE, scale.max = 10, block.size = 1000, min.cells.to.block = 3000, verbose = TRUE, ... 
) { assay <- assay %||% DefaultAssay(object = object) assay.data <- GetAssay(object = object, assay = assay) if (any(vars.to.regress %in% colnames(x = object[[]]))) { latent.data <- object[[vars.to.regress[vars.to.regress %in% colnames(x = object[[]])]]] } else { latent.data <- NULL } if (is.character(x = split.by) && length(x = split.by) == 1) { split.by <- object[[split.by]] } assay.data <- ScaleData( object = assay.data, features = features, vars.to.regress = vars.to.regress, latent.data = latent.data, split.by = split.by, model.use = model.use, use.umi = use.umi, do.scale = do.scale, do.center = do.center, scale.max = scale.max, block.size = block.size, min.cells.to.block = min.cells.to.block, verbose = verbose, ... ) object[[assay]] <- assay.data object <- LogSeuratCommand(object = object) return(object) } #%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% # Internal #%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% # Bin spatial regions into grid and average expression values # # @param dat Expression data # @param pos Position information/coordinates for each sample # @param x.cuts Number of cuts to make in the x direction (defines grid along # with y.cuts) # @param y.cuts Number of cuts to make in the y direction # # @return returns a list with positions as centers of the bins and average # expression within the bins # #' @importFrom Matrix rowMeans # BinData <- function(data, pos, x.cuts = 10, y.cuts = x.cuts, verbose = TRUE) { if (verbose) { message("Binning spatial data") } pos$x.cuts <- cut(x = pos[, 1], breaks = x.cuts) pos$y.cuts <- cut(x = pos[, 2], breaks = y.cuts) pos$bin <- paste0(pos$x.cuts, "_", pos$y.cuts) all.bins <- unique(x = pos$bin) new.pos <- matrix(data = numeric(), nrow = length(x = all.bins), ncol = 2) new.dat <- matrix(data = numeric(), nrow = nrow(x = data), ncol = length(x = all.bins)) for(i in 1:length(x = all.bins)) { samples <- rownames(x = pos)[which(x = pos$bin == all.bins[i])] dat <- data[, samples] if (is.null(x = dim(x = dat))) { new.dat[, i] <- dat } else { new.dat[, i] <- rowMeans(data[, samples]) } new.pos[i, 1] <- mean(pos[samples, "x"]) new.pos[i, 2] <- mean(pos[samples, "y"]) } rownames(x = new.dat) <- rownames(x = data) colnames(x = new.dat) <- all.bins rownames(x = new.pos) <- all.bins colnames(x = new.pos) <- colnames(x = pos)[1:2] return(list(data = new.dat, pos = new.pos)) } # Sample classification from MULTI-seq # # Identify singlets, doublets and negative cells from multiplexing experiments. # # @param data Data frame with the raw count data (cell x tags) # @param q Scale the data. 
Default is 1e4 # # @return Returns a named vector with demultiplexed identities # #' @importFrom KernSmooth bkde #' @importFrom stats approxfun quantile # # @author Chris McGinnis, Gartner Lab, UCSF # # @examples # demux_result <- ClassifyCells(data = counts_data, q = 0.7) # ClassifyCells <- function(data, q) { ## Generate Thresholds: Gaussian KDE with bad barcode detection, outlier trimming ## local maxima estimation with bad barcode detection, threshold definition and adjustment # n_BC <- ncol(x = data) n_cells <- nrow(x = data) bc_calls <- vector(mode = "list", length = n_cells) n_bc_calls <- numeric(length = n_cells) for (i in 1:ncol(x = data)) { model <- tryCatch( expr = approxfun(x = bkde(x = data[, i], kernel = "normal")), error = function(e) { message("No threshold found for ", colnames(x = data)[i], "...") } ) if (is.null(x = model)) { next } x <- seq.int( from = quantile(x = data[, i], probs = 0.001), to = quantile(x = data[, i], probs = 0.999), length.out = 100 ) extrema <- LocalMaxima(x = model(x)) if (length(x = extrema) <= 1) { message("No threshold found for ", colnames(x = data)[i], "...") next } low.extremum <- min(extrema) high.extremum <- max(extrema) thresh <- (x[high.extremum] + x[low.extremum])/2 ## Account for GKDE noise by adjusting low threshold to most prominent peak low.extremae <- extrema[which(x = x[extrema] <= thresh)] new.low.extremum <- low.extremae[which.max(x = model(x)[low.extremae])] thresh <- quantile(x = c(x[high.extremum], x[new.low.extremum]), probs = q) ## Find which cells are above the ith threshold cell_i <- which(x = data[, i] >= thresh) n <- length(x = cell_i) if (n == 0) { ## Skips to next BC if no cells belong to the ith group next } bc <- colnames(x = data)[i] if (n == 1) { bc_calls[[cell_i]] <- c(bc_calls[[cell_i]], bc) n_bc_calls[cell_i] <- n_bc_calls[cell_i] + 1 } else { # have to iterate, lame for (cell in cell_i) { bc_calls[[cell]] <- c(bc_calls[[cell]], bc) n_bc_calls[cell] <- n_bc_calls[cell] + 1 } } } calls <- character(length = n_cells) for (i in 1:n_cells) { if (n_bc_calls[i] == 0) { calls[i] <- "Negative"; next } if (n_bc_calls[i] > 1) { calls[i] <- "Doublet"; next } if (n_bc_calls[i] == 1) { calls[i] <- bc_calls[[i]] } } names(x = calls) <- rownames(x = data) return(calls) } # Computes the metric at a given r (radius) value and stores in meta.features # # @param mv Results of running markvario # @param r.metric r value at which to report the "trans" value of the mark # variogram # # @return Returns a data.frame with r.metric values # # ComputeRMetric <- function(mv, r.metric = 5) { if (!inherits(x = mv, what = "list")) { mv <- list(mv) } r.metric.results <- unlist(x = lapply( X = mv, FUN = function(x) { x$trans[which.min(x = abs(x = x$r - r.metric))] } )) r.metric.results <- as.data.frame(x = r.metric.results) colnames(r.metric.results) <- paste0("r.metric.", r.metric) return(r.metric.results) } # Normalize a given data matrix # # Normalize a given matrix with a custom function. Essentially just a wrapper # around apply. Used primarily in the context of CLR normalization. # # @param data Matrix with the raw count data # @param custom_function A custom normalization function # @param margin Which way to we normalize. Set 1 for rows (features) or 2 for columns (genes) # @parm across Which way to we normalize? 
Choose form 'cells' or 'features' # @param verbose Show progress bar # # @return Returns a matrix with the custom normalization # #' @importFrom Matrix t #' @importFrom methods as #' @importFrom pbapply pbapply # CustomNormalize <- function(data, custom_function, margin, verbose = TRUE) { if (is.data.frame(x = data)) { data <- as.matrix(x = data) } if (!inherits(x = data, what = 'dgCMatrix')) { data <- as(object = data, Class = "dgCMatrix") } myapply <- ifelse(test = verbose, yes = pbapply, no = apply) # margin <- switch( # EXPR = across, # 'cells' = 2, # 'features' = 1, # stop("'across' must be either 'cells' or 'features'") # ) if (verbose) { message("Normalizing across ", c('features', 'cells')[margin]) } norm.data <- myapply( X = data, MARGIN = margin, FUN = custom_function) if (margin == 1) { norm.data = Matrix::t(x = norm.data) } colnames(x = norm.data) <- colnames(x = data) rownames(x = norm.data) <- rownames(x = data) return(norm.data) } # Inter-maxima quantile sweep to find ideal barcode thresholds # # Finding ideal thresholds for positive-negative signal classification per multiplex barcode # # @param call.list A list of sample classification result from different quantiles using ClassifyCells # # @return A list with two values: \code{res} and \code{extrema}: # \describe{ # \item{res}{A data.frame named res_id documenting the quantile used, subset, number of cells and proportion} # \item{extrema}{...} # } # # @author Chris McGinnis, Gartner Lab, UCSF # # @examples # FindThresh(call.list = bar.table_sweep.list) # FindThresh <- function(call.list) { # require(reshape2) res <- as.data.frame(x = matrix( data = 0L, nrow = length(x = call.list), ncol = 4 )) colnames(x = res) <- c("q","pDoublet","pNegative","pSinglet") q.range <- unlist(x = strsplit(x = names(x = call.list), split = "q=")) res$q <- as.numeric(x = q.range[grep(pattern = "0", x = q.range)]) nCell <- length(x = call.list[[1]]) for (i in 1:nrow(x = res)) { temp <- table(call.list[[i]]) if ("Doublet" %in% names(x = temp) == TRUE) { res$pDoublet[i] <- temp[which(x = names(x = temp) == "Doublet")] } if ( "Negative" %in% names(temp) == TRUE ) { res$pNegative[i] <- temp[which(x = names(x = temp) == "Negative")] } res$pSinglet[i] <- sum(temp[which(x = !names(x = temp) %in% c("Doublet", "Negative"))]) } res.q <- res$q q.ind <- grep(pattern = 'q', x = colnames(x = res)) res <- Melt(x = res[, -q.ind]) res[, 1] <- rep.int(x = res.q, times = length(x = unique(res[, 2]))) colnames(x = res) <- c('q', 'variable', 'value') res[, 4] <- res$value/nCell colnames(x = res)[2:4] <- c("Subset", "nCells", "Proportion") extrema <- res$q[LocalMaxima(x = res$Proportion[which(x = res$Subset == "pSinglet")])] return(list(res = res, extrema = extrema)) } # Calculate pearson residuals of features not in the scale.data # This function is the secondary function under GetResidual # # @param object A seurat object # @param features Name of features to add into the scale.data # @param assay Name of the assay of the seurat object generated by SCTransform # @param vst_out The SCT parameter list # @param clip.range Numeric of length two specifying the min and max values the Pearson residual # will be clipped to # Useful if you want to change the clip.range. 
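# (for example, clip.range = c(-10, 10) would cap residuals at +/- 10; illustrative values only, not defaults)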
# @param verbose Whether to print messages and progress bars # # @return Returns a matrix containing not-centered pearson residuals of added features # #' @importFrom sctransform get_residuals # GetResidualSCTModel <- function( object, assay, SCTModel, new_features, clip.range, replace.value, verbose ) { clip.range <- clip.range %||% SCTResults(object = object[[assay]], slot = "clips", model = SCTModel)$sct model.features <- rownames(x = SCTResults(object = object[[assay]], slot = "feature.attributes", model = SCTModel)) umi.assay <- SCTResults(object = object[[assay]], slot = "umi.assay", model = SCTModel) model.cells <- Cells(x = slot(object = object[[assay]], name = "SCTModel.list")[[SCTModel]]) sct.method <- SCTResults(object = object[[assay]], slot = "arguments", model = SCTModel)$sct.method %||% "default" scale.data.cells <- colnames(x = GetAssayData(object = object, assay = assay, slot = "scale.data")) if (length(x = setdiff(x = model.cells, y = scale.data.cells)) == 0) { existing_features <- names(x = which(x = ! apply( X = GetAssayData(object = object, assay = assay, slot = "scale.data")[, model.cells], MARGIN = 1, FUN = anyNA) )) } else { existing_features <- character() } if (replace.value) { features_to_compute <- new_features } else { features_to_compute <- setdiff(x = new_features, y = existing_features) } if (sct.method == "reference.model") { if (verbose) { message("sct.model ", SCTModel, " is from reference, so no residuals will be recalculated") } features_to_compute <- character() } if (!umi.assay %in% Assays(object = object)) { warning("The umi assay (", umi.assay, ") is not present in the object. ", "Cannot compute additional residuals.", call. = FALSE, immediate. = TRUE) return(NULL) } diff_features <- setdiff(x = features_to_compute, y = model.features) intersect_features <- intersect(x = features_to_compute, y = model.features) if (length(x = diff_features) == 0) { umi <- GetAssayData(object = object, assay = umi.assay, slot = "counts" )[features_to_compute, model.cells, drop = FALSE] } else { warning( "In the SCTModel ", SCTModel, ", the following ", length(x = diff_features), " features do not exist in the counts slot: ", paste(diff_features, collapse = ", ") ) if (length(x = intersect_features) == 0) { return(matrix( data = NA, nrow = length(x = features_to_compute), ncol = length(x = model.cells), dimnames = list(features_to_compute, model.cells) )) } umi <- GetAssayData(object = object, assay = umi.assay, slot = "counts")[intersect_features, model.cells, drop = FALSE] } clip.max <- max(clip.range) clip.min <- min(clip.range) if (nrow(x = umi) > 0) { vst_out <- SCTModel_to_vst(SCTModel = slot(object = object[[assay]], name = "SCTModel.list")[[SCTModel]]) if (verbose) { message("sct.model: ", SCTModel) } new_residual <- get_residuals( vst_out = vst_out, umi = umi, residual_type = "pearson", res_clip_range = c(clip.min, clip.max), verbosity = as.numeric(x = verbose) * 2 ) new_residual <- as.matrix(x = new_residual) # centered data new_residual <- new_residual - rowMeans(x = new_residual) } else { new_residual <- matrix(data = NA, nrow = 0, ncol = length(x = model.cells), dimnames = list(c(), model.cells)) } old.features <- setdiff(x = new_features, y = features_to_compute) if (length(x = old.features) > 0) { old_residuals <- GetAssayData(object = object[[assay]], slot = "scale.data")[old.features, model.cells, drop = FALSE] new_residual <- rbind(new_residual, old_residuals)[new_features, ] } return(new_residual) } # Convert SCTModel class to vst_out used in the 
sctransform # @param SCTModel # @return Return a list containing sct model # SCTModel_to_vst <- function(SCTModel) { feature.params <- c("theta", "(Intercept)", "log_umi") feature.attrs <- c("residual_mean", "residual_variance" ) vst_out <- list() vst_out$model_str <- slot(object = SCTModel, name = "model") vst_out$model_pars_fit <- as.matrix(x = slot(object = SCTModel, name = "feature.attributes")[, feature.params]) vst_out$gene_attr <- slot(object = SCTModel, name = "feature.attributes")[, feature.attrs] vst_out$cell_attr <- slot(object = SCTModel, name = "cell.attributes") vst_out$arguments <- slot(object = SCTModel, name = "arguments") return(vst_out) } # Local maxima estimator # # Finding local maxima given a numeric vector # # @param x A continuous vector # # @return Returns a (named) vector showing positions of local maximas # # @author Tommy # @references \url{https://stackoverflow.com/questions/6836409/finding-local-maxima-and-minima} # # @examples # x <- c(1, 2, 9, 9, 2, 1, 1, 5, 5, 1) # LocalMaxima(x = x) # LocalMaxima <- function(x) { # Use -Inf instead if x is numeric (non-integer) y <- diff(x = c(-.Machine$integer.max, x)) > 0L y <- cumsum(x = rle(x = y)$lengths) y <- y[seq.int(from = 1L, to = length(x = y), by = 2L)] if (x[[1]] == x[[2]]) { y <- y[-1] } return(y) } # #' @importFrom stats residuals # NBResiduals <- function(fmla, regression.mat, gene, return.mode = FALSE) { fit <- 0 try( fit <- glm.nb( formula = fmla, data = regression.mat ), silent = TRUE) if (is.numeric(x = fit)) { message(sprintf('glm.nb failed for gene %s; falling back to scale(log(y+1))', gene)) resid <- scale(x = log(x = regression.mat[, 'GENE'] + 1))[, 1] mode <- 'scale' } else { resid <- residuals(fit, type = 'pearson') mode = 'nbreg' } do.return <- list(resid = resid, mode = mode) if (return.mode) { return(do.return) } else { return(do.return$resid) } } # Regress out techincal effects and cell cycle from a matrix # # Remove unwanted effects from a matrix # # @parm data.expr An expression matrix to regress the effects of latent.data out # of should be the complete expression matrix in genes x cells # @param latent.data A matrix or data.frame of latent variables, should be cells # x latent variables, the colnames should be the variables to regress # @param features.regress An integer vector representing the indices of the # genes to run regression on # @param model.use Model to use, one of 'linear', 'poisson', or 'negbinom'; pass # NULL to simply return data.expr # @param use.umi Regress on UMI count data # @param verbose Display a progress bar # #' @importFrom stats as.formula lm #' @importFrom utils txtProgressBar setTxtProgressBar # RegressOutMatrix <- function( data.expr, latent.data = NULL, features.regress = NULL, model.use = NULL, use.umi = FALSE, verbose = TRUE ) { # Do we bypass regression and simply return data.expr? bypass <- vapply( X = list(latent.data, model.use), FUN = is.null, FUN.VALUE = logical(length = 1L) ) if (any(bypass)) { return(data.expr) } # Check model.use possible.models <- c("linear", "poisson", "negbinom") if (!model.use %in% possible.models) { stop(paste( model.use, "is not a valid model. 
Please use one the following:", paste0(possible.models, collapse = ", ") )) } # Check features.regress if (is.null(x = features.regress)) { features.regress <- 1:nrow(x = data.expr) } if (is.character(x = features.regress)) { features.regress <- intersect(x = features.regress, y = rownames(x = data.expr)) if (length(x = features.regress) == 0) { stop("Cannot use features that are beyond the scope of data.expr") } } else if (max(features.regress) > nrow(x = data.expr)) { stop("Cannot use features that are beyond the scope of data.expr") } # Check dataset dimensions if (nrow(x = latent.data) != ncol(x = data.expr)) { stop("Uneven number of cells between latent data and expression data") } use.umi <- ifelse(test = model.use != 'linear', yes = TRUE, no = use.umi) # Create formula for regression vars.to.regress <- colnames(x = latent.data) fmla <- paste('GENE ~', paste(vars.to.regress, collapse = '+')) fmla <- as.formula(object = fmla) if (model.use == "linear") { # In this code, we'll repeatedly regress different Y against the same X # (latent.data) in order to calculate residuals. Rather that repeatedly # call lm to do this, we'll avoid recalculating the QR decomposition for the # latent.data matrix each time by reusing it after calculating it once regression.mat <- cbind(latent.data, data.expr[1,]) colnames(regression.mat) <- c(colnames(x = latent.data), "GENE") qr <- lm(fmla, data = regression.mat, qr = TRUE)$qr rm(regression.mat) } # Make results matrix data.resid <- matrix( nrow = nrow(x = data.expr), ncol = ncol(x = data.expr) ) if (verbose) { pb <- txtProgressBar(char = '=', style = 3, file = stderr()) } for (i in 1:length(x = features.regress)) { x <- features.regress[i] regression.mat <- cbind(latent.data, data.expr[x, ]) colnames(x = regression.mat) <- c(vars.to.regress, 'GENE') regression.mat <- switch( EXPR = model.use, 'linear' = qr.resid(qr = qr, y = data.expr[x,]), 'poisson' = residuals(object = glm( formula = fmla, family = 'poisson', data = regression.mat), type = 'pearson' ), 'negbinom' = NBResiduals( fmla = fmla, regression.mat = regression.mat, gene = x ) ) data.resid[i, ] <- regression.mat if (verbose) { setTxtProgressBar(pb = pb, value = i / length(x = features.regress)) } } if (verbose) { close(con = pb) } if (use.umi) { data.resid <- log1p(x = Sweep( x = data.resid, MARGIN = 1, STATS = apply(X = data.resid, MARGIN = 1, FUN = min), FUN = '-' )) } dimnames(x = data.resid) <- dimnames(x = data.expr) return(data.resid) } Seurat/R/differential_expression.R0000644000176200001440000022031014170333512016751 0ustar liggesusers#' @include generics.R #' NULL #%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% # Functions #%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% globalVariables( names = c('myAUC', 'p_val', 'avg_logFC'), package = 'Seurat', add = TRUE ) #' Gene expression markers for all identity classes #' #' Finds markers (differentially expressed genes) for each of the identity classes in a dataset #' #' @inheritParams FindMarkers #' @param node A node to find markers for and all its children; requires #' \code{\link{BuildClusterTree}} to have been run previously; replaces \code{FindAllMarkersNode} #' @param return.thresh Only return markers that have a p-value < return.thresh, or a power > return.thresh (if the test is ROC) #' #' @return Matrix containing a ranked list of putative markers, and associated #' statistics (p-values, ROC score, etc.) 
#' #' @importFrom stats setNames #' #' @export #' #' @aliases FindAllMarkersNode #' @concept differential_expression #' #' @examples #' data("pbmc_small") #' # Find markers for all clusters #' all.markers <- FindAllMarkers(object = pbmc_small) #' head(x = all.markers) #' \dontrun{ #' # Pass a value to node as a replacement for FindAllMarkersNode #' pbmc_small <- BuildClusterTree(object = pbmc_small) #' all.markers <- FindAllMarkers(object = pbmc_small, node = 4) #' head(x = all.markers) #' } #' FindAllMarkers <- function( object, assay = NULL, features = NULL, logfc.threshold = 0.25, test.use = 'wilcox', slot = 'data', min.pct = 0.1, min.diff.pct = -Inf, node = NULL, verbose = TRUE, only.pos = FALSE, max.cells.per.ident = Inf, random.seed = 1, latent.vars = NULL, min.cells.feature = 3, min.cells.group = 3, pseudocount.use = 1, mean.fxn = NULL, fc.name = NULL, base = 2, return.thresh = 1e-2, densify = FALSE, ... ) { MapVals <- function(vec, from, to) { vec2 <- setNames(object = to, nm = from)[as.character(x = vec)] vec2[is.na(x = vec2)] <- vec[is.na(x = vec2)] return(unname(obj = vec2)) } if ((test.use == "roc") && (return.thresh == 1e-2)) { return.thresh <- 0.7 } if (is.null(x = node)) { idents.all <- sort(x = unique(x = Idents(object = object))) } else { if (!PackageCheck('ape', error = FALSE)) { stop(cluster.ape, call. = FALSE) } tree <- Tool(object = object, slot = 'BuildClusterTree') if (is.null(x = tree)) { stop("Please run 'BuildClusterTree' before finding markers on nodes") } descendants <- DFT(tree = tree, node = node, include.children = TRUE) all.children <- sort(x = tree$edge[, 2][!tree$edge[, 2] %in% tree$edge[, 1]]) descendants <- MapVals( vec = descendants, from = all.children, to = tree$tip.label ) drop.children <- setdiff(x = tree$tip.label, y = descendants) keep.children <- setdiff(x = tree$tip.label, y = drop.children) orig.nodes <- c( node, as.numeric(x = setdiff(x = descendants, y = keep.children)) ) tree <- ape::drop.tip(phy = tree, tip = drop.children) new.nodes <- unique(x = tree$edge[, 1, drop = TRUE]) idents.all <- (tree$Nnode + 2):max(tree$edge) } genes.de <- list() messages <- list() for (i in 1:length(x = idents.all)) { if (verbose) { message("Calculating cluster ", idents.all[i]) } genes.de[[i]] <- tryCatch( expr = { FindMarkers( object = object, assay = assay, ident.1 = if (is.null(x = node)) { idents.all[i] } else { tree }, ident.2 = if (is.null(x = node)) { NULL } else { idents.all[i] }, features = features, logfc.threshold = logfc.threshold, test.use = test.use, slot = slot, min.pct = min.pct, min.diff.pct = min.diff.pct, verbose = verbose, only.pos = only.pos, max.cells.per.ident = max.cells.per.ident, random.seed = random.seed, latent.vars = latent.vars, min.cells.feature = min.cells.feature, min.cells.group = min.cells.group, pseudocount.use = pseudocount.use, mean.fxn = mean.fxn, fc.name = fc.name, base = base, densify = densify, ... 
) }, error = function(cond) { return(cond$message) } ) if (is.character(x = genes.de[[i]])) { messages[[i]] <- genes.de[[i]] genes.de[[i]] <- NULL } } gde.all <- data.frame() for (i in 1:length(x = idents.all)) { if (is.null(x = unlist(x = genes.de[i]))) { next } gde <- genes.de[[i]] if (nrow(x = gde) > 0) { if (test.use == "roc") { gde <- subset( x = gde, subset = (myAUC > return.thresh | myAUC < (1 - return.thresh)) ) } else if (is.null(x = node) || test.use %in% c('bimod', 't')) { gde <- gde[order(gde$p_val, -gde[, 2]), ] gde <- subset(x = gde, subset = p_val < return.thresh) } if (nrow(x = gde) > 0) { gde$cluster <- idents.all[i] gde$gene <- rownames(x = gde) } if (nrow(x = gde) > 0) { gde.all <- rbind(gde.all, gde) } } } if ((only.pos) && nrow(x = gde.all) > 0) { return(subset(x = gde.all, subset = gde.all[, 2] > 0)) } rownames(x = gde.all) <- make.unique(names = as.character(x = gde.all$gene)) if (nrow(x = gde.all) == 0) { warning("No DE genes identified", call. = FALSE, immediate. = TRUE) } if (length(x = messages) > 0) { warning("The following tests were not performed: ", call. = FALSE, immediate. = TRUE) for (i in 1:length(x = messages)) { if (!is.null(x = messages[[i]])) { warning("When testing ", idents.all[i], " versus all:\n\t", messages[[i]], call. = FALSE, immediate. = TRUE) } } } if (!is.null(x = node)) { gde.all$cluster <- MapVals( vec = gde.all$cluster, from = new.nodes, to = orig.nodes ) } return(gde.all) } #' Finds markers that are conserved between the groups #' #' @inheritParams FindMarkers #' @param ident.1 Identity class to define markers for #' @param ident.2 A second identity class for comparison. If NULL (default) - #' use all other cells for comparison. #' @param grouping.var grouping variable #' @param assay of assay to fetch data for (default is RNA) #' @param meta.method method for combining p-values. Should be a function from #' the metap package (NOTE: pass the function, not a string) #' @param \dots parameters to pass to FindMarkers #' #' @return data.frame containing a ranked list of putative conserved markers, and #' associated statistics (p-values within each group and a combined p-value #' (such as Fishers combined p-value or others from the metap package), #' percentage of cells expressing the marker, average differences). Name of group is appended to each #' associated output column (e.g. CTRL_p_val). If only one group is tested in the grouping.var, max #' and combined p-values are not returned. #' #' @export #' @concept differential_expression #' #' @examples #' \dontrun{ #' data("pbmc_small") #' pbmc_small #' # Create a simulated grouping variable #' pbmc_small[['groups']] <- sample(x = c('g1', 'g2'), size = ncol(x = pbmc_small), replace = TRUE) #' FindConservedMarkers(pbmc_small, ident.1 = 0, ident.2 = 1, grouping.var = "groups") #' } #' FindConservedMarkers <- function( object, ident.1, ident.2 = NULL, grouping.var, assay = 'RNA', slot = 'data', min.cells.group = 3, meta.method = metap::minimump, verbose = TRUE, ... ) { metap.installed <- PackageCheck("metap", error = FALSE) if (!metap.installed[1]) { stop( "Please install the metap package to use FindConservedMarkers.", "\nThis can be accomplished with the following commands: ", "\n----------------------------------------", "\ninstall.packages('BiocManager')", "\nBiocManager::install('multtest')", "\ninstall.packages('metap')", "\n----------------------------------------", call. = FALSE ) } if (!is.function(x = meta.method)) { stop("meta.method should be a function from the metap package. 
Please see https://cran.r-project.org/web/packages/metap/metap.pdf for a detailed description of the available functions.") } object.var <- FetchData(object = object, vars = grouping.var) object <- SetIdent( object = object, cells = colnames(x = object), value = paste(Idents(object = object), object.var[, 1], sep = "_") ) levels.split <- names(x = sort(x = table(object.var[, 1]))) num.groups <- length(levels.split) cells <- list() for (i in 1:num.groups) { cells[[i]] <- rownames( x = object.var[object.var[, 1] == levels.split[i], , drop = FALSE] ) } marker.test <- list() # do marker tests ident.2.save <- ident.2 for (i in 1:num.groups) { level.use <- levels.split[i] ident.use.1 <- paste(ident.1, level.use, sep = "_") ident.use.1.exists <- ident.use.1 %in% Idents(object = object) if (!all(ident.use.1.exists)) { bad.ids <- ident.1[!ident.use.1.exists] warning( "Identity: ", paste(bad.ids, collapse = ", "), " not present in group ", level.use, ". Skipping ", level.use, call. = FALSE, immediate. = TRUE ) next } ident.2 <- ident.2.save cells.1 <- WhichCells(object = object, idents = ident.use.1) if (length(cells.1) < min.cells.group) { warning( level.use, " has fewer than ", min.cells.group, " cells in Identity: ", paste(ident.1, collapse = ", "), ". Skipping ", level.use, call. = FALSE, immediate. = TRUE ) next } if (is.null(x = ident.2)) { cells.2 <- setdiff(x = cells[[i]], y = cells.1) ident.use.2 <- names(x = which(x = table(Idents(object = object)[cells.2]) > 0)) ident.2 <- gsub(pattern = paste0("_", level.use), replacement = "", x = ident.use.2) if (length(x = ident.use.2) == 0) { stop(paste("Only one identity class present:", ident.1)) } } else { ident.use.2 <- paste(ident.2, level.use, sep = "_") } if (verbose) { message( "Testing group ", level.use, ": (", paste(ident.1, collapse = ", "), ") vs (", paste(ident.2, collapse = ", "), ")" ) } ident.use.2.exists <- ident.use.2 %in% Idents(object = object) if (!all(ident.use.2.exists)) { bad.ids <- ident.2[!ident.use.2.exists] warning( "Identity: ", paste(bad.ids, collapse = ", "), " not present in group ", level.use, ". Skipping ", level.use, call. = FALSE, immediate. = TRUE ) next } marker.test[[i]] <- FindMarkers( object = object, assay = assay, slot = slot, ident.1 = ident.use.1, ident.2 = ident.use.2, verbose = verbose, ... 
) names(x = marker.test)[i] <- levels.split[i] } marker.test <- Filter(f = Negate(f = is.null), x = marker.test) genes.conserved <- Reduce( f = intersect, x = lapply( X = marker.test, FUN = function(x) { return(rownames(x = x)) } ) ) markers.conserved <- list() for (i in 1:length(x = marker.test)) { markers.conserved[[i]] <- marker.test[[i]][genes.conserved, ] colnames(x = markers.conserved[[i]]) <- paste( names(x = marker.test)[i], colnames(x = markers.conserved[[i]]), sep = "_" ) } markers.combined <- Reduce(cbind, markers.conserved) pval.codes <- colnames(x = markers.combined)[grepl(pattern = "*_p_val$", x = colnames(x = markers.combined))] if (length(x = pval.codes) > 1) { markers.combined$max_pval <- apply( X = markers.combined[, pval.codes, drop = FALSE], MARGIN = 1, FUN = max ) combined.pval <- data.frame(cp = apply( X = markers.combined[, pval.codes, drop = FALSE], MARGIN = 1, FUN = function(x) { return(meta.method(x)$p) } )) meta.method.name <- as.character(x = formals()$meta.method) if (length(x = meta.method.name) == 3) { meta.method.name <- meta.method.name[3] } colnames(x = combined.pval) <- paste0(meta.method.name, "_p_val") markers.combined <- cbind(markers.combined, combined.pval) markers.combined <- markers.combined[order(markers.combined[, paste0(meta.method.name, "_p_val")]), ] } else { warning("Only a single group was tested", call. = FALSE, immediate. = TRUE) } return(markers.combined) } #%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% # Methods for Seurat-defined generics #%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% #' @param cells.1 Vector of cell names belonging to group 1 #' @param cells.2 Vector of cell names belonging to group 2 #' @param counts Count matrix if using scale.data for DE tests. This is used for #' computing pct.1 and pct.2 and for filtering features based on fraction #' expressing #' @param features Genes to test. Default is to use all genes #' @param logfc.threshold Limit testing to genes which show, on average, at least #' X-fold difference (log-scale) between the two groups of cells. Default is 0.25 #' Increasing logfc.threshold speeds up the function, but can miss weaker signals. #' @param test.use Denotes which test to use. Available options are: #' \itemize{ #' \item{"wilcox"} : Identifies differentially expressed genes between two #' groups of cells using a Wilcoxon Rank Sum test (default) #' \item{"bimod"} : Likelihood-ratio test for single cell gene expression, #' (McDavid et al., Bioinformatics, 2013) #' \item{"roc"} : Identifies 'markers' of gene expression using ROC analysis. #' For each gene, evaluates (using AUC) a classifier built on that gene alone, #' to classify between two groups of cells. An AUC value of 1 means that #' expression values for this gene alone can perfectly classify the two #' groupings (i.e. Each of the cells in cells.1 exhibit a higher level than #' each of the cells in cells.2). An AUC value of 0 also means there is perfect #' classification, but in the other direction. A value of 0.5 implies that #' the gene has no predictive power to classify the two groups. Returns a #' 'predictive power' (abs(AUC-0.5) * 2) ranked matrix of putative differentially #' expressed genes. #' \item{"t"} : Identify differentially expressed genes between two groups of #' cells using the Student's t-test. #' \item{"negbinom"} : Identifies differentially expressed genes between two #' groups of cells using a negative binomial generalized linear model. 
#' Use only for UMI-based datasets #' \item{"poisson"} : Identifies differentially expressed genes between two #' groups of cells using a poisson generalized linear model. #' Use only for UMI-based datasets #' \item{"LR"} : Uses a logistic regression framework to determine differentially #' expressed genes. Constructs a logistic regression model predicting group #' membership based on each feature individually and compares this to a null #' model with a likelihood ratio test. #' \item{"MAST"} : Identifies differentially expressed genes between two groups #' of cells using a hurdle model tailored to scRNA-seq data. Utilizes the MAST #' package to run the DE testing. #' \item{"DESeq2"} : Identifies differentially expressed genes between two groups #' of cells based on a model using DESeq2 which uses a negative binomial #' distribution (Love et al, Genome Biology, 2014).This test does not support #' pre-filtering of genes based on average difference (or percent detection rate) #' between cell groups. However, genes may be pre-filtered based on their #' minimum detection rate (min.pct) across both cell groups. To use this method, #' please install DESeq2, using the instructions at #' https://bioconductor.org/packages/release/bioc/html/DESeq2.html #' } #' @param min.pct only test genes that are detected in a minimum fraction of #' min.pct cells in either of the two populations. Meant to speed up the function #' by not testing genes that are very infrequently expressed. Default is 0.1 #' @param min.diff.pct only test genes that show a minimum difference in the #' fraction of detection between the two groups. Set to -Inf by default #' @param only.pos Only return positive markers (FALSE by default) #' @param verbose Print a progress bar once expression testing begins #' @param max.cells.per.ident Down sample each identity class to a max number. #' Default is no downsampling. Not activated by default (set to Inf) #' @param random.seed Random seed for downsampling #' @param latent.vars Variables to test, used only when \code{test.use} is one of #' 'LR', 'negbinom', 'poisson', or 'MAST' #' @param min.cells.feature Minimum number of cells expressing the feature in at least one #' of the two groups, currently only used for poisson and negative binomial tests #' @param min.cells.group Minimum number of cells in one of the groups #' @param pseudocount.use Pseudocount to add to averaged expression values when #' calculating logFC. 1 by default. #' @param fc.results data.frame from FoldChange #' @param densify Convert the sparse matrix to a dense form before running the DE test. This can provide speedups but might require higher memory; default is FALSE #' #' #' @importFrom Matrix rowMeans #' @importFrom stats p.adjust #' #' @rdname FindMarkers #' @concept differential_expression #' @export #' @method FindMarkers default #' FindMarkers.default <- function( object, slot = "data", counts = numeric(), cells.1 = NULL, cells.2 = NULL, features = NULL, logfc.threshold = 0.25, test.use = 'wilcox', min.pct = 0.1, min.diff.pct = -Inf, verbose = TRUE, only.pos = FALSE, max.cells.per.ident = Inf, random.seed = 1, latent.vars = NULL, min.cells.feature = 3, min.cells.group = 3, pseudocount.use = 1, fc.results = NULL, densify = FALSE, ... 
) { ValidateCellGroups( object = object, cells.1 = cells.1, cells.2 = cells.2, min.cells.group = min.cells.group ) features <- features %||% rownames(x = object) # reset parameters so no feature filtering is performed if (test.use %in% DEmethods_noprefilter()) { features <- rownames(x = object) min.diff.pct <- -Inf logfc.threshold <- 0 } data <- switch( EXPR = slot, 'scale.data' = counts, object ) # feature selection (based on percentages) alpha.min <- pmax(fc.results$pct.1, fc.results$pct.2) names(x = alpha.min) <- rownames(x = fc.results) features <- names(x = which(x = alpha.min >= min.pct)) if (length(x = features) == 0) { warning("No features pass min.pct threshold; returning empty data.frame") return(fc.results[features, ]) } alpha.diff <- alpha.min - pmin(fc.results$pct.1, fc.results$pct.2) features <- names( x = which(x = alpha.min >= min.pct & alpha.diff >= min.diff.pct) ) if (length(x = features) == 0) { warning("No features pass min.diff.pct threshold; returning empty data.frame") return(fc.results[features, ]) } # feature selection (based on logFC) if (slot != "scale.data") { total.diff <- fc.results[, 1] #first column is logFC names(total.diff) <- rownames(fc.results) features.diff <- if (only.pos) { names(x = which(x = total.diff >= logfc.threshold)) } else { names(x = which(x = abs(x = total.diff) >= logfc.threshold)) } features <- intersect(x = features, y = features.diff) if (length(x = features) == 0) { warning("No features pass logfc.threshold threshold; returning empty data.frame") return(fc.results[features, ]) } } # subsample cell groups if they are too large if (max.cells.per.ident < Inf) { set.seed(seed = random.seed) if (length(x = cells.1) > max.cells.per.ident) { cells.1 <- sample(x = cells.1, size = max.cells.per.ident) } if (length(x = cells.2) > max.cells.per.ident) { cells.2 <- sample(x = cells.2, size = max.cells.per.ident) } if (!is.null(x = latent.vars)) { latent.vars <- latent.vars[c(cells.1, cells.2), , drop = FALSE] } } de.results <- PerformDE( object = object, cells.1 = cells.1, cells.2 = cells.2, features = features, test.use = test.use, verbose = verbose, min.cells.feature = min.cells.feature, latent.vars = latent.vars, densify = densify, ... ) de.results <- cbind(de.results, fc.results[rownames(x = de.results), , drop = FALSE]) if (only.pos) { de.results <- de.results[de.results[, 2] > 0, , drop = FALSE] } if (test.use %in% DEmethods_nocorrect()) { de.results <- de.results[order(-de.results$power, -de.results[, 1]), ] } else { de.results <- de.results[order(de.results$p_val, -de.results[, 1]), ] de.results$p_val_adj = p.adjust( p = de.results$p_val, method = "bonferroni", n = nrow(x = object) ) } return(de.results) } #' @rdname FindMarkers #' @concept differential_expression #' @export #' @method FindMarkers Assay #' FindMarkers.Assay <- function( object, slot = "data", cells.1 = NULL, cells.2 = NULL, features = NULL, logfc.threshold = 0.25, test.use = 'wilcox', min.pct = 0.1, min.diff.pct = -Inf, verbose = TRUE, only.pos = FALSE, max.cells.per.ident = Inf, random.seed = 1, latent.vars = NULL, min.cells.feature = 3, min.cells.group = 3, pseudocount.use = 1, mean.fxn = NULL, fc.name = NULL, base = 2, densify = FALSE, ... 
) { data.slot <- ifelse( test = test.use %in% DEmethods_counts(), yes = 'counts', no = slot ) data.use <- GetAssayData(object = object, slot = data.slot) counts <- switch( EXPR = data.slot, 'scale.data' = GetAssayData(object = object, slot = "counts"), numeric() ) fc.results <- FoldChange( object = object, slot = data.slot, cells.1 = cells.1, cells.2 = cells.2, features = features, pseudocount.use = pseudocount.use, mean.fxn = mean.fxn, fc.name = fc.name, base = base ) de.results <- FindMarkers( object = data.use, slot = data.slot, counts = counts, cells.1 = cells.1, cells.2 = cells.2, features = features, logfc.threshold = logfc.threshold, test.use = test.use, min.pct = min.pct, min.diff.pct = min.diff.pct, verbose = verbose, only.pos = only.pos, max.cells.per.ident = max.cells.per.ident, random.seed = random.seed, latent.vars = latent.vars, min.cells.feature = min.cells.feature, min.cells.group = min.cells.group, pseudocount.use = pseudocount.use, fc.results = fc.results, densify = densify, ... ) return(de.results) } #' @param recorrect_umi Recalculate corrected UMI counts using minimum of the median UMIs when performing DE using multiple SCT objects; default is TRUE #' #' @rdname FindMarkers #' @concept differential_expression #' @export #' @method FindMarkers SCTAssay #' FindMarkers.SCTAssay <- function( object, slot = "data", cells.1 = NULL, cells.2 = NULL, features = NULL, logfc.threshold = 0.25, test.use = 'wilcox', min.pct = 0.1, min.diff.pct = -Inf, verbose = TRUE, only.pos = FALSE, max.cells.per.ident = Inf, random.seed = 1, latent.vars = NULL, min.cells.feature = 3, min.cells.group = 3, pseudocount.use = 1, mean.fxn = NULL, fc.name = NULL, base = 2, densify = FALSE, recorrect_umi = TRUE, ... ) { data.slot <- ifelse( test = test.use %in% DEmethods_counts(), yes = 'counts', no = slot ) if (recorrect_umi && length(x = levels(x = object)) > 1) { cell_attributes <- SCTResults(object = object, slot = "cell.attributes") observed_median_umis <- lapply( X = cell_attributes, FUN = function(x) median(x[, "umi"]) ) model.list <- slot(object = object, "SCTModel.list") median_umi.status <- lapply(X = model.list, FUN = function(x) { return(tryCatch( expr = slot(object = x, name = 'median_umi'), error = function(...) {return(NULL)}) )}) if (any(is.null(unlist(median_umi.status)))){ stop("SCT assay does not contain median UMI information.", "Run `PrepSCTFindMarkers()` before running `FindMarkers()` or invoke `FindMarkers(recorrect_umi=FALSE)`.") } model_median_umis <- SCTResults(object = object, slot = "median_umi") min_median_umi <- min(unlist(x = observed_median_umis)) if (any(unlist(model_median_umis) != min_median_umi)){ stop("Object contains multiple models with unequal library sizes. 
Run `PrepSCTFindMarkers()` before running `FindMarkers()`.") } } data.use <- GetAssayData(object = object, slot = data.slot) counts <- switch( EXPR = data.slot, 'scale.data' = GetAssayData(object = object, slot = "counts"), numeric() ) fc.results <- FoldChange( object = object, slot = data.slot, cells.1 = cells.1, cells.2 = cells.2, features = features, pseudocount.use = pseudocount.use, mean.fxn = mean.fxn, fc.name = fc.name, base = base ) de.results <- FindMarkers( object = data.use, slot = data.slot, counts = counts, cells.1 = cells.1, cells.2 = cells.2, features = features, logfc.threshold = logfc.threshold, test.use = test.use, min.pct = min.pct, min.diff.pct = min.diff.pct, verbose = verbose, only.pos = only.pos, max.cells.per.ident = max.cells.per.ident, random.seed = random.seed, latent.vars = latent.vars, min.cells.feature = min.cells.feature, min.cells.group = min.cells.group, pseudocount.use = pseudocount.use, fc.results = fc.results, densify = densify, ... ) return(de.results) } #' @importFrom Matrix rowMeans #' @rdname FindMarkers #' @concept differential_expression #' @export #' @method FindMarkers DimReduc #' FindMarkers.DimReduc <- function( object, cells.1 = NULL, cells.2 = NULL, features = NULL, logfc.threshold = 0.25, test.use = "wilcox", min.pct = 0.1, min.diff.pct = -Inf, verbose = TRUE, only.pos = FALSE, max.cells.per.ident = Inf, random.seed = 1, latent.vars = NULL, min.cells.feature = 3, min.cells.group = 3, pseudocount.use = 1, mean.fxn = rowMeans, fc.name = NULL, densify = FALSE, ... ) { if (test.use %in% DEmethods_counts()) { stop("The following tests cannot be used for differential expression on a reduction as they assume a count model: ", paste(DEmethods_counts(), collapse=", ")) } data <- t(x = Embeddings(object = object)) ValidateCellGroups( object = data, cells.1 = cells.1, cells.2 = cells.2, min.cells.group = min.cells.group ) features <- features %||% rownames(x = data) # reset parameters so no feature filtering is performed if (test.use %in% DEmethods_noprefilter()) { features <- rownames(x = data) min.diff.pct <- -Inf logfc.threshold <- 0 } fc.results <- FoldChange( object = object, cells.1 = cells.1, cells.2 = cells.2, features = features, mean.fxn = mean.fxn, fc.name = fc.name ) # subsample cell groups if they are too large if (max.cells.per.ident < Inf) { set.seed(seed = random.seed) if (length(x = cells.1) > max.cells.per.ident) { cells.1 <- sample(x = cells.1, size = max.cells.per.ident) } if (length(x = cells.2) > max.cells.per.ident) { cells.2 <- sample(x = cells.2, size = max.cells.per.ident) } if (!is.null(x = latent.vars)) { latent.vars <- latent.vars[c(cells.1, cells.2), , drop = FALSE] } } de.results <- PerformDE( object = data, cells.1 = cells.1, cells.2 = cells.2, features = features, test.use = test.use, verbose = verbose, min.cells.feature = min.cells.feature, latent.vars = latent.vars, densify = densify, ... 
) de.results <- cbind(de.results, fc.results) if (only.pos) { de.results <- de.results[de.results$avg_diff > 0, , drop = FALSE] } if (test.use %in% DEmethods_nocorrect()) { de.results <- de.results[order(-de.results$power, -de.results$avg_diff), ] } else { de.results <- de.results[order(de.results$p_val, -de.results$avg_diff), ] de.results$p_val_adj = p.adjust( p = de.results$p_val, method = "bonferroni", n = nrow(x = object) ) } return(de.results) } #' @param ident.1 Identity class to define markers for; pass an object of class #' \code{phylo} or 'clustertree' to find markers for a node in a cluster tree; #' passing 'clustertree' requires \code{\link{BuildClusterTree}} to have been run #' @param ident.2 A second identity class for comparison; if \code{NULL}, #' use all other cells for comparison; if an object of class \code{phylo} or #' 'clustertree' is passed to \code{ident.1}, must pass a node to find markers for #' @param reduction Reduction to use in differential expression testing - will test for DE on cell embeddings #' @param group.by Regroup cells into a different identity class prior to performing differential expression (see example) #' @param subset.ident Subset a particular identity class prior to regrouping. Only relevant if group.by is set (see example) #' @param assay Assay to use in differential expression testing #' @param slot Slot to pull data from; note that if \code{test.use} is "negbinom", "poisson", or "DESeq2", #' \code{slot} will be set to "counts" #' @param mean.fxn Function to use for fold change or average difference calculation. #' If NULL, the appropriate function will be chose according to the slot used #' @param fc.name Name of the fold change, average difference, or custom function column #' in the output data.frame. If NULL, the fold change column will be named #' according to the logarithm base (eg, "avg_log2FC"), or if using the scale.data #' slot "avg_diff". #' @param base The base with respect to which logarithms are computed. #' #' @rdname FindMarkers #' @concept differential_expression #' @export #' @method FindMarkers Seurat #' FindMarkers.Seurat <- function( object, ident.1 = NULL, ident.2 = NULL, group.by = NULL, subset.ident = NULL, assay = NULL, slot = 'data', reduction = NULL, features = NULL, logfc.threshold = 0.25, test.use = "wilcox", min.pct = 0.1, min.diff.pct = -Inf, verbose = TRUE, only.pos = FALSE, max.cells.per.ident = Inf, random.seed = 1, latent.vars = NULL, min.cells.feature = 3, min.cells.group = 3, pseudocount.use = 1, mean.fxn = NULL, fc.name = NULL, base = 2, densify = FALSE, ... 
) { if (!is.null(x = group.by)) { if (!is.null(x = subset.ident)) { object <- subset(x = object, idents = subset.ident) } Idents(object = object) <- group.by } if (!is.null(x = assay) && !is.null(x = reduction)) { stop("Please only specify either assay or reduction.") } if (length(x = ident.1) == 0) { stop("At least 1 ident must be specified in `ident.1`") } # select which data to use if (is.null(x = reduction)) { assay <- assay %||% DefaultAssay(object = object) data.use <- object[[assay]] cellnames.use <- colnames(x = data.use) } else { data.use <- object[[reduction]] cellnames.use <- rownames(x = data.use) } cells <- IdentsToCells( object = object, ident.1 = ident.1, ident.2 = ident.2, cellnames.use = cellnames.use ) # fetch latent.vars if (!is.null(x = latent.vars)) { latent.vars <- FetchData( object = object, vars = latent.vars, cells = c(cells$cells.1, cells$cells.2) ) } # check normalization method norm.command <- paste0("NormalizeData.", assay) if (norm.command %in% Command(object = object) && is.null(x = reduction)) { norm.method <- Command( object = object, command = norm.command, value = "normalization.method" ) if (norm.method != "LogNormalize") { mean.fxn <- function(x) { return(log(x = rowMeans(x = x) + pseudocount.use, base = base)) } } } de.results <- FindMarkers( object = data.use, slot = slot, cells.1 = cells$cells.1, cells.2 = cells$cells.2, features = features, logfc.threshold = logfc.threshold, test.use = test.use, min.pct = min.pct, min.diff.pct = min.diff.pct, verbose = verbose, only.pos = only.pos, max.cells.per.ident = max.cells.per.ident, random.seed = random.seed, latent.vars = latent.vars, min.cells.feature = min.cells.feature, min.cells.group = min.cells.group, pseudocount.use = pseudocount.use, mean.fxn = mean.fxn, base = base, fc.name = fc.name, densify = densify, ... ) return(de.results) } #' @param cells.1 Vector of cell names belonging to group 1 #' @param cells.2 Vector of cell names belonging to group 2 #' @param features Features to calculate fold change for. #' If NULL, use all features #' @importFrom Matrix rowSums #' @rdname FoldChange #' @concept differential_expression #' @export #' @method FoldChange default FoldChange.default <- function( object, cells.1, cells.2, mean.fxn, fc.name, features = NULL, ... ) { features <- features %||% rownames(x = object) # Calculate percent expressed thresh.min <- 0 pct.1 <- round( x = rowSums(x = object[features, cells.1, drop = FALSE] > thresh.min) / length(x = cells.1), digits = 3 ) pct.2 <- round( x = rowSums(x = object[features, cells.2, drop = FALSE] > thresh.min) / length(x = cells.2), digits = 3 ) # Calculate fold change data.1 <- mean.fxn(object[features, cells.1, drop = FALSE]) data.2 <- mean.fxn(object[features, cells.2, drop = FALSE]) fc <- (data.1 - data.2) fc.results <- as.data.frame(x = cbind(fc, pct.1, pct.2)) colnames(fc.results) <- c(fc.name, "pct.1", "pct.2") return(fc.results) } #' @importFrom Matrix rowMeans #' @rdname FoldChange #' @concept differential_expression #' @export #' @method FoldChange Assay FoldChange.Assay <- function( object, cells.1, cells.2, features = NULL, slot = "data", pseudocount.use = 1, fc.name = NULL, mean.fxn = NULL, base = 2, ... 
) { data <- GetAssayData(object = object, slot = slot) mean.fxn <- mean.fxn %||% switch( EXPR = slot, 'data' = function(x) { return(log(x = rowMeans(x = expm1(x = x)) + pseudocount.use, base = base)) }, 'scale.data' = rowMeans, function(x) { return(log(x = rowMeans(x = x) + pseudocount.use, base = base)) } ) # Omit the decimal value of e from the column name if base == exp(1) base.text <- ifelse( test = base == exp(1), yes = "", no = base ) fc.name <- fc.name %||% ifelse( test = slot == "scale.data", yes = "avg_diff", no = paste0("avg_log", base.text, "FC") ) FoldChange( object = data, cells.1 = cells.1, cells.2 = cells.2, features = features, mean.fxn = mean.fxn, fc.name = fc.name ) } #' @importFrom Matrix rowMeans #' @rdname FoldChange #' @concept differential_expression #' @export #' @method FoldChange DimReduc FoldChange.DimReduc <- function( object, cells.1, cells.2, features = NULL, slot = NULL, pseudocount.use = NULL, fc.name = NULL, mean.fxn = NULL, ... ) { mean.fxn <- mean.fxn %||% rowMeans fc.name <- fc.name %||% "avg_diff" data <- t(x = Embeddings(object = object)) features <- features %||% rownames(x = data) # Calculate avg difference data.1 <- mean.fxn(data[features, cells.1, drop = FALSE]) data.2 <- mean.fxn(data[features, cells.2, drop = FALSE]) fc <- (data.1 - data.2) fc.results <- data.frame(fc) colnames(fc.results) <- fc.name return(fc.results) } #' @param ident.1 Identity class to calculate fold change for; pass an object of class #' \code{phylo} or 'clustertree' to calculate fold change for a node in a cluster tree; #' passing 'clustertree' requires \code{\link{BuildClusterTree}} to have been run #' @param ident.2 A second identity class for comparison; if \code{NULL}, #' use all other cells for comparison; if an object of class \code{phylo} or #' 'clustertree' is passed to \code{ident.1}, must pass a node to calculate fold change for #' @param reduction Reduction to use - will calculate average difference on cell embeddings #' @param group.by Regroup cells into a different identity class prior to #' calculating fold change (see example in \code{\link{FindMarkers}}) #' @param subset.ident Subset a particular identity class prior to regrouping. #' Only relevant if group.by is set (see example in \code{\link{FindMarkers}}) #' @param assay Assay to use in fold change calculation #' @param slot Slot to pull data from #' @param pseudocount.use Pseudocount to add to averaged expression values when #' calculating logFC. 1 by default. #' @param mean.fxn Function to use for fold change or average difference calculation #' @param base The base with respect to which logarithms are computed. #' @param fc.name Name of the fold change, average difference, or custom function column #' in the output data.frame #' #' @rdname FoldChange #' @concept differential_expression #' @export #' @method FoldChange Seurat FoldChange.Seurat <- function( object, ident.1 = NULL, ident.2 = NULL, group.by = NULL, subset.ident = NULL, assay = NULL, slot = 'data', reduction = NULL, features = NULL, pseudocount.use = 1, mean.fxn = NULL, base = 2, fc.name = NULL, ... 
) { if (!is.null(x = group.by)) { if (!is.null(x = subset.ident)) { object <- subset(x = object, idents = subset.ident) } Idents(object = object) <- group.by } if (!is.null(x = assay) && !is.null(x = reduction)) { stop("Please only specify either assay or reduction.") } # select which data to use if (is.null(x = reduction)) { assay <- assay %||% DefaultAssay(object = object) data.use <- object[[assay]] cellnames.use <- colnames(x = data.use) } else { data.use <- object[[reduction]] cellnames.use <- rownames(data.use) } cells <- IdentsToCells( object = object, ident.1 = ident.1, ident.2 = ident.2, cellnames.use = cellnames.use ) fc.results <- FoldChange( object = data.use, cells.1 = cells$cells.1, cells.2 = cells$cells.2, features = features, slot = slot, pseudocount.use = pseudocount.use, mean.fxn = mean.fxn, base = base, fc.name = fc.name ) return(fc.results) } #%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% # Internal #%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% # internal function to calculate AUC values #' @importFrom pbapply pblapply # AUCMarkerTest <- function(data1, data2, mygenes, print.bar = TRUE) { myAUC <- unlist(x = lapply( X = mygenes, FUN = function(x) { return(DifferentialAUC( x = as.numeric(x = data1[x, ]), y = as.numeric(x = data2[x, ]) )) } )) myAUC[is.na(x = myAUC)] <- 0 iterate.fxn <- ifelse(test = print.bar, yes = pblapply, no = lapply) avg_diff <- unlist(x = iterate.fxn( X = mygenes, FUN = function(x) { return( ExpMean( x = as.numeric(x = data1[x, ]) ) - ExpMean( x = as.numeric(x = data2[x, ]) ) ) } )) toRet <- data.frame(cbind(myAUC, avg_diff), row.names = mygenes) toRet <- toRet[rev(x = order(toRet$myAUC)), ] return(toRet) } #internal function to run mcdavid et al. DE test # #' @importFrom stats sd dnorm # bimodLikData <- function(x, xmin = 0) { x1 <- x[x <= xmin] x2 <- x[x > xmin] xal <- MinMax( data = length(x = x2) / length(x = x), min = 1e-5, max = (1 - 1e-5) ) likA <- length(x = x1) * log(x = 1 - xal) if (length(x = x2) < 2) { mysd <- 1 } else { mysd <- sd(x = x2) } likB <- length(x = x2) * log(x = xal) + sum(dnorm(x = x2, mean = mean(x = x2), sd = mysd, log = TRUE)) return(likA + likB) } # returns tests that do not support feature pre-filtering DEmethods_noprefilter <- function() { c("DESeq2") } # returns tests that support latent variables (latent.vars) DEmethods_latent <- function() { c('negbinom', 'poisson', 'MAST', "LR") } # returns tests that require CheckDots DEmethods_checkdots <- function() { c('wilcox', 'MAST', 'DESeq2') } # returns tests that do not use Bonferroni correction on the DE results DEmethods_nocorrect <- function() { c('roc') } # returns tests that require count data DEmethods_counts <- function() { c("negbinom", "poisson", "DESeq2") } # Differential expression using DESeq2 # # Identifies differentially expressed genes between two groups of cells using # DESeq2 # # @references Love MI, Huber W and Anders S (2014). "Moderated estimation of # fold change and dispersion for RNA-seq data with DESeq2." Genome Biology. # https://bioconductor.org/packages/release/bioc/html/DESeq2.html # @param data.use Data matrix to test # @param cells.1 Group 1 cells # @param cells.2 Group 2 cells # @param verbose Print a progress bar # @param ... Extra parameters to pass to DESeq2::results # @return Returns a p-value ranked matrix of putative differentially expressed # genes. 
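#
# For orientation: the wrapper below is a thin layer over the standard DESeq2
# workflow. A minimal sketch of the equivalent direct calls (assuming a counts
# matrix `data.use` and the two-group `group.info` data frame built inside the
# function) would be:
#   dds <- DESeq2::DESeqDataSetFromMatrix(countData = data.use, colData = group.info, design = ~ group)
#   dds <- DESeq2::estimateSizeFactors(object = dds)
#   dds <- DESeq2::estimateDispersions(object = dds, fitType = "local")
#   dds <- DESeq2::nbinomWaldTest(object = dds)
#   res <- DESeq2::results(object = dds, contrast = c("group", "Group1", "Group2"), alpha = 0.05)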
# # @details # This test does not support pre-filtering of genes based on average difference # (or percent detection rate) between cell groups. However, genes may be # pre-filtered based on their minimum detection rate (min.pct) across both cell # groups. To use this method, please install DESeq2, using the instructions at # https://bioconductor.org/packages/release/bioc/html/DESeq2.html # # @export # # @examples # \dontrun{ # data("pbmc_small") # pbmc_small # DESeq2DETest(pbmc_small, cells.1 = WhichCells(object = pbmc_small, idents = 1), # cells.2 = WhichCells(object = pbmc_small, idents = 2)) # } # DESeq2DETest <- function( data.use, cells.1, cells.2, verbose = TRUE, ... ) { if (!PackageCheck('DESeq2', error = FALSE)) { stop("Please install DESeq2 - learn more at https://bioconductor.org/packages/release/bioc/html/DESeq2.html") } CheckDots(..., fxns = 'DESeq2::results') group.info <- data.frame(row.names = c(cells.1, cells.2)) group.info[cells.1, "group"] <- "Group1" group.info[cells.2, "group"] <- "Group2" group.info[, "group"] <- factor(x = group.info[, "group"]) group.info$wellKey <- rownames(x = group.info) dds1 <- DESeq2::DESeqDataSetFromMatrix( countData = data.use, colData = group.info, design = ~ group ) dds1 <- DESeq2::estimateSizeFactors(object = dds1) dds1 <- DESeq2::estimateDispersions(object = dds1, fitType = "local") dds1 <- DESeq2::nbinomWaldTest(object = dds1) res <- DESeq2::results( object = dds1, contrast = c("group", "Group1", "Group2"), alpha = 0.05, ... ) to.return <- data.frame(p_val = res$pvalue, row.names = rownames(res)) return(to.return) } # internal function to calculate AUC values #' @importFrom ROCR prediction performance #' DifferentialAUC <- function(x, y) { prediction.use <- prediction( predictions = c(x, y), labels = c(rep(x = 1, length(x = x)), rep(x = 0, length(x = y))), label.ordering = 0:1 ) perf.use <- performance(prediction.obj = prediction.use, measure = "auc") auc.use <- round(x = perf.use@y.values[[1]], digits = 3) return(auc.use) } #internal function to run mcdavid et al. DE test # #' @importFrom stats pchisq # DifferentialLRT <- function(x, y, xmin = 0) { lrtX <- bimodLikData(x = x) lrtY <- bimodLikData(x = y) lrtZ <- bimodLikData(x = c(x, y)) lrt_diff <- 2 * (lrtX + lrtY - lrtZ) return(pchisq(q = lrt_diff, df = 3, lower.tail = F)) } # Likelihood ratio test for zero-inflated data # # Identifies differentially expressed genes between two groups of cells using # the LRT model proposed in McDavid et al, Bioinformatics, 2013 # # @inheritParams FindMarkers # @param object Seurat object # @param cells.1 Group 1 cells # @param cells.2 Group 2 cells # @param assay.type Type of assay to fetch data for (default is RNA) # @return Returns a p-value ranked matrix of putative differentially expressed # genes. 
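#
# Each gene's p-value is a likelihood-ratio test on the zero-inflated
# ("bimodal") model: the expression values are fit separately per group and
# jointly, and twice the difference in log-likelihood is compared to a
# chi-squared distribution with 3 degrees of freedom (see DifferentialLRT):
#   lrt_diff <- 2 * (bimodLikData(x = x) + bimodLikData(x = y) - bimodLikData(x = c(x, y)))
#   p_val    <- pchisq(q = lrt_diff, df = 3, lower.tail = FALSE)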
# #' @importFrom pbapply pbsapply #' @importFrom future.apply future_sapply #' @importFrom future nbrOfWorkers # # @export # @examples # data("pbmc_small") # pbmc_small # DiffExpTest(pbmc_small, cells.1 = WhichCells(object = pbmc_small, idents = 1), # cells.2 = WhichCells(object = pbmc_small, idents = 2)) # DiffExpTest <- function( data.use, cells.1, cells.2, verbose = TRUE ) { my.sapply <- ifelse( test = verbose && nbrOfWorkers() == 1, yes = pbsapply, no = future_sapply ) p_val <- unlist( x = my.sapply( X = 1:nrow(x = data.use), FUN = function(x) { return(DifferentialLRT( x = as.numeric(x = data.use[x, cells.1]), y = as.numeric(x = data.use[x, cells.2]) )) } ) ) to.return <- data.frame(p_val, row.names = rownames(x = data.use)) return(to.return) } # Differential expression testing using Student's t-test # # Identify differentially expressed genes between two groups of cells using # the Student's t-test # # @return Returns a p-value ranked matrix of putative differentially expressed # genes. # #' @importFrom stats t.test #' @importFrom pbapply pbsapply #' @importFrom future.apply future_sapply #' @importFrom future nbrOfWorkers # # @export # # @examples # data("pbmc_small") # pbmc_small # DiffTTest(pbmc_small, cells.1 = WhichCells(object = pbmc_small, idents = 1), # cells.2 = WhichCells(object = pbmc_small, idents = 2)) DiffTTest <- function( data.use, cells.1, cells.2, verbose = TRUE ) { my.sapply <- ifelse( test = verbose && nbrOfWorkers() == 1, yes = pbsapply, no = future_sapply ) p_val <- unlist( x = my.sapply( X = 1:nrow(data.use), FUN = function(x) { t.test(x = data.use[x, cells.1], y = data.use[x, cells.2])$p.value } ) ) to.return <- data.frame(p_val,row.names = rownames(x = data.use)) return(to.return) } # Tests for UMI-count based data # # Identifies differentially expressed genes between two groups of cells using # either a negative binomial or poisson generalized linear model # # @param data.use Data to test # @param cells.1 Group 1 cells # @param cells.2 Group 2 cells # @param min.cells Minimum number of cells threshold # @param latent.vars Latent variables to test # @param test.use parameterizes the glm # @param verbose Print progress bar # # @return Returns a p-value ranked matrix of putative differentially expressed # genes. 
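#
# Per gene, the test fits a generalized linear model of the form
# GENE ~ group (+ any latent.vars) and reports the p-value of the group
# coefficient. A minimal sketch of the per-gene negative binomial fit,
# mirroring the loop below (with `latent.vars` holding the GENE column):
#   fmla <- as.formula(paste("GENE ~", paste(latent.var.names, collapse = "+")))
#   fit  <- MASS::glm.nb(formula = fmla, data = latent.vars)
#   p    <- summary(object = fit)$coef[2, 4]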
# #' @importFrom MASS glm.nb #' @importFrom pbapply pbsapply #' @importFrom stats var as.formula #' @importFrom future.apply future_sapply #' @importFrom future nbrOfWorkers #' # @export # # @examples # data("pbmc_small") # pbmc_small # # Note, not recommended for particularly small datasets - expect warnings # NegBinomDETest(pbmc_small, cells.1 = WhichCells(object = pbmc_small, idents = 1), # cells.2 = WhichCells(object = pbmc_small, idents = 2)) # GLMDETest <- function( data.use, cells.1, cells.2, min.cells = 3, latent.vars = NULL, test.use = NULL, verbose = TRUE ) { group.info <- data.frame( group = rep( x = c('Group1', 'Group2'), times = c(length(x = cells.1), length(x = cells.2)) ) ) rownames(group.info) <- c(cells.1, cells.2) group.info[, "group"] <- factor(x = group.info[, "group"]) latent.vars <- if (is.null(x = latent.vars)) { group.info } else { cbind(x = group.info, latent.vars) } latent.var.names <- colnames(x = latent.vars) my.sapply <- ifelse( test = verbose && nbrOfWorkers() == 1, yes = pbsapply, no = future_sapply ) p_val <- unlist( x = my.sapply( X = 1:nrow(data.use), FUN = function(x) { latent.vars[, "GENE"] <- as.numeric(x = data.use[x, ]) # check that gene is expressed in specified number of cells in one group if (sum(latent.vars$GENE[latent.vars$group == "Group1"] > 0) < min.cells && sum(latent.vars$GENE[latent.vars$group == "Group2"] > 0) < min.cells) { warning(paste0( "Skipping gene --- ", x, ". Fewer than ", min.cells, " cells in both clusters." )) return(2) } # check that variance between groups is not 0 if (var(x = latent.vars$GENE) == 0) { warning(paste0( "Skipping gene -- ", x, ". No variance in expression between the two clusters." )) return(2) } fmla <- as.formula(object = paste( "GENE ~", paste(latent.var.names, collapse = "+") )) p.estimate <- 2 if (test.use == "negbinom") { try( expr = p.estimate <- summary( object = glm.nb(formula = fmla, data = latent.vars) )$coef[2, 4], silent = TRUE ) return(p.estimate) } else if (test.use == "poisson") { return(summary(object = glm( formula = fmla, data = latent.vars, family = "poisson" ))$coef[2,4]) } } ) ) features.keep <- rownames(data.use) if (length(x = which(x = p_val == 2)) > 0) { features.keep <- features.keep[-which(x = p_val == 2)] p_val <- p_val[!p_val == 2] } to.return <- data.frame(p_val, row.names = features.keep) return(to.return) } # Helper function for FindMarkers.Seurat and FoldChange.Seurat # Convert idents to cells # #' @importFrom methods is # IdentsToCells <- function( object, ident.1, ident.2, cellnames.use ) { # if (is.null(x = ident.1)) { stop("Please provide ident.1") } else if ((length(x = ident.1) == 1 && ident.1[1] == 'clustertree') || is(object = ident.1, class2 = 'phylo')) { if (is.null(x = ident.2)) { stop("Please pass a node to 'ident.2' to run FindMarkers on a tree") } tree <- if (is(object = ident.1, class2 = 'phylo')) { ident.1 } else { Tool(object = object, slot = 'BuildClusterTree') } if (is.null(x = tree)) { stop("Please run 'BuildClusterTree' or pass an object of class 'phylo' as 'ident.1'") } ident.1 <- tree$tip.label[GetLeftDescendants(tree = tree, node = ident.2)] ident.2 <- tree$tip.label[GetRightDescendants(tree = tree, node = ident.2)] } if (length(x = as.vector(x = ident.1)) > 1 && any(as.character(x = ident.1) %in% cellnames.use)) { bad.cells <- cellnames.use[which(x = !as.character(x = ident.1) %in% cellnames.use)] if (length(x = bad.cells) > 0) { stop(paste0("The following cell names provided to ident.1 are not present in the object: ", paste(bad.cells, collapse = ", 
"))) } } else { ident.1 <- WhichCells(object = object, idents = ident.1) } # if NULL for ident.2, use all other cells if (length(x = as.vector(x = ident.2)) > 1 && any(as.character(x = ident.2) %in% cellnames.use)) { bad.cells <- cellnames.use[which(!as.character(x = ident.2) %in% cellnames.use)] if (length(x = bad.cells) > 0) { stop(paste0("The following cell names provided to ident.2 are not present in the object: ", paste(bad.cells, collapse = ", "))) } } else { if (is.null(x = ident.2)) { ident.2 <- setdiff(x = cellnames.use, y = ident.1) } else { ident.2 <- WhichCells(object = object, idents = ident.2) } } return(list(cells.1 = ident.1, cells.2 = ident.2)) } # Perform differential expression testing using a logistic regression framework # # Constructs a logistic regression model predicting group membership based on a # given feature and compares this to a null model with a likelihood ratio test. # # @param data.use expression matrix # @param cells.1 Vector of cells in group 1 # @param cells2. Vector of cells in group 2 # @param latent.vars Latent variables to include in model # @param verbose Print messages # #' @importFrom lmtest lrtest #' @importFrom pbapply pbsapply #' @importFrom stats as.formula glm #' @importFrom future.apply future_sapply #' @importFrom future nbrOfWorkers # LRDETest <- function( data.use, cells.1, cells.2, latent.vars = NULL, verbose = TRUE ) { group.info <- data.frame(row.names = c(cells.1, cells.2)) group.info[cells.1, "group"] <- "Group1" group.info[cells.2, "group"] <- "Group2" group.info[, "group"] <- factor(x = group.info[, "group"]) data.use <- data.use[, rownames(group.info), drop = FALSE] latent.vars <- latent.vars[rownames(group.info), , drop = FALSE] my.sapply <- ifelse( test = verbose && nbrOfWorkers() == 1, yes = pbsapply, no = future_sapply ) p_val <- my.sapply( X = 1:nrow(x = data.use), FUN = function(x) { if (is.null(x = latent.vars)) { model.data <- cbind(GENE = data.use[x, ], group.info) fmla <- as.formula(object = "group ~ GENE") fmla2 <- as.formula(object = "group ~ 1") } else { model.data <- cbind(GENE = data.use[x, ], group.info, latent.vars) fmla <- as.formula(object = paste( "group ~ GENE +", paste(colnames(x = latent.vars), collapse = "+") )) fmla2 <- as.formula(object = paste( "group ~", paste(colnames(x = latent.vars), collapse = "+") )) } model1 <- glm(formula = fmla, data = model.data, family = "binomial") model2 <- glm(formula = fmla2, data = model.data, family = "binomial") lrtest <- lrtest(model1, model2) return(lrtest$Pr[2]) } ) to.return <- data.frame(p_val, row.names = rownames(data.use)) return(to.return) } # ROC-based marker discovery # # Identifies 'markers' of gene expression using ROC analysis. For each gene, # evaluates (using AUC) a classifier built on that gene alone, to classify # between two groups of cells. # # An AUC value of 1 means that expression values for this gene alone can # perfectly classify the two groupings (i.e. Each of the cells in cells.1 # exhibit a higher level than each of the cells in cells.2). An AUC value of 0 # also means there is perfect classification, but in the other direction. A # value of 0.5 implies that the gene has no predictive power to classify the # two groups. # # @return Returns a 'predictive power' (abs(AUC-0.5) * 2) ranked matrix of # putative differentially expressed genes. 
# # @export # # @examples # data("pbmc_small") # pbmc_small # MarkerTest(pbmc_small, cells.1 = WhichCells(object = pbmc_small, idents = 1), # cells.2 = WhichCells(object = pbmc_small, idents = 2)) # MarkerTest <- function( data.use, cells.1, cells.2, verbose = TRUE ) { to.return <- AUCMarkerTest( data1 = data.use[, cells.1, drop = FALSE], data2 = data.use[, cells.2, drop = FALSE], mygenes = rownames(x = data.use), print.bar = verbose ) to.return$power <- abs(x = to.return$myAUC - 0.5) * 2 return(to.return) } # Differential expression using MAST # # Identifies differentially expressed genes between two groups of cells using # a hurdle model tailored to scRNA-seq data. Utilizes the MAST package to run # the DE testing. # # @references Andrew McDavid, Greg Finak and Masanao Yajima (2017). MAST: Model-based # Analysis of Single Cell Transcriptomics. R package version 1.2.1. # https://github.com/RGLab/MAST/ # # @param data.use Data to test # @param cells.1 Group 1 cells # @param cells.2 Group 2 cells # @param latent.vars Confounding variables to adjust for in DE test # @param verbose print output # @param \dots Additional parameters to zero-inflated regression (zlm) function # in MAST # @details # To use this method, please install MAST, using instructions at https://github.com/RGLab/MAST/ # # @return Returns a p-value ranked matrix of putative differentially expressed # genes. # #' @importFrom stats relevel MASTDETest <- function( data.use, cells.1, cells.2, latent.vars = NULL, verbose = TRUE, ... ) { # Check for MAST if (!PackageCheck('MAST', error = FALSE)) { stop("Please install MAST - learn more at https://github.com/RGLab/MAST") } group.info <- data.frame(row.names = c(cells.1, cells.2)) latent.vars <- latent.vars %||% group.info group.info[cells.1, "group"] <- "Group1" group.info[cells.2, "group"] <- "Group2" group.info[, "group"] <- factor(x = group.info[, "group"]) latent.vars.names <- c("condition", colnames(x = latent.vars)) latent.vars <- cbind(latent.vars, group.info) latent.vars$wellKey <- rownames(x = latent.vars) fdat <- data.frame(rownames(x = data.use)) colnames(x = fdat)[1] <- "primerid" rownames(x = fdat) <- fdat[, 1] sca <- MAST::FromMatrix( exprsArray = as.matrix(x = data.use), check_sanity = FALSE, cData = latent.vars, fData = fdat ) cond <- factor(x = SummarizedExperiment::colData(sca)$group) cond <- relevel(x = cond, ref = "Group1") SummarizedExperiment::colData(sca)$condition <- cond fmla <- as.formula( object = paste0(" ~ ", paste(latent.vars.names, collapse = "+")) ) zlmCond <- MAST::zlm(formula = fmla, sca = sca, ...) 
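  # The fitted hurdle model is summarised with a likelihood-ratio test on the
  # condition coefficient (Group2 versus the Group1 reference level); p-values
  # are then taken from the combined hurdle ('H') component of that summary.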
summaryCond <- MAST::summary(object = zlmCond, doLRT = 'conditionGroup2') summaryDt <- summaryCond$datatable # fcHurdle <- merge( # summaryDt[contrast=='conditionGroup2' & component=='H', .(primerid, `Pr(>Chisq)`)], #hurdle P values # summaryDt[contrast=='conditionGroup2' & component=='logFC', .(primerid, coef, ci.hi, ci.lo)], by='primerid' # ) #logFC coefficients # fcHurdle[,fdr:=p.adjust(`Pr(>Chisq)`, 'fdr')] p_val <- summaryDt[summaryDt[, "component"] == "H", 4] genes.return <- summaryDt[summaryDt[, "component"] == "H", 1] # p_val <- subset(summaryDt, component == "H")[, 4] # genes.return <- subset(summaryDt, component == "H")[, 1] to.return <- data.frame(p_val, row.names = genes.return) return(to.return) } # compare two negative binomial regression models # model one uses only common factors (com.fac) # model two additionally uses group factor (grp.fac) # #' @importFrom stats glm anova coef # NBModelComparison <- function(y, theta, latent.data, com.fac, grp.fac) { tab <- as.matrix(x = table(y > 0, latent.data[, grp.fac])) freqs <- tab['TRUE', ] / apply(X = tab, MARGIN = 2, FUN = sum) fit2 <- 0 fit4 <- 0 try( expr = fit2 <- glm( formula = y ~ ., data = latent.data[, com.fac, drop = FALSE], family = MASS::negative.binomial(theta = theta) ), silent=TRUE ) try( fit4 <- glm( formula = y ~ ., data = latent.data[, c(com.fac, grp.fac)], family = MASS::negative.binomial(theta = theta) ), silent = TRUE ) if (is.numeric(x = fit2) || is.numeric(x = fit4)) { message('One of the glm.nb calls failed') return(c(rep(x = NA, 5), freqs)) } pval <- anova(fit2, fit4, test = 'Chisq')$'Pr(>Chi)'[2] foi <- 2 + length(x = com.fac) log2.fc <- log2(x = 1 / exp(x = coef(object = fit4)[foi])) ret <- c( fit2$deviance, fit4$deviance, pval, coef(object = fit4)[foi], log2.fc, freqs ) names(x = ret) <- c( 'dev1', 'dev2', 'pval', 'coef', 'log2.fc', 'freq1', 'freq2' ) return(ret) } PerformDE <- function( object, cells.1, cells.2, features, test.use, verbose, min.cells.feature, latent.vars, densify, ... ) { if (!(test.use %in% DEmethods_latent()) && !is.null(x = latent.vars)) { warning( "'latent.vars' is only used for the following tests: ", paste(DEmethods_latent(), collapse=", "), call. = FALSE, immediate. = TRUE ) } if (!test.use %in% DEmethods_checkdots()) { CheckDots(...) } data.use <- object[features, c(cells.1, cells.2), drop = FALSE] if (densify){ data.use <- as.matrix(x = data.use) } de.results <- switch( EXPR = test.use, 'wilcox' = WilcoxDETest( data.use = data.use, cells.1 = cells.1, cells.2 = cells.2, verbose = verbose, ... ), 'bimod' = DiffExpTest( data.use = data.use, cells.1 = cells.1, cells.2 = cells.2, verbose = verbose ), 'roc' = MarkerTest( data.use = data.use, cells.1 = cells.1, cells.2 = cells.2, verbose = verbose ), 't' = DiffTTest( data.use = data.use, cells.1 = cells.1, cells.2 = cells.2, verbose = verbose ), 'negbinom' = GLMDETest( data.use = data.use, cells.1 = cells.1, cells.2 = cells.2, min.cells = min.cells.feature, latent.vars = latent.vars, test.use = test.use, verbose = verbose ), 'poisson' = GLMDETest( data.use = data.use, cells.1 = cells.1, cells.2 = cells.2, min.cells = min.cells.feature, latent.vars = latent.vars, test.use = test.use, verbose = verbose ), 'MAST' = MASTDETest( data.use = data.use, cells.1 = cells.1, cells.2 = cells.2, latent.vars = latent.vars, verbose = verbose, ... ), "DESeq2" = DESeq2DETest( data.use = data.use, cells.1 = cells.1, cells.2 = cells.2, verbose = verbose, ... 
), "LR" = LRDETest( data.use = data.use, cells.1 = cells.1, cells.2 = cells.2, latent.vars = latent.vars, verbose = verbose ), stop("Unknown test: ", test.use) ) return(de.results) } #' Prepare object to run differential expression on SCT assay with multiple models #' #' Given a merged object with multiple SCT models, this function uses minimum #' of the median UMI (calculated using the raw UMI counts) of individual objects #' to reverse the individual SCT regression model using minimum of median UMI #' as the sequencing depth covariate. #' The counts slot of the SCT assay is replaced with recorrected counts and #' the data slot is replaced with log1p of recorrected counts. #' @param object Seurat object with SCT assays #' @param assay Assay name where for SCT objects are stored; Default is 'SCT' #' @param verbose Print messages and progress #' @importFrom Matrix Matrix #' @importFrom sctransform correct_counts #' #' @return Returns a Seurat object with recorrected counts and data in the SCT assay. #' @export #' #' @concept differential_expression #' @examples #' data("pbmc_small") #' pbmc_small1 <- SCTransform(object = pbmc_small, variable.features.n = 20) #' pbmc_small2 <- SCTransform(object = pbmc_small, variable.features.n = 20) #' pbmc_merged <- merge(x = pbmc_small1, y = pbmc_small2) #' pbmc_merged <- PrepSCTFindMarkers(object = pbmc_merged) #' markers <- FindMarkers( #' object = pbmc_merged, #' ident.1 = "0", #' ident.2 = "1", #' assay = "SCT" #' ) #' pbmc_subset <- subset(pbmc_merged, idents = c("0", "1")) #' markers_subset <- FindMarkers( #' object = pbmc_subset, #' ident.1 = "0", #' ident.2 = "1", #' assay = "SCT", #' recorrect_umi = FALSE #' ) #' PrepSCTFindMarkers <- function(object, assay = "SCT", verbose = TRUE) { if (length(x = levels(x = object[[assay]])) == 1) { if (verbose) { message("Only one SCT model is stored - skipping recalculating corrected counts") } return(object) } observed_median_umis <- lapply( X = SCTResults(object = object[[assay]], slot = "cell.attributes"), FUN = function(x) median(x[, "umi"]) ) model.list <- slot(object = object[[assay]], name = "SCTModel.list") median_umi.status <- lapply(X = model.list, FUN = function(x) { return(tryCatch( expr = slot(object = x, name = 'median_umi'), error = function(...) {return(NULL)}) )}) if (any(is.null(x = unlist(x = median_umi.status)))){ # For old SCT objects median_umi is set to median umi as calculated from obserbed UMIs slot(object = object[[assay]], name = "SCTModel.list") <- lapply(X = model.list, FUN = UpdateSlots) SCTResults(object = object[[assay]], slot = "median_umi") <- observed_median_umis } model_median_umis <- SCTResults(object = object[[assay]], slot = "median_umi") min_median_umi <- min(unlist(x = observed_median_umis)) if (all(unlist(x = model_median_umis) == min_median_umi)){ if (verbose){ message("Minimum UMI unchanged. 
Skipping re-correction.") } return(object) } if (verbose) { message(paste0("Found ", length(x = levels(x = object[[assay]])), " SCT models.", " Recorrecting SCT counts using minimum median counts: ", min_median_umi)) } umi.assay <- unique( x = unlist( x = SCTResults(object = object[[assay]], slot = "umi.assay") ) ) if (length(x = umi.assay) > 1) { stop("Multiple UMI assays are used for SCTransform: ", paste(umi.assay, collapse = ", ") ) } raw_umi <- GetAssayData(object = object, assay = umi.assay, slot = "counts") corrected_counts <- Matrix( nrow = nrow(x = raw_umi), ncol = ncol(x = raw_umi), data = 0, dimnames = dimnames(x = raw_umi), sparse = TRUE ) cell_attr <- SCTResults(object = object[[assay]], slot = "cell.attributes") model_pars_fit <- lapply( X = SCTResults(object = object[[assay]], slot = "feature.attributes"), FUN = function(x) x[, c("theta", "(Intercept)", "log_umi")] ) arguments <- SCTResults(object = object[[assay]], slot = "arguments") model_str <- SCTResults(object = object[[assay]], slot = "model") set_median_umi <- rep(min_median_umi, length(levels(x = object[[assay]]))) names(set_median_umi) <- levels(x = object[[assay]]) set_median_umi <- as.list(set_median_umi) # correct counts for (model_name in levels(x = object[[assay]])) { model_genes <- rownames(x = model_pars_fit[[model_name]]) x <- list( model_str = model_str[[model_name]], arguments = arguments[[model_name]], model_pars_fit = as.matrix(x = model_pars_fit[[model_name]]), cell_attr = cell_attr[[model_name]] ) cells <- rownames(x = cell_attr[[model_name]]) umi <- raw_umi[model_genes, cells] umi_corrected <- correct_counts( x = x, umi = umi, verbosity = 0, scale_factor = min_median_umi ) corrected_counts[rownames(umi_corrected), colnames(umi_corrected)] <- umi_corrected } corrected_data <- log1p(corrected_counts) suppressWarnings({object <- SetAssayData(object = object, assay = assay, slot = "counts", new.data = corrected_counts)}) suppressWarnings({object <- SetAssayData(object = object, assay = assay, slot = "data", new.data = corrected_data)}) SCTResults(object = object[[assay]], slot = "median_umi") <- set_median_umi return(object) } # given a UMI count matrix, estimate NB theta parameter for each gene # and use fit of relationship with mean to assign regularized theta to each gene # #' @importFrom stats glm loess poisson #' @importFrom utils txtProgressBar setTxtProgressBar # RegularizedTheta <- function(cm, latent.data, min.theta = 0.01, bin.size = 128) { genes.regress <- rownames(x = cm) bin.ind <- ceiling(x = 1:length(x = genes.regress) / bin.size) max.bin <- max(bin.ind) message('Running Poisson regression (to get initial mean), and theta estimation per gene') pb <- txtProgressBar(min = 0, max = max.bin, style = 3, file = stderr()) theta.estimate <- c() for (i in 1:max.bin) { genes.bin.regress <- genes.regress[bin.ind == i] bin.theta.estimate <- unlist( x = parallel::mclapply( X = genes.bin.regress, FUN = function(j) { return(as.numeric(x = MASS::theta.ml( y = cm[j, ], mu = glm( formula = cm[j, ] ~ ., data = latent.data, family = poisson )$fitted ))) } ), use.names = FALSE ) theta.estimate <- c(theta.estimate, bin.theta.estimate) setTxtProgressBar(pb = pb, value = i) } close(con = pb) UMI.mean <- apply(X = cm, MARGIN = 1, FUN = mean) var.estimate <- UMI.mean + (UMI.mean ^ 2) / theta.estimate for (span in c(1/3, 1/2, 3/4, 1)) { fit <- loess( formula = log10(x = var.estimate) ~ log10(x = UMI.mean), span = span ) if (! 
any(is.na(x = fit$fitted))) { message(sprintf( 'Used loess with span %1.2f to fit mean-variance relationship\n', span )) break } } if (any(is.na(x = fit$fitted))) { stop('Problem when fitting NB gene variance in RegularizedTheta - NA values were fitted.') } theta.fit <- (UMI.mean ^ 2) / ((10 ^ fit$fitted) - UMI.mean) names(x = theta.fit) <- genes.regress to.fix <- theta.fit <= min.theta | is.infinite(x = theta.fit) if (any(to.fix)) { message( 'Fitted theta below ', min.theta, ' for ', sum(to.fix), ' genes, setting them to ', min.theta ) theta.fit[to.fix] <- min.theta } return(theta.fit) } # FindMarkers helper function for cell grouping error checking ValidateCellGroups <- function( object, cells.1, cells.2, min.cells.group ) { if (length(x = cells.1) == 0) { stop("Cell group 1 is empty - no cells with identity class ", cells.1) } else if (length(x = cells.2) == 0) { stop("Cell group 2 is empty - no cells with identity class ", cells.2) return(NULL) } else if (length(x = cells.1) < min.cells.group) { stop("Cell group 1 has fewer than ", min.cells.group, " cells") } else if (length(x = cells.2) < min.cells.group) { stop("Cell group 2 has fewer than ", min.cells.group, " cells") } else if (any(!cells.1 %in% colnames(x = object))) { bad.cells <- colnames(x = object)[which(x = !as.character(x = cells.1) %in% colnames(x = object))] stop( "The following cell names provided to cells.1 are not present: ", paste(bad.cells, collapse = ", ") ) } else if (any(!cells.2 %in% colnames(x = object))) { bad.cells <- colnames(x = object)[which(x = !as.character(x = cells.2) %in% colnames(x = object))] stop( "The following cell names provided to cells.2 are not present: ", paste(bad.cells, collapse = ", ") ) } } # Differential expression using Wilcoxon Rank Sum # # Identifies differentially expressed genes between two groups of cells using # a Wilcoxon Rank Sum test. Makes use of limma::rankSumTestWithCorrelation for a # more efficient implementation of the wilcoxon test. Thanks to Yunshun Chen and # Gordon Smyth for suggesting the limma implementation. # # @param data.use Data matrix to test # @param cells.1 Group 1 cells # @param cells.2 Group 2 cells # @param verbose Print a progress bar # @param ... Extra parameters passed to wilcox.test # # @return Returns a p-value ranked matrix of putative differentially expressed # features # #' @importFrom pbapply pbsapply #' @importFrom stats wilcox.test #' @importFrom future.apply future_sapply #' @importFrom future nbrOfWorkers # # @export # # @examples # data("pbmc_small") # pbmc_small # WilcoxDETest(pbmc_small, cells.1 = WhichCells(object = pbmc_small, idents = 1), # cells.2 = WhichCells(object = pbmc_small, idents = 2)) # WilcoxDETest <- function( data.use, cells.1, cells.2, verbose = TRUE, ... 
) { data.use <- data.use[, c(cells.1, cells.2), drop = FALSE] j <- seq_len(length.out = length(x = cells.1)) my.sapply <- ifelse( test = verbose && nbrOfWorkers() == 1, yes = pbsapply, no = future_sapply ) overflow.check <- ifelse( test = is.na(x = suppressWarnings(length(x = data.use[1, ]) * length(x = data.use[1, ]))), yes = FALSE, no = TRUE ) limma.check <- PackageCheck("limma", error = FALSE) if (limma.check[1] && overflow.check) { p_val <- my.sapply( X = 1:nrow(x = data.use), FUN = function(x) { return(min(2 * min(limma::rankSumTestWithCorrelation(index = j, statistics = data.use[x, ])), 1)) } ) } else { if (getOption('Seurat.limma.wilcox.msg', TRUE) && overflow.check) { message( "For a more efficient implementation of the Wilcoxon Rank Sum Test,", "\n(default method for FindMarkers) please install the limma package", "\n--------------------------------------------", "\ninstall.packages('BiocManager')", "\nBiocManager::install('limma')", "\n--------------------------------------------", "\nAfter installation of limma, Seurat will automatically use the more ", "\nefficient implementation (no further action necessary).", "\nThis message will be shown once per session" ) options(Seurat.limma.wilcox.msg = FALSE) } group.info <- data.frame(row.names = c(cells.1, cells.2)) group.info[cells.1, "group"] <- "Group1" group.info[cells.2, "group"] <- "Group2" group.info[, "group"] <- factor(x = group.info[, "group"]) data.use <- data.use[, rownames(x = group.info), drop = FALSE] p_val <- my.sapply( X = 1:nrow(x = data.use), FUN = function(x) { return(wilcox.test(data.use[x, ] ~ group.info[, "group"], ...)$p.value) } ) } return(data.frame(p_val, row.names = rownames(x = data.use))) } Seurat/NEWS.md0000644000176200001440000005630214170333512012620 0ustar liggesusers# Seurat 4.1.0 (2022-01-14) ## Added - Add `raster.dpi` parameter to `DimPlot/FeaturePlot` to optionally rasterize individual points ([#5392](https://github.com/satijalab/seurat/pull/5392)) - Add support for sctransform v2, differential expression on with SCT ## Changes - Update `ReadParseBio` to support split-pipe 0.9.6p ([#5446](https://github.com/satijalab/seurat/pull/5446)) - Fixes for MAST differential expression ([#5441](https://github.com/satijalab/seurat/issues/5441)) - Fix scaling options when using `split.by` in `FeaturePlot()` ([#5243](https://github.com/satijalab/seurat/issues/5243)) # Seurat 4.0.6 (2021-12-16) ## Added - Implement supervised LSI ## Changes - Add `raster` parameter to `VlnPlot` to optionally rasterize individual points ([#5076](https://github.com/satijalab/seurat/pull/5076)) - Add `min.cells.group` parameter to `FindConservedMarkers` ([#5079](https://github.com/satijalab/seurat/pull/5079)) - Set `do.center` to FALSE for `lsiproject` in `FindTransferAnchors` - Fix error message in `ReadMtx()` ([#5158](https://github.com/satijalab/seurat/issues/5158)) - Add `label.color` parameter to `FeaturePlot` ([#5314](https://github.com/satijalab/seurat/pull/5314)) - Fix issues in `ProjectUMAP` ([#5257](https://github.com/satijalab/seurat/issues/5257), [#5104](https://github.com/satijalab/seurat/issues/5104), [#5373](https://github.com/satijalab/seurat/issues/5373)) # Seurat 4.0.5 (2021-10-04) ## Changes - Update documentation for `to.upper` parameter in `Load10X_Spatial()` ([#4576](https://github.com/satijalab/seurat/issues/4576)) - Update concept tags for `RunSPCA()` ([#4978](https://github.com/satijalab/seurat/discussions/4987)) - Conditionally run tests/packages that use suggested packages 
([#5160](https://github.com/satijalab/seurat/pull/5160)) - Set random state in `RunUMAP()` when using the `umap-learn` method ([#5194](https://github.com/satijalab/seurat/issues/5194)) # Seurat 4.0.4 (2021-08-19) ## Added - Add `reduction` parameter to `BuildClusterTree()` ([#4598](https://github.com/satijalab/seurat/issues/4598)) - Add DensMAP option to `RunUMAP()` ([#4630](https://github.com/satijalab/seurat/pull/4630)) - Add `image` parameter to `Load10X_Spatial()` and `image.name` parameter to `Read10X_Image()` ([#4641](https://github.com/satijalab/seurat/pull/4641)) - Add `ReadSTARsolo()` function to read output from STARsolo - Add `densify` parameter to `FindMarkers()` - Add `ReadParsebio()` function to read output from Parse Biosciences - Add the `image.alpha` parameter to `SpatialDimPlot()` and `SpatialFeaturePlot()` - Add support for the correlation metric in `RunUMAP` ([#4972](https://github.com/satijalab/seurat/issues/4972)) ## Changes - Warn and continue rather than erroring if not all features are available in `FindSpatiallyVariableFeatures()` ([#4611](https://github.com/satijalab/seurat/issues/4611)) - Bug fix for SCT-based integration in selecting proper reference model ([#4355](https://github.com/satijalab/seurat/issues/4355)) - Bug fix for reading from absolute paths in ReadMtx ([#4723](https://github.com/satijalab/seurat/issues/4723)) - Bug fix in SingleCellExperiment conversion ([#4633](https://github.com/satijalab/seurat/issues/4633)) - Bug fix in `FindVariableFeatures()` when using `selection.method = "mvp"` and `binning.method = "equal_frequency"` ([#4712](https://github.com/satijalab/seurat/issues/4712)) - Bug fix in `DoHeatmap()` to remove random characters from plot legend([#4660](https://github.com/satijalab/seurat/issues/4660)) - Fix cell renaming in `RunCCA()` - Fix issue in SingleCellExperiment conversion where the mainExp would not be set properly - Fix for default dispersion info displayed in `VariableFeaturePlot()` # Seurat 4.0.3 (2021-06-10) ## Added - Add `jitter` parameter to `FeatureScatter()` ## Changes - Fix issues with `as.SingleCellExperiment.Seurat()` for the latest verion of SingleCellExperiment ([#4532](https://github.com/satijalab/seurat/pull/4532)) - Ensure proper reference.reduction is used in `MapQuery()` - Fix to `UpdateSymbolList()`, no longer searches aliases and exposes the `search.types` parameter in `GeneSymbolThesarus()` ([#4545](https://github.com/satijalab/seurat/issues/4545)) - Transfer `scale.data` slot as well when converting with `as.SingleCellExperiment.Seurat()` - Enable `alpha` parameter for `SpatialDimPlot()` - Fix `as.SingleCellExperiment.Seurat()` conversion for atypical `reducedDim` components # Seurat 4.0.2 (2020-05-20) ## Added - New `AddAzimuthScores()` and `AddAzimuthResults()` functions - Add `shuffle` parameter to `FeatureScatter()` ([#4280](https://github.com/satijalab/seurat/pull/4280)) - Add `lsiproject` and `rpca` options for `FindTransferAnchors()` - Add `rlsi` option for `FindIntegrationAnchors()` ## Changes - Preserve feature metadata when converting from `SingleCellExperiment` to `SeuratObject` class ([#4205](https://github.com/satijalab/seurat/issues/4205)) - Preserve multiple assays when converting from `SingleCellExperiment` to `SeuratObject` class ([#3764](https://github.com/satijalab/seurat/issues/3764)) - Fix passing of `score.thresh` parameter in `ScoreJackStraw()` ([#4268](https://github.com/satijalab/seurat/pull/4268)) - Fix FC calculation in `FindMarkers()` non-log transformed data. 
- Add umap-learn version >= 0.5.0 compatibility for `RunUMAP()` - Fix `DotPlot` to use `log1p` when `scale=False` ([#4298](https://github.com/satijalab/seurat/issues/4298)) - Fix split and shuffled `DimPlot` - Disallow NULL or another length 0 vector for `ident.1` in `FindMarkers()` - Fix range shift when labeling clusters on a GeomSpatial plot - Fix SpatialPlot distortion for non-square images. - Fix future-related warnings in `FindIntegrationAnchors()` - Fix `fc.name` parameter in `FindMarkers()` ([#4474](https://github.com/satijalab/seurat/issues/4474)) - Deprecate `group.by` parameter in `PlotPerturbScore()` in favor of `mixscape.class`. # Seurat 4.0.1 (2020-03-17) ## Added - Add direction option to `PlotClusterTree()` - Add `cols` parameter to `JackStrawPlot()` - Add `ReadMtx()` to read local and remote mtx files with associated cell and feature name files ## Changes - Equality added to differential expression thresholds in `FindMarkers` (e.g, >= logfc.threshold rather than >) - `Read10X()` now prepends dataset number for first dataset when reading multiple datasets - Bug fix for `subset.AnchorSet()` - Bug fix for fold change values in `FindMarkers()` when setting a different pseudocount ([#4111](https://github.com/satijalab/seurat/pull/4111)) - Bug fix for `RunLDA()` related to proper passing of assay parameter. - When using `order=TRUE` in `SingleDimPlot()`, print NA points under all others. - Remove default parameter value for `data.dir` in `Read10X()` - Import spatstat fxns from subpackages (spatstat.core, spatstat.geom) - `RunUMAP` now checks for graph/neighbor consistency # Seurat 4.0.0 (2020-01-27) ## Added - Expose `FoldChange()` component in `FindMarkers()`. - Add the `merge.DimReduc` method - Add `IntegrateEmbeddings()` to correct embeddings of `DimReduc`s - Add `ProjectUMAP()` to project query cells into a reference UMAP space - Add `MapQuery()` as a wrapper around `IntegrateData()`, `IntegrateEmbeddings()`, and `ProjectUMAP()` - Add `MappingScore` to compute a per-cell mapping score used in Azimuth - Add `AggregateExpression()` for summation based pseudobulk calculations - Add mixscape functionality via `CalcPerturbSig()`, `PrepLDA()`, `RunLDA()`, `DEenrichRPlot()`, `MixscapeHeatmap()`, `MixscapeLDA()`, `PlotPerturbScore()`, `RunMixscape()` - Add `FindSubCluster()` to further cluster existing clusters - Add supervised PCA functionality via `RunSPCA()` - Add functionality to enable weighted nearest neighbor analyses via `FindMultiModalNeighbors()` - Add neighbor visualization plot via `NNPlot()`. - Add `PredictAssay()` to impute expression or embeddings from nearest neighbors - Add `Graphs()` function to access the names of the stored Graph objects or pull a specific one - Add checks for NA, NaN, logical, non-integer, and infinite values during CreateAssayObject and NormalizeData.default - Add `AnnotateAnchors()` to aid in AnchorSet interpretation as well as `subset.AnchorSet()` - Add flexibility of choice for cell column in `Read10X()` - Add rasterization option to `FeatureScatter()` and `VariableFeaturePlot()` - Add step1 feature parameters in the SCTModel via `PrepVSTResults()` ## Changes - Default neighbor finding algorithm changed from "rann" to "annoy" - Default `ncells` parameter in `SCTransform()` changed to 5000 - Default fold change in `FindMarkers()` changed from ln to log2 - Implementation improvements to `AverageExpression()` - `AnchorSet` class re-implemented as a virtual class from which `IntegrationAnchorSet` and `TransferAnchorSet` now inherit. 
- Point size in `VlnPlot()` now set automatically if not specified - Return the sample.tree properly when integrating with a single reference dataset - Replace `as.character.quosure` usage with `as_label` due to deprecation - Minor changes to the exact calculation of the anchor weight matrix - Default rasterization limit in `DimPlot()` and `FeaturePlot()` changed from 50,000 to 100,000 - `SCTransform()` now returns a formalized `Assay` subclass `SCTAssay()` - When using `normalization.method='SCT'` in `FindTransferAnchors()`, normalize query using reference SCT model when possible. - Change default Neighbor name in `FindNeighbors` to `Assay.nn` ## Removed - `CreateGeneActivityMatrix` replaced by `Signac::GeneActivity()` - `RunLSI` replaced by by `Signac::RunTFIDF()` and `Signac::RunSVD()` - `ReadAlevin` and `ReadAlevinCsv` moved to SeuratWrappers - `ExportToCellbrowser` and `StopCellbrowser` moved to SeuratWrappers # Seurat 3.2.3 - 2020-12-14 ## Added - Titles added to `DimPlot` when specifying `group.by` parameter - `keep.scale` parameter added to `FeaturePlot` to control scaling across multiple features and/or splits. ## Changes - `Same` deprecated in favor of `base::identity` - Fix in `DietSeurat` to work with specialized `Assay` objects - Fix p-value return when using the `ape` implementation of Moran's I - Fix bug in FindMarkers when using MAST with a latent variable - Updates to `Key<-.DimReduc` that allow handling of empty reduction column names - Allow setting `ctrl` in `CellCycleScoring` - Modify subset.Seurat to allow specialized Assay subsetting methods - Fix image selection in interactive spatial plots - Update Rcpp functions with `export(rng=FALSE)` to avoid potential future warnings - Fix RenameCells bug for integrated SCT assays - Fix highlight order with proper factor levels when using `SetHighlight` in plots - Small change in CellRanger version detection logic of h5 file to improve robustness to outside tools. - `do.cpp` deprecated and will default to true # Seurat 3.2.2 (2020-09-25) ## Changes - Set the seed in `WhichCells` regardless of whether or not `idents` is passed - Retain Graph and Neighbor objects when subsetting only on features - Fix data.frame input to `CreateAssayObject()` when data.frame has no rownames. - Default annoy search to sequential if not using multicore future plans. 
- Require sctransform >= 0.3.0 # Seurat 3.2.1 (2020-09-04) ## Added - Added support for nearest neighbor input and `return.model` parameter in `RunUMAP()` - Enable named color vectors in `DoHeatmap()` - Add `label.color` and `label.box` parameters to `DimPlot` - Added `shuffle` and `seed` parameters to `DimPlot()` to help with overplotting - Added new stacked violin plot functionality ## Changes - Allow setting `slot` parameter in `RunUMAP` - Added support for FIt-SNE v1.2+ - Fix for `Spatial*Plot` when running with interactive=TRUE - Set max for number of items returned by `Top` and remove duplicate items when balanced=TRUE - Fix logging bug when functions were run via `do.call()` - Fix handling of weight.by.var parameter when approx=FALSE in `RunPCA()` - Fix issue where feature names with dashes crashed `CellSelector` - Fix issue where errors in subsetting were being swallowed - Fix issue where labeling uncropped spatial plots was broken ## Deprecated - `CreateActivityMatrix` deprecated in favor of `Signac::GeneActivity` - `ReadAlevin` and `ReadAlevinCsv` deprecated in favor of `SeuratWrappers::ReadAlevin` - `ExportToCellbrowser` and `StopCellbrowser` deprecated in favor of `SeuratWrappers::ExportToCellbrowser` and `SeuratWrappers::StopCellbrowser` - `ReadH5AD` and `WriteH5AD` deprecated in favor of h5Seurat/H5AD functionality found in SeuratDisk - `as.loom` and `as.Seurat.loom` deprecated in favor of functionality found in SeuratDisk # Seurat 3.2.0 (2020-07-15) ## Added - Added ability to create a Seurat object from an existing Assay object, or any object inheriting from the Assay class - Added ability to cluster idents and group features in `DotPlot` - Added ability to use RColorBrewer plaettes for split `DotPlots` - Added visualization and analysis functionality for spatially resolved datasets (Visium, Slide-seq). ## Changes - Removed `add.iter` parameter from `RunTSNE` function - Fixed integer overflow error in the WilcoxDETest function - Minor visual fixes in `DoHeatmap` group bar + labels - Efficiency improvements in anchor scoring (`ScoreAnchors`) - Fix bug in `FindClusters()` when the last node has no edges - Default to weighted = TRUE when constructing igraph objects in `RunLeiden`. Remove corresponding weights parameter from `FindClusters()`. - Fix handling of keys in `FeatureScatter()` - Change `CellSelector` to use Shiny gadgets instead of SDMTools - Mark `PointLocator` as defunct - Remove `SDMTools` - Fixed data slot return in `AverageExpression` when subsetting features and returning a Seurat object # Seurat 3.1.5 (2020-04-14) ## Added - New `scale` parameter in `DotPlot` - New `keep.sparse parameter in `CreateGeneActivityMatrix` for a more memory efficient option - Added ability to store model learned by UMAP and project new data - New `strip.suffix` option in `Read10X`. **This changes the default behavior of `Read10X`**. A trailing `-1` present in all cell names will not be removed by default. 
- Added `group.by` parameter to `FeatureScatter` ## Changes - Replace wilcox.test with limma implementation for a faster FindMarkers default method - Better point separation for `VlnPlot`s when using the `split.by` option - Efficiency improvements for anchor pairing - Deprecate redundant `sort.cell` parameter in `FeaturePlot` - Fixes to ensure correct class of Matrix passed to c++ functions - Fixes for underscores in ident labels for `DotPlot` - Ensure preservation of matrix dimnames in `SampleUMI` - Fix non-standard evaluation problems in `subset` and `WhichCells` - Default split violin option is now a multi group option - Preserve alpha in `FeaturePlot` when using `blend` - Update `assay.used` slot for `DimReduc`s when Assay is renamed # Seurat 3.1.4 (2020-02-20) ## Changes - Fixes to `DoHeatmap` to remain compatible with ggplot2 v3.3 - Adoption of `patchwork` framework to replace `CombinePlots` # Seurat 3.1.3 (2020-02-07) ## Added - New system agnostic `Which` function to address problems with FItSNE on Windows ## Changes - Export `CellsByIdentities` and `RowMergeSparseMatrices` functions - nCount and nFeature metadata variables retained after subset and updated properly with `UpdateSeuratObject` - Fix uwot support for running directly on feature matrices - Fixes for keys with underscores - Fix issue with leiden option for `FindClusters` - Fix for data transfer when using sctransform - SDMTools moved to Suggests as package is orphaned # Seurat 3.1.2 (2019-12-11) ## Added - New silent slot updater - New random seed options to `RunCCA`, `RunTSNE`, `WhichCells`, `HTODemux`, `AddModuleScore`, `VlnPlot`, and `RidgePlot` - Enhancements for dealing with `Assay`-derived objects ## Changed - Only run `CalcN` (generates nFeatures and nCounts) when `counts` changes - Fix issue regarding colons in feature names - Change object class testing to use `inherits` or `is.*` for R 4.0 compatability # Seurat 3.1.1 (2019-09-20) ## Added - New `RegroupIdents` function to reassign idents based on metadata column majority - `UpdateSymbolList` function to pull new gene names from HGNC - Added support for H5AD layers as additional assays in a `Seurat` object ## Changed - Fix rownames issue when running UMAP on dist object - Add support for new H5AD `obsm` and `varm` stucture - Fix issue when trying to read non-existent feature-level metadata from an H5AD file - Fix in integration workflow when using SCTransform - Improved error checking for `AddModuleScore` - cbind fix in reference-based integration (`MapQuery`) - Fix for convenience plots error hanging - Ensure Seurat objects aren't stored in the command logs # Seurat 3.1.0 (2019-08-20) ## Added - New `PrepSCTIntegration` function to facilitate integration after `SCTransform` - Reference-based integration with the `reference` parameter in `FindIntegrationAnchors` - Reciprocal PCA as a `reduction` option in `FindIntegrationAnchors` - New `CollapseEmbeddingOutliers` function - Enable `FindTransferAnchors` after `SCTransform` - Added back `ColorDimSplit` functionality - Include a code of conduct - Added uwot support as new default UMAP method - Added `CheckDots` to catch unused parameters and suggest updated names - `Reductions` and `Assays` assays functions to list stored DimReducs and Assays ## Changed - Fix regex in `LogSeuratCommand` - Check for NAs in feature names in `Read10X` - Prevent dimnames for counts/data/scale.data matrices from being arrays - Updates `ReadH5AD` to distinguish FVF methods - Fixes to UpdateSeuratObject for v2 objects - Sink all output 
from stdout to stderr - Fix to scale.data cell ordering after subsetting - Enable `Assay` specification in `BuildClusterTree` - Fix `FeaturePlot` when using both `blend` and `split.by` - Fix to `WhichCells` when passing `cells` and `invert` - Fix to `HoverLocator` labels and title - Ensure features names don't contain pipes (`|`) - Deprecation of `RunLSI` and `RunALRA` - Fix legend bug when sorting in `ExIPlot` # Seurat 3.0.2 (2019-06-07) ## Added - Flag to skip singleton grouping in `FindClusters` - New custom colors for blended `FeaturePlot`s - New `GetResidual` function - New Seurat/Monocle converters ## Changed - Fix issue where certain assays weren't being shown in the `Seurat` object - Fix issue where we weren't updating `DimReduc` object column names - Fix line spacers in `DoHeatmap` - Fix uninformative labels in `FeaturePlot` - Fix unset identities when converting from SCE to Seurat - Fix single colors being interpreted as palettes in `SingleDimPlot` - Ensure factor levels are always numerically increasing after `FindClusters` - Better cell highlighting colors for `DimPlot` - Fix to `levels<-.Seurat` - Add ability to use counts/scaled data in `BuildClusterTree` - Minor fix to split `ScaleData` # Seurat 3.0.1 (2019-05-16) ## Added - Add global option (Seurat.memsafe) to skip gc() calls - Restore draw.lines to DoHeatmap, maintain size of color bar with different number of features (#1429) - Enable split.by parameter for ScaleData - Add slot parameter to FeaturePlot (#1483) - Add assay parameter to DotPlot (#1404) ## Changed - Fix to color options for VlnPlot with split.by option (#1425) - Improvements to conversion functions (loom, SCE) - Fix for cluster tree reordering (#1434) - Fix PercentageFeatureSet for single feature case - Fix to fold change calculation and filtering for other slots in FindMarkers (#1454) - Keep title vectorized in AugmentPlot (#1515) - Export LogSeuratCommand function - Fix for FindConservedMarkers when one ident is missing from a group (#1517) # Seurat 3.0.0 (2019-04-16) ## Added - New method for identifying anchors across single-cell datasets - Parallelization support via future - Additional method for demultiplexing with MULTIseqDemux - Support normalization via sctransform - New option for clustering with the Leiden algorithm - Support for reading 10X v3 files - New function to export Seurat objects for the UCSC cell browser - Support for data import from Alevin outputs - Imputation of dropped out values via ALRA ## Changed - Significant code restructuring - Most occurances of "gene(s)" in function names/arguments renamed to "feature(s)" - Changes to the Seurat object class to facilitate multimodal data - New BlendPlot implementation # Seurat 2.3.4 (2018-07-13) ## Added - GetIdent function added to pull identity info ## Changed - DiffusionMap dependency replaced with destiny to avoid archival - Java dependency removed and functionality rewritten in Rcpp - Speed and efficiency improvements for Rcpp code - More robust duplicate handling in CellCycleScoring # Seurat 2.3.3 (2018-07-02) ## Added - New HTOHeatmap function - Support for custom PNG arguments for vector-friendly plotting - Fix for 'NA'-labeled cells disappearing with custom color scale ## Changed - Replaced FNN with RANN - Removed unused compiler flags - Moved several lightly-used packages from 'imports' to 'suggests' # Seurat 2.3.2 (2018-06-11) ## Added - RenameCells added for easy renaming of all cells - Read10X_h5 added to read in 10X formatted h5 files - SetAssayData ensures cell order is the 
same between assay objects and the Seurat object - Compatability updates for ggplot2 v2.3.0 # Seurat 2.3.1 (2018-05-03) ## Added - Support for [UMAP](https://github.com/lmcinnes/umap) dimensional reduction technique - New conversion functions for SingleCellExperiment and anndata ## Changed - FetchData preserves cell order - Require Matrix 1.2-14 or higher - AddModuleScore no longer densifies sparse-matrices - Various visualization fixes and improvements - Default value for latent.vars in FindMarkers/FindAllMarkers changed to NULL. # Seurat 2.3.0 (2018-03-22) ## Added - Support for HTO demultiplexing - Utility functions: TransferIdent, CombineIdent, SplitObject, vector.friendly - C++ implementation for parts of BuildSNN - Preliminary parallelization support (regression and JackStraw) - Support for FItSNE ## Changed - MetaDE replaced with metap for combining p-values (MetaDE was removed from CRAN) - NMF heatmaps replaced (NMF to be archived by CRAN) # Seurat 2.2.1 (2018-02-14) ## Changed - MetaDE replaced with metap for combining p-values (MetaDE was removed from CRAN) - NMF heatmaps replaced (NMF to be archived by CRAN) # Seurat 2.2.0 (2018-01-10) ## Added - Multiple alignment functionality with RunMultiCCA and AlignSubspace extended to multiple datasets - CalcAlignmentScore added to evaluate alignment quality - MetageneBicorPlot added to guide CC selection - Change cluster order in DoHeatmap with group.order parameter - Ability to change plotting order and add a title to DimPlot - do.clean and subset.raw options for SubsetData ## Changed - JoyPlot has been replaced with RidgePlot - FindClusters is now more robust in making temp files - MetaDE support for combining p-values in DE testing # Seurat 2.1.0 (2017-10-12) ## Added - Support for using MAST and DESeq2 packages for differential expression testing in FindMarkers - Support for multi-modal single-cell data via @assay slot ## Changed - Default DE test changed to Wilcoxon rank sum test # Seurat 2.0.1 (2017-08-18) ## Added - Now available on CRAN - Updated documentation complete with examples - Example datasets: `pbmc_small` and `cc.genes` - C++ implementation for parts of FindVariableGenes - Minor bug fixes # Seurat 2.0.0 (2017-07-26) ## Added - New method for aligning scRNA-seq datasets - Significant code restructuring - New methods for scoring gene expression and cell-cycle phases - New visualization features (do.hover, do.identify) Seurat/MD50000644000176200001440000003341014170340713012026 0ustar liggesusers170a01ee63273a1bafb38974f8dad9a7 *DESCRIPTION c2ef7c6f9278eb1816e1c98c41ea726b *LICENSE 0c4287f8aeae660f698fcfefc8d891d3 *NAMESPACE 6cf77e30abc9010f41ad33b85ffbb272 *NEWS.md 2dc8d94314ead6f77537d1fe0fc272bc *R/RcppExports.R 93af66cb2a87bf52bf5006e2ffb8c3c2 *R/clustering.R f9850a98e7bea7331b9de0e4b9562224 *R/convenience.R 0b3f35535683b39246cb65edb6ba397c *R/data.R 8090050d79ec3b18c763d90b1dcb0808 *R/differential_expression.R 4da889ad2a1ad5e5763c9216731c1bf2 *R/dimensional_reduction.R 5805fb83542494ec58dfb336d63f42ef *R/generics.R fd362ca19f975457182a750cba7ce552 *R/integration.R 33a2459aac940f09ed76fbcc25c93cf3 *R/mixscape.R c1fa83987205e341cbc688b630cc3425 *R/objects.R 07f1cad86062aa1a8230e5468bd1c0ea *R/preprocessing.R 0f60a5b2dcd4f0a5cc7a4f69e6325666 *R/reexports.R 161fa08bbfc520b06399d420a6a762a4 *R/tree.R 24e96eb11ba6889567c0517c0c412720 *R/utilities.R be221422498a10122a5608528cda5657 *R/visualization.R 19eff2d42b0cc3f708a07000df8c2689 *R/zzz.R 00f761744abd0aeb39b5cdb8ac85854a *README.md 9541ce732274cfc27ab7d89b9ac714a2 
*build/partial.rdb e84661f995a9f29d1a09ebcdc27d3a82 *data/cc.genes.rda 55d7e35793436d5e91646d774d9f86c4 *data/cc.genes.updated.2019.rda b5e59c959809d6be32d83366230ab3b9 *inst/CITATION d07fe4eee5c09cc44e96c17e683cb10e *inst/extdata/pbmc_raw.txt 7c36195b7392ae6314ce16350d5cee25 *man/AddAzimuthResults.Rd 462158a180fdcfdf426dfaf7da866273 *man/AddAzimuthScores.Rd 0d5cd843791accb854028e7d0957ad07 *man/AddModuleScore.Rd a8d8764f40430a47b1d478b199b7405a *man/AggregateExpression.Rd bd97300744c4b1065ba22d53c8f754da *man/AnchorSet-class.Rd 07c839ccf3fffc7a95d5035d3e294d11 *man/AnnotateAnchors.Rd 52233895af88c8e1697dd981c73667fe *man/Assay-class.Rd 66351cc0e0184ccc08cd3ec15b070079 *man/AugmentPlot.Rd d40f4485b82201107f8fcc38465cdc63 *man/AutoPointSize.Rd fc82bcb104c153c4af2246c78c5b8893 *man/AverageExpression.Rd d5eda677b7c80de9db62ee62052e761a *man/BGTextColor.Rd 7c84e2390af0071f75b8d66ebefe06d0 *man/BarcodeInflectionsPlot.Rd bebdf1e7843ba4bb43aa80f299c01271 *man/BuildClusterTree.Rd 33f13558c1c1be298a623aca4b19504b *man/CalcPerturbSig.Rd 6b19b5a68a1fbed9d71ac82343b21aab *man/CalculateBarcodeInflections.Rd 3550bef6cb5f40f4540f5dc70e80598d *man/CaseMatch.Rd b9abf47109862ea85c36ec6c511ac787 *man/CellCycleScoring.Rd 3aec17d0cae2627d1450ff2ea9732957 *man/CellScatter.Rd 9da9ef92375a1ad85e84fc0037a48784 *man/CellSelector.Rd f800b57b5031f111e80452c149221df1 *man/Cells.Rd 5b63f4f233cc0b576c2d4d506ff9be6b *man/CellsByImage.Rd 5600133820f9f0343d5edcff87c533c9 *man/CollapseEmbeddingOutliers.Rd 27a6b0d8531b5c59861ebca821ded6e9 *man/CollapseSpeciesExpressionMatrix.Rd 8d8fe977ac83bc5fad71851af4aa774a *man/ColorDimSplit.Rd 9eeec370a2aad6836e8b9027d654812a *man/CombinePlots.Rd 0a3d849e27f68d1390b4ef5ceb29a80c *man/CreateSCTAssayObject.Rd 86f23babe36a2c534f3401c93bf5a97c *man/CustomDistance.Rd 8ab923c6ef1bb0a548e140f45bf97ed5 *man/CustomPalette.Rd b71f27232ee879ac957ca8ebba678fe0 *man/DEenrichRPlot.Rd d3079f44fd87467d6897fd5a75b92409 *man/DietSeurat.Rd d9cb5fcbd90eea8518965240e031c80a *man/DimHeatmap.Rd 01076ef56e9733a1be9e4393e15f2f36 *man/DimPlot.Rd 6e505cdb81626e425958657f99813304 *man/DimReduc-class.Rd 7a75e214c7b5dce55c356fb185747a4b *man/DiscretePalette.Rd 9a4cdc630121bbf71d0747e53672040e *man/DoHeatmap.Rd f15067ea24f5255c765c84ec4217f83a *man/DotPlot.Rd 93448d7ad049632784186f00f0e825b8 *man/ElbowPlot.Rd 152070beaee889ad0c57d824c6b890b7 *man/ExpMean.Rd 8e2d2db070c229523b03d9841df4b089 *man/ExpSD.Rd 497e0fc0845d50baae3cff33fc98f9c7 *man/ExpVar.Rd 8b154c79dcd203853c3984baf37684c4 *man/FastRowScale.Rd 090d39ab7c777441e580547695458990 *man/FeaturePlot.Rd c8cd10a841c72a6c99aa285da3a33408 *man/FeatureScatter.Rd 8baf10ef47e718c9fc4aef521df50b64 *man/FilterSlideSeq.Rd 01bdd0ac10c08e766c52335599bf8551 *man/FindAllMarkers.Rd 1d688bcaad9c750e263169b0c1261149 *man/FindClusters.Rd a53264ad3caa397f14cb257a14c1cdb9 *man/FindConservedMarkers.Rd 82e76d88f388928c2bbff397c3ba22f4 *man/FindIntegrationAnchors.Rd d6ba3821ee3e557ab37908c0b74cb846 *man/FindMarkers.Rd 21881fce274bec77239e6df4ffe53f1b *man/FindMultiModalNeighbors.Rd 3fe5d8fa8b6736f5dd8a4ecd8b12be8b *man/FindNeighbors.Rd dd25658d0fdf38328a141d4f52dddecf *man/FindSpatiallyVariableFeatures.Rd d7110f47c2ecbd4354ee57832fdef4d2 *man/FindSubCluster.Rd 33046ff6448c4b2645e4a18166ae3ed1 *man/FindTransferAnchors.Rd 32599948ec351127dff90e94e09e9b31 *man/FindVariableFeatures.Rd 3dd0d73537caeaff4c313e4e75fe9478 *man/FoldChange.Rd b666afaab9ed8ac708a81e141ec4683b *man/GetAssay.Rd d134eeb6dee0f04678bbc6b3bfce768b *man/GetImage.Rd 55c7a101f7dffe9d50f12da10aff9b2e 
*man/GetIntegrationData.Rd 486a4693d365ad5f8fe722bf0be65b81 *man/GetResidual.Rd b32f57933f1a1cc82e9bc7a94f359ea2 *man/GetTissueCoordinates.Rd 35761ade67ea12a72c813f24130029fd *man/GetTransferPredictions.Rd c4adc0a85fc90e4e43d074460a06648c *man/Graph-class.Rd a8befbd8423c290c19261556acd8ff08 *man/GroupCorrelation.Rd 885bb61fa2d0a622b047dd56fa254a91 *man/GroupCorrelationPlot.Rd 4fe184e4b4c0eb3f87bef21123e29fe2 *man/HTODemux.Rd 72111f313e6e7fd80db16c58aeef7794 *man/HTOHeatmap.Rd 91a256c9547d50eb506e15f919b7bd3b *man/HVFInfo.SCTAssay.Rd ed1b839f07cf7f7ae24be300628ac47c *man/HoverLocator.Rd f7f6b58d2fc988ab8530ee9742f84b6f *man/IFeaturePlot.Rd 23213955f9eec6e6f018b6698c003b56 *man/ISpatialDimPlot.Rd 7fc5149890daca231e42471d50f17c36 *man/ISpatialFeaturePlot.Rd a54a94d6810b898e678110c8dc26ebbb *man/IntegrateData.Rd 1f15dd281aac92e779334f8fa2d3c60e *man/IntegrateEmbeddings.Rd 4ee5eb07563971a104e4cbab9e3d5919 *man/IntegrationAnchorSet-class.Rd d69da6ee725f4978305025b6de6df811 *man/IntegrationData-class.Rd a4c189c65347eb5a25c975e435b4cbf7 *man/JackStraw.Rd 19f4c49e6346b73f981cc2878d0cf8a0 *man/JackStrawData-class.Rd 2e3257c67d4b09ef94eb2052b532c42f *man/JackStrawPlot.Rd 3fe0920d29478f95724b9f0206055b25 *man/L2CCA.Rd 86eeddaaa3e6f0e51059523a23f627a3 *man/L2Dim.Rd 40c09a3566db9e1528716be9317cf882 *man/LabelClusters.Rd c7876bf181dd49fbfe1d6490c4fc5b0f *man/LabelPoints.Rd 2c733b2bdf8e2faf2371b170650be3e6 *man/LinkedPlots.Rd 6f9bfbabd79c9361c262eef7f1528e78 *man/Load10X_Spatial.Rd 9250526a506c24545de8856006893c67 *man/LoadAnnoyIndex.Rd af949a6114516b3ab62bb2f4b53a0e34 *man/LoadSTARmap.Rd 95af3201e2ef04e32b9d0ffd3f0feb85 *man/LocalStruct.Rd 3e0c659ba5264a49e73dabbc9e0e8af2 *man/LogNormalize.Rd 39eac70a16e242177b7840ae51d2071d *man/LogVMR.Rd e170c2cd21171f253f924db5785f60e5 *man/MULTIseqDemux.Rd bb0aa411e42426c9c798d3a384e0dba2 *man/MapQuery.Rd 1ed6848f442b000f162577967509cba9 *man/MappingScore.Rd 1b9c655f92a327b11582498272a88d1a *man/MetaFeature.Rd 3c9c2bbec11d6e88c372d87d08d2dbd9 *man/MinMax.Rd 90a4c3d5498f3ccf368e0825c755134f *man/MixingMetric.Rd c5d7cb74cc86d3a84a5c5da2ded6ee72 *man/MixscapeHeatmap.Rd c1e591b2e7f793e6c35933931f19b8fb *man/MixscapeLDA.Rd 8f87e82e16bc30af62cd4fd1df7d39e0 *man/ModalityWeights-class.Rd 85a1c011625963d7843baab5ffb0006c *man/NNPlot.Rd efabeb1b41ce7efcf7be4662c3bd0b99 *man/Neighbor-class.Rd 11c497fb2432d7b09fbedd83c7b64bcc *man/NormalizeData.Rd c3d6d895a2930a0c7d2ce6346a07a84f *man/PCASigGenes.Rd fe0087f1c4031926afee79c4a6e6a0d9 *man/PercentAbove.Rd 41b1017edf7a418ba6e5a3e806a60f37 *man/PercentageFeatureSet.Rd 704a2e0d792d8e022076ac5bdf1a11e0 *man/PlotClusterTree.Rd 9133ec5810b07a6e4336db5eda6463ba *man/PlotPerturbScore.Rd 60eef96f3631b33a83817398d092b40f *man/PolyDimPlot.Rd 98f32eebc4b09b2bfb37b544fd5b4bef *man/PolyFeaturePlot.Rd f17f639ccd8cd59c5a4e03e2becf4696 *man/PredictAssay.Rd 10061dc8b0080c7bfa4b07ed56e4fec6 *man/PrepLDA.Rd 969cbf5bdc32dbfb41662dcbafd23e41 *man/PrepSCTFindMarkers.Rd 8876533ee9fb0f09ceb17404d0155a0f *man/PrepSCTIntegration.Rd 5a806a28fc2e940b92438c8d9ec5ed7e *man/ProjectDim.Rd e030ec03f24899c203327c87662904d2 *man/ProjectUMAP.Rd 78d75698b1cd44ae3b8a0f54689f7477 *man/Radius.Rd 39cd8326812aa77d6afefdb44020eb99 *man/Read10X.Rd 3209a36f8b20f928b7dc1bc91353cb02 *man/Read10X_Image.Rd 265cea6403e2bbdc809197a2622cfd07 *man/Read10X_h5.Rd 5762e9f4a7845230cd51bc15ed3b566a *man/ReadMtx.Rd 66132a3df08e866a8cf5b3ca6dfde0b1 *man/ReadParseBio.Rd f1ad11d0fca856124de5a51b74030208 *man/ReadSTARsolo.Rd d1890cac6fdd515b30b1d531860a91cc *man/ReadSlideSeq.Rd 
1c547a95ca73f0797b3e5f5d1eedc121 *man/RegroupIdents.Rd 6b79dfaedf538506e893999f5883b60f *man/RelativeCounts.Rd a646d1b292f533da84baa95481c60e07 *man/RenameCells.Rd f3f53d714db4ec20cae6833278179a2c *man/RidgePlot.Rd 8c7e4c4917d8a13de26ca3763ce0406b *man/RunCCA.Rd 5e682dcc723d70793d1779200fb4f9d3 *man/RunICA.Rd dd27a4ffc07fc03a6cfb15314a83bfb9 *man/RunLDA.Rd 2cc88e408cf01a584c6ce9cbcc624eb5 *man/RunMarkVario.Rd a143c942a5fcb95de1423015f019561a *man/RunMixscape.Rd d4f90f5829c8313f3993d969b0dd4fd3 *man/RunMoransI.Rd 73c91ebf79ada8153c5f7358245d5aa3 *man/RunPCA.Rd fbeae51137e1875397177590d807006d *man/RunSLSI.Rd 12b3c3f7324f05f062ca11206da979f7 *man/RunSPCA.Rd e15f10d4cb42bef2939772c87784749d *man/RunTSNE.Rd 2fc96bdb2c3c33be7c1004dcb2c83c29 *man/RunUMAP.Rd eb75f0ff1ef042223504375fa21ce96f *man/SCTAssay-class.Rd 2d3d4cb04473bbca3bc30cf424121383 *man/SCTResults.Rd b17667147f68e8f0ccd64986730b7fd1 *man/SCTransform.Rd 30878060b6be825ac6c5e3613f2c72c8 *man/STARmap-class.Rd d5a943cf68955f0f7d1ac2cd9c7ed9fe *man/SampleUMI.Rd f693b40dd329878965a9fde4d8f715da *man/SaveAnnoyIndex.Rd 7996a9f557b0fa2bc69f8f20e2024157 *man/ScaleData.Rd 5847c32cd6ec6ffece89dc0f8c27994a *man/ScaleFactors.Rd 6a5a8f3d33c62a3302c03ddd7ae9aa1a *man/ScoreJackStraw.Rd 6a5621426e2d0a21126d7db9a6b71730 *man/SelectIntegrationFeatures.Rd b9d1869d6d91a923a32b44a53ded7928 *man/SetIntegrationData.Rd 36ac188e2049ee2afd46f86d72a519db *man/SetQuantile.Rd 303755316142cecedbbb571101e68308 *man/Seurat-class.Rd 96151168913a2fe502df012ce97de198 *man/Seurat-package.Rd 25019aff00c3dba933466df65a45aaa7 *man/SeuratCommand-class.Rd c75bfcb240a635f5d043b11ff2ba4945 *man/SeuratTheme.Rd 229eafd00b869a798cfd14f2a8824e25 *man/SingleCorPlot.Rd aaf8591996b04b87e324e7685fecc8d3 *man/SingleDimPlot.Rd 9e844e0bbe2f568d980ad4a1c2a75ca4 *man/SingleExIPlot.Rd 66ac79ddf7b86556f61db2a84af6ab52 *man/SingleImageMap.Rd f7634526d44a43bb62510bbbdd583797 *man/SingleRasterMap.Rd fcc2fa12797822e6fcd6bfa151c87082 *man/SingleSpatialPlot.Rd c903891a1ca061b6f15aa0c4bcd088ae *man/SlideSeq-class.Rd 2cf1ea98315c627fdb61cebb8f833dc2 *man/SpatialImage-class.Rd 6ff7d1a969d497a2d7530b4bd0666486 *man/SpatialPlot.Rd 693a850d4184b4e4ff54f1be3def8789 *man/SplitObject.Rd d78c8fa1d9543389a777500aae7f4633 *man/SubsetByBarcodeInflections.Rd 01f9df5bf0a798d91c7ced43f6d56052 *man/TopCells.Rd 98af7163f68d50deaf0a5247ce59e3d8 *man/TopFeatures.Rd e327422353f55d89f20ae13f1b1a153f *man/TopNeighbors.Rd 8ad4830f4b6516cc43e67c068450f52f *man/TransferAnchorSet-class.Rd 3a89681fe260b9db7ff8b7e879ea8a5c *man/TransferData.Rd e217e168b9ad9c8fa569d039f2c7560b *man/UpdateSCTAssays.Rd 93c5b0322467bbbf7d21fde062ffb97f *man/UpdateSymbolList.Rd 734b27d61ce81dcdd873dbd2161c3d25 *man/VariableFeaturePlot.Rd 78cac046284771a0c0bc42e2e7b28a71 *man/VisiumV1-class.Rd 5378c30ae1c19b5f2bfb2c1480f6ef6d *man/VizDimLoadings.Rd 576ac3fc59284b298fe5259d95775c76 *man/VlnPlot.Rd 4c05321384fe1f6619592fa1eaf1f228 *man/as.CellDataSet.Rd 03a3609f7132baaa37f738f6eacdb26b *man/as.Seurat.Rd 683d625a693896729e469f149c286fd7 *man/as.SingleCellExperiment.Rd 189cbc16f37bd7140a55c9b7b00a081e *man/as.sparse.Rd 18fba05b0ea57229dde949d7521f1a65 *man/cc.genes.Rd 35279bd7531c2f26183b3647e55dfd76 *man/cc.genes.updated.2019.Rd d843eff800711487e12a102038e5e7d4 *man/contrast-theory.Rd 5ac13a66d32c56c822f9a83c0c85a2b8 *man/merge.SCTAssay.Rd 8b5a27b1f6dbfc687c071198215810fd *man/reexports.Rd 660b46a409867f33a33a2c74cd70aba3 *man/subset.AnchorSet.Rd e3d3cb360fbfdb3c6974e14eb5f09870 *src/Makevars d3bfefbd9ecbdd96dce901d0d0193668 
*src/ModularityOptimizer.cpp 92e4aaaaf2d6921d718999c5bd9e9913 *src/ModularityOptimizer.h 24a6ab7279d09348f254732fd5ce2b8f *src/RModularityOptimizer.cpp d3a792f8147b4cc85d4174d9f1945f64 *src/RcppExports.cpp eeb4b89f57a28c13b7d353eac4f0f67a *src/data_manipulation.cpp 531a0da4cef6f8521d98bac8b9160e1e *src/data_manipulation.h cee56bd61191250ef5ae907838891d4a *src/fast_NN_dist.cpp be7cbf63b6797d5d000871b4ca2c9e8c *src/integration.cpp 91debeffa7ebcac8c26af5d56d94ee0a *src/integration.h c86d8ffb7b0af9bd0ffe54222f0553f5 *src/snn.cpp 694a8ab034ccab2c5f7c35bf47b3469e *src/snn.h 1114ed8c2f704fd1d41de74d80f35ecc *src/stats.cpp f33c09d14c160d9f29a89251fd91a036 *src/valid_pointer.c 1d5009992bb3e703bb3f2482f9b8ddd2 *tests/testdata/barcodes.tsv d2554fb8e4d5af543605d0a729e36109 *tests/testdata/cr3.0/barcodes.tsv.gz 70e1ed4c347e2563a90fd2144f375a26 *tests/testdata/cr3.0/features.tsv.gz 75dc5f8ba97f4c3bc30ceb7926c5759b *tests/testdata/cr3.0/matrix.mtx.gz 91aa47aeda59ef6d82105100a09d1497 *tests/testdata/genes.tsv 6693f4ae1a15e0985ed513ee83fad927 *tests/testdata/matrix.mtx 646ac9cb85813dbe838d804f78f717a4 *tests/testdata/nbt_small.Rdata 0231c122b953c71d602ca29437c61d53 *tests/testdata/visium/filtered_feature_bc_matrix.h5 ccef3ff06fc1fbc97f8335d812801186 *tests/testdata/visium/spatial/scalefactors_json.json 9d57b44060c559c4a9922ac85d0fde94 *tests/testdata/visium/spatial/tissue_lowres_image.png e940f8084d865f7121e2b29418a773b0 *tests/testdata/visium/spatial/tissue_positions_list.csv 34b405818933dc96aac3fcd6594b483b *tests/testthat.R b61b1bd8a65e1d6a3ad5d29beabeb05d *tests/testthat/test_data_manipulation.R 5c8686a8c5f668f084ac753874130724 *tests/testthat/test_differential_expression.R cb6d9cbe4ed7f0154b6fa345f0273841 *tests/testthat/test_dimensional_reduction.R c32939c4ac618758ef9f8650458a09fa *tests/testthat/test_integratedata.R 19a2b1c4173eb25203ea1ac6cff5f939 *tests/testthat/test_integration.R cfd86b80e338712aeb56f5145d6b4bfd *tests/testthat/test_load_10X.R e096ae1c8e0ea82b43c0e5ff4c5ba116 *tests/testthat/test_modularity_optimizer.R 60f7cea18766fd1b79337071cb54f59f *tests/testthat/test_objects.R f8c3238335503c04c59de667fd9b242d *tests/testthat/test_preprocessing.R 1e9febbc165630e2064509c7f2a61133 *tests/testthat/test_read_mtx.R 94c8c5c0f01913bfdd82825cc3444030 *tests/testthat/test_transferdata.R ff95f76eb7359e819872a162efa58972 *tests/testthat/test_utilities.R 7c0f611a074d58d7a90acc82fca4aa4e *tests/testthat/test_visualization.R Seurat/inst/0000755000176200001440000000000014152476164012504 5ustar liggesusersSeurat/inst/extdata/0000755000176200001440000000000013712563445014136 5ustar liggesusersSeurat/inst/extdata/pbmc_raw.txt0000644000176200001440000011572013712563445016477 0ustar liggesusersATGCCAGAACGACT CATGGCCTGTGCAT GAACCTGATGAACC TGACTGGATTCTCA AGTCAGACTGCACA TCTGATACACGTGT TGGTATCTAAACAG GCAGCTCTGTTTCT GATATAACACGCAT AATGTTGACAGTCA AGGTCATGAGTGTC AGAGATGATCTCGC GGGTAACTCTAGTG CATGAGACACGGGA TACGCCACTCCGAA CTAAACCTGTGCAT GTAAGCACTCATTC TTGGTACTGAATCC CATCATACGGAGCA TACATCACGCTAAC TTACCATGAATCGC ATAGGAGAAACAGA GCGCACGACTTTAC ACTCGCACGAAAGT ATTACCTGCCTTAT CCCAACTGCAATCG AAATTCGAATCACG CCATCCGATTCGCC TCCACTCTGAGCTT CATCAGGATGCACA CTAAACCTCTGACA GATAGAGAAGGGTG CTAACGGAACCGAT AGATATACCCGTAA TACTCTGAATCGAC GCGCATCTTGCTCC GTTGACGATATCGG ACAGGTACTGGTGT GGCATATGCTTATC CATTACACCAACTG TAGGGACTGAACTC GCTCCATGAGAAGT TACAATGATGCTAG CTTCATGACCGAAT CTGCCAACAGGAGC TTGCATTGAGCTAC AAGCAAGAGCTTAG CGGCACGAACTCAG GGTGGAGATTACTC GGCCGATGTACTCT CGTAGCCTGTATGC TGAGCTGAATGCTG CCTATAACGAGACG ATAAGTTGGTACGT AAGCGACTTTGACG 
ACCAGTGAATACCG ATTGCACTTGCTTT CTAGGTGATGGTTG GCACTAGACCTTTA CATGCGCTAGTCAC TTGAGGACTACGCA ATACCACTCTAAGC CATATAGACTAAGC TTTAGCTGTACTCT GACATTCTCCACCT ACGTGATGCCATGA ATTGTAGATTCCCG GATAGAGATCACGA AATGCGTGGACGGA GCGTAAACACGGTT ATTCAGCTCATTGG GGCATATGGGGAGT ATCATCTGACACCA GTCATACTTCGCCT TTACGTACGTTCAG GAGTTGTGGTAGCT GACGCTCTCTCTCG AGTCTTACTTCGGA GGAACACTTCAGAC CTTGATTGATCTTC MS4A1 0 0 0 0 0 0 0 0 0 0 2 2 4 4 2 3 3 4 2 3 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 CD79B 1 0 0 0 0 0 0 0 0 1 2 4 3 3 2 3 1 2 2 5 0 0 0 0 0 0 0 0 0 1 1 0 2 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 1 1 2 2 0 0 3 0 0 0 0 4 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 CD79A 0 0 0 0 0 0 0 0 0 0 0 5 2 2 5 8 1 5 5 12 0 0 1 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 8 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 HLA-DRA 0 1 0 0 1 1 0 1 0 0 14 28 18 7 15 28 7 26 10 16 7 22 0 10 6 0 4 3 7 13 0 1 0 0 1 0 1 1 0 0 0 0 0 0 0 1 1 1 0 0 10 10 4 1 6 28 10 13 5 8 108 93 41 42 138 77 76 15 19 104 1 0 0 0 2 1 1 0 2 7 TCL1A 0 0 0 0 0 0 0 0 0 0 3 0 2 4 0 0 3 3 3 2 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 4 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 HLA-DQB1 1 0 0 0 0 0 0 0 0 0 1 6 2 2 2 8 2 2 1 2 0 3 0 0 0 0 1 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 0 2 0 0 1 1 0 21 21 3 5 11 11 10 1 2 11 0 0 0 0 0 0 0 0 0 1 HVCN1 0 0 0 0 0 0 0 0 0 0 3 1 0 0 2 0 2 1 1 2 1 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 HLA-DMB 0 0 0 0 0 0 0 0 0 0 0 4 1 1 2 2 1 2 0 1 0 1 0 1 0 0 0 0 2 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 0 0 1 0 1 3 2 1 4 5 2 1 1 1 5 0 0 0 0 0 0 0 0 0 0 LTB 3 7 11 13 3 4 6 4 2 21 2 9 2 4 4 0 3 6 5 7 1 0 0 1 1 0 0 0 0 1 1 0 0 1 0 0 0 0 0 0 0 1 1 1 7 1 0 1 5 3 1 2 0 0 1 1 1 1 2 1 0 1 0 5 0 0 0 0 1 4 0 0 1 0 0 0 0 0 0 0 LINC00926 0 0 0 0 0 0 0 0 0 0 0 2 0 1 1 1 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 FCER2 0 0 0 0 0 0 0 0 0 0 0 1 1 0 0 0 1 1 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 SP100 1 0 1 1 0 0 0 0 0 1 0 3 2 0 1 2 2 0 1 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 1 1 0 0 0 0 0 0 0 0 1 0 0 0 0 0 0 0 0 0 1 0 1 0 0 0 1 0 0 3 1 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 NCF1 0 0 0 0 0 0 0 0 0 0 1 0 1 0 0 0 1 2 2 0 0 0 1 0 1 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 PPP3CC 0 0 0 0 0 1 0 0 0 0 0 1 0 1 0 3 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 1 0 0 0 0 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 EAF2 0 0 0 0 0 0 0 0 0 0 3 0 1 0 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 1 0 0 0 0 0 1 0 0 0 1 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 PPAPDC1B 0 0 0 0 0 0 0 0 0 0 0 3 0 1 0 0 0 1 0 2 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 CD19 0 0 0 0 0 0 0 0 0 0 0 1 0 2 0 1 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 KIAA0125 0 0 0 0 0 0 0 0 0 0 0 1 0 0 1 0 0 1 0 3 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 CYB561A3 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 2 1 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 
0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 CD180 0 0 0 0 0 0 0 0 0 0 1 0 0 0 0 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 RP11-693J15.5 0 0 0 0 0 0 0 0 0 0 1 0 1 1 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 FAM96A 0 1 0 0 0 0 0 0 0 0 1 0 0 0 2 0 0 2 1 0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 1 1 1 1 0 0 2 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 CXCR4 1 1 0 6 0 2 4 1 0 4 2 0 4 1 0 0 4 2 6 2 3 0 0 0 0 0 1 0 0 0 0 0 0 0 0 1 0 0 4 0 7 1 3 0 6 1 0 1 0 1 0 1 0 1 0 0 0 0 1 2 12 3 1 3 0 1 2 0 0 2 0 0 0 0 0 0 0 0 0 0 STX10 0 0 1 0 0 1 0 1 0 0 2 0 0 0 2 0 0 0 1 1 0 0 0 1 0 0 0 1 0 0 0 1 0 1 0 0 0 0 0 0 0 0 0 0 1 0 0 0 0 0 1 0 0 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 SNHG7 0 2 0 0 0 0 0 0 0 1 0 1 1 0 2 3 0 1 0 1 0 0 0 0 0 0 0 0 0 1 0 0 0 0 0 0 0 1 0 0 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 2 1 1 0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 0 NT5C 0 0 0 0 0 0 0 0 0 0 2 2 1 0 0 0 1 0 1 1 0 0 0 0 0 0 0 0 0 0 0 0 1 0 1 0 0 0 0 1 0 0 0 0 0 3 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 1 1 1 0 0 0 1 0 0 0 0 0 0 0 0 0 0 1 BANK1 0 1 0 0 0 0 0 0 0 0 0 4 0 0 1 1 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 0 0 0 1 0 0 0 0 IGLL5 0 0 0 0 0 0 0 0 0 0 1 0 15 0 0 0 0 23 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 CD200 0 0 0 0 0 0 0 0 0 0 1 0 0 1 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 FCRLA 0 0 0 0 0 0 0 0 0 0 1 0 0 0 0 1 1 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 2 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 CD3D 4 4 4 5 4 4 3 2 2 2 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 0 0 0 7 0 0 0 0 1 0 1 0 0 2 3 0 3 15 1 3 6 4 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 NOSIP 0 3 2 2 3 1 1 3 2 1 0 0 0 0 0 2 0 0 0 0 0 2 0 0 1 0 0 0 0 0 1 0 1 1 0 0 0 2 0 0 1 0 0 0 0 0 1 0 0 1 0 2 0 0 0 1 1 0 0 0 1 1 0 0 0 1 0 1 0 1 0 0 0 0 0 0 0 0 0 0 SAFB2 0 1 0 1 0 1 1 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 CD2 1 0 2 2 0 1 0 1 2 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 2 0 0 0 0 0 0 0 2 0 0 0 0 0 0 2 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 2 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 IL7R 5 2 1 2 2 0 1 12 0 9 0 0 0 0 1 0 0 0 1 0 1 0 0 0 0 1 0 0 0 1 2 0 0 0 0 0 0 0 0 0 0 1 3 1 1 1 0 2 0 2 0 0 0 0 0 0 0 0 0 0 1 1 1 0 1 0 0 1 0 0 0 0 0 0 0 1 0 0 0 0 PIK3IP1 0 0 1 0 0 2 3 2 3 0 0 0 1 0 0 1 0 0 0 1 0 0 0 0 0 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 1 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 MPHOSPH6 1 1 0 0 0 0 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 1 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 KHDRBS1 0 1 1 1 36 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 1 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 2 0 0 0 0 0 0 0 1 0 1 0 1 0 0 0 1 0 1 0 0 2 0 0 0 0 0 0 0 0 0 0 MAL 1 1 0 1 0 0 0 1 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 CCR7 0 5 0 0 2 0 1 1 0 1 0 0 0 0 0 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 THYN1 0 2 1 1 0 2 1 0 1 
0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 1 0 0 0 0 0 0 0 0 1 0 0 1 0 0 1 0 0 0 0 0 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 TAF7 0 2 0 2 1 2 0 2 3 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 0 0 0 0 0 0 0 0 2 3 0 0 1 0 0 1 1 1 1 0 1 0 0 0 0 0 0 0 0 0 0 0 1 LDHB 3 2 1 6 5 3 4 0 1 6 0 1 0 0 0 0 2 0 1 0 1 2 0 2 1 0 1 0 0 0 0 0 0 2 2 0 1 0 0 0 2 1 4 0 4 4 0 0 0 2 0 0 1 0 0 2 0 1 0 1 2 0 0 5 2 2 0 1 2 0 1 0 0 0 0 1 0 0 0 1 TMEM123 3 3 0 4 2 1 1 2 1 1 0 1 1 0 0 0 1 3 1 1 0 0 0 0 3 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 2 2 0 1 0 0 0 1 0 0 1 1 0 1 1 0 0 0 0 1 0 0 0 2 3 1 0 0 0 0 0 0 0 0 0 0 0 1 0 1 CCDC104 0 0 0 2 0 1 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 EPC1 1 0 1 0 0 1 0 1 1 1 0 0 0 0 1 1 0 0 1 0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 0 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 2 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 EIF4A2 3 1 2 5 2 4 3 2 3 0 0 2 1 1 5 0 0 1 0 0 0 0 0 0 1 0 1 1 0 1 2 2 0 2 0 0 0 1 3 1 1 0 0 1 2 0 2 3 0 1 0 2 0 0 2 0 1 0 2 1 4 0 0 4 2 4 1 0 0 1 0 0 0 0 0 0 0 0 0 0 CD3E 0 2 1 4 3 1 3 4 2 0 0 0 0 0 0 0 0 1 1 1 1 0 0 0 0 0 0 0 0 0 0 1 0 1 0 0 0 0 1 0 1 0 2 0 1 2 0 1 5 2 0 0 0 0 0 0 0 0 0 0 1 0 0 0 0 0 0 0 0 1 0 0 0 0 0 0 0 0 1 0 TMUB1 1 0 0 0 0 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 0 0 0 0 0 0 1 0 0 0 0 0 0 1 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 BLOC1S4 1 0 2 0 2 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 0 0 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 0 1 0 0 0 0 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 ACSM3 1 2 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 TMEM204 1 0 0 0 0 0 0 1 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 SRSF7 2 0 1 1 54 2 1 1 1 3 1 2 0 1 0 0 0 0 0 0 0 2 0 0 1 0 0 0 0 3 1 0 1 15 0 0 0 0 0 1 2 1 3 1 0 1 1 1 0 1 0 1 0 0 0 1 0 3 1 0 0 2 1 1 3 0 1 5 13 2 0 0 0 0 0 0 0 0 0 0 ACAP1 0 0 1 2 0 1 2 2 0 1 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 1 0 1 0 0 1 0 0 0 0 0 0 3 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 TNFAIP8 1 3 2 3 2 0 0 0 1 0 0 0 0 0 1 1 0 0 1 1 0 0 0 0 1 0 0 1 0 0 0 0 0 1 0 0 1 0 0 0 0 0 1 0 0 0 0 0 0 0 1 0 0 0 0 1 0 0 0 1 0 1 1 0 4 0 2 0 0 0 0 0 0 0 0 0 0 0 0 0 CD7 2 2 2 3 2 1 0 0 3 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 3 0 1 1 1 3 4 2 1 1 2 1 4 0 2 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 TAGAP 1 1 1 1 0 0 0 1 2 0 0 0 0 0 0 0 1 0 0 0 0 0 0 0 1 0 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 2 0 0 0 0 0 1 0 0 0 0 0 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 DNAJB1 2 0 0 2 0 0 2 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 0 1 0 0 2 0 0 0 0 1 0 0 0 0 0 0 0 1 1 1 1 0 0 0 1 0 0 1 0 2 0 2 0 1 0 0 1 0 0 0 0 0 0 0 0 0 0 0 ASNSD1 1 0 0 1 0 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 S1PR4 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 0 1 0 0 0 0 0 0 1 0 0 0 1 0 1 0 0 0 0 0 1 1 1 0 0 1 1 1 0 0 1 0 39 0 0 0 0 0 0 0 1 0 0 2 3 0 0 2 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 CTSW 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 1 3 2 3 2 4 8 6 1 11 1 4 1 2 1 2 2 1 5 1 0 0 0 0 0 0 0 0 0 0 0 0 1 0 1 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 GZMK 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 
0 0 1 0 2 1 2 0 0 2 0 3 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 NKG7 0 0 0 0 1 0 0 0 0 0 0 0 0 2 0 0 0 1 0 0 2 1 0 0 0 0 1 0 0 1 35 14 12 30 20 27 28 10 25 27 31 22 7 2 4 14 16 4 29 8 5 3 0 0 0 0 5 0 0 0 0 1 0 0 1 3 0 1 0 1 0 0 0 0 0 0 0 0 0 0 IL32 1 0 9 8 1 0 3 3 0 3 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 2 0 5 4 0 0 0 0 7 8 5 5 0 7 1 6 7 6 1 0 0 0 0 0 0 1 1 0 0 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 DNAJC2 0 0 0 0 0 1 0 0 0 0 0 0 1 0 0 0 0 1 0 0 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 0 1 1 1 1 0 0 0 0 0 0 1 0 0 1 0 0 0 1 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 LYAR 0 1 1 1 3 0 0 0 0 0 0 0 0 0 0 0 2 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 2 0 0 1 1 0 1 1 2 47 0 1 1 1 1 0 2 0 0 0 0 2 0 1 0 0 0 0 0 0 1 0 0 0 0 0 0 1 0 0 0 0 0 0 0 CST7 0 0 0 0 1 0 0 0 0 0 0 1 0 0 0 0 0 0 0 0 1 0 0 0 0 0 0 0 0 0 4 4 2 7 2 4 3 3 2 5 2 3 1 1 0 2 8 4 5 2 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 0 0 0 1 0 0 0 0 0 0 0 0 0 0 LCK 0 3 2 0 1 1 2 0 0 2 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 2 1 1 1 2 1 0 1 1 2 0 1 2 1 1 1 1 0 0 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 CCL5 0 0 0 2 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 0 0 0 0 1 0 2 5 14 0 29 1 7 5 25 0 14 27 3 13 17 7 3 16 12 3 1 0 0 0 0 0 0 0 0 0 1 0 0 1 1 0 0 0 0 8 5 4 10 11 30 8 5 9 2 HNRNPH1 0 0 0 0 0 0 0 1 0 0 0 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 0 0 0 1 0 1 0 0 1 0 1 1 0 0 0 0 0 0 0 0 0 0 0 2 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 SSR2 0 2 2 4 1 1 0 0 0 6 0 1 0 0 1 1 0 1 0 0 0 0 0 0 3 0 1 0 1 0 0 2 0 0 1 0 1 0 1 2 1 2 1 1 1 2 4 1 2 4 2 1 0 0 2 0 3 1 3 1 0 2 3 0 1 3 2 0 4 2 0 0 0 0 0 0 0 0 0 0 DLGAP1-AS1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 1 0 0 0 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 GIMAP1 0 2 0 0 0 0 0 1 0 2 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 0 0 0 0 0 0 0 1 1 0 2 0 1 1 1 1 0 2 1 0 0 1 1 17 0 0 0 1 0 1 0 1 0 2 0 1 0 0 1 1 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 MMADHC 0 0 0 0 1 0 0 0 0 0 0 2 0 0 0 0 0 0 0 1 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 1 1 2 0 1 1 1 0 1 1 0 1 0 0 0 0 0 0 0 0 0 1 0 0 1 2 2 0 0 0 0 0 0 0 0 0 0 0 0 0 0 ZNF76 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 1 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 CD8A 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 0 0 0 0 0 0 3 0 1 3 3 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 PTPN22 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 0 0 1 1 2 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 GYPC 1 2 2 0 0 1 0 0 2 1 0 0 0 0 0 0 0 0 0 2 0 0 0 0 0 0 0 0 1 0 1 1 0 1 1 0 0 1 0 1 0 1 3 0 1 0 0 7 0 1 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 0 0 0 2 0 0 0 0 0 0 0 0 0 0 HNRNPF 0 0 0 1 0 1 0 1 2 0 0 2 1 0 1 0 0 1 0 1 1 0 0 0 1 0 0 0 0 1 0 0 0 0 0 1 1 1 2 1 1 2 1 1 1 0 1 2 0 2 0 1 0 0 0 0 1 1 0 1 0 0 1 1 0 0 2 0 0 0 0 0 0 0 0 0 0 0 0 0 RPL7L1 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 0 0 0 0 0 1 0 0 0 0 1 0 0 0 0 0 1 0 0 0 0 0 0 0 0 1 0 1 0 1 0 1 1 0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 0 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 KLRG1 0 0 0 0 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 2 0 0 3 0 2 0 1 0 0 0 0 1 4 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 CRBN 1 0 0 0 0 0 0 0 0 0 0 1 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 0 0 0 0 0 0 2 1 1 0 1 1 1 1 0 1 0 0 0 0 0 0 0 0 0 2 0 1 0 1 0 1 0 0 0 0 0 0 0 1 0 0 0 0 0 0 0 SATB1 0 0 0 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 0 0 0 0 0 0 13 0 0 0 0 0 0 
0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 SIT1 0 0 0 0 0 0 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 2 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 PMPCB 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 0 0 0 0 0 0 1 0 0 0 2 1 0 1 0 0 0 1 0 0 0 1 0 0 0 0 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 NRBP1 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 0 0 0 2 0 0 0 0 1 0 1 1 0 0 1 1 0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 0 0 0 TCF7 0 0 1 0 1 0 0 0 1 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 1 1 2 0 0 0 0 1 0 0 0 0 0 0 0 0 0 1 1 0 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 HNRNPA3 0 0 0 1 2 0 0 0 0 0 1 1 0 0 0 0 0 0 1 0 2 0 0 0 0 0 0 0 0 0 0 0 0 2 2 1 1 1 1 0 2 1 2 0 2 1 0 1 2 0 0 1 0 0 0 2 1 1 0 1 1 0 0 0 1 4 0 1 0 2 0 0 0 0 0 0 0 0 0 0 S100A8 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 18 5 25 5 25 6 24 40 16 11 1 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 2 2 0 4 3 0 1 1 2 0 0 2 0 2 1 9 1 23 4 0 0 1 0 0 0 1 0 0 0 2 S100A9 0 1 1 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 30 12 51 22 85 3 54 55 35 17 0 0 0 1 1 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 20 6 1 0 10 4 8 6 0 0 0 0 1 10 0 41 11 32 17 0 3 0 0 0 0 0 0 0 0 7 LYZ 1 1 1 0 0 1 0 0 1 0 1 4 0 1 0 0 0 1 1 0 50 29 25 49 98 11 59 28 34 16 0 0 1 0 2 0 0 1 0 0 0 1 1 0 0 0 0 1 0 0 41 4 3 3 14 17 7 6 9 6 76 20 24 79 53 53 87 76 42 114 3 1 1 0 1 0 0 0 0 22 CD14 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 2 2 4 1 0 1 1 3 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 0 0 0 0 2 2 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 FCN1 1 1 0 0 0 0 0 0 1 0 1 0 0 0 0 0 0 0 0 0 10 6 5 9 7 1 1 2 8 7 0 0 0 0 0 0 0 1 0 0 0 0 0 0 0 0 1 0 0 2 13 7 5 1 4 3 1 1 2 0 0 0 3 1 2 4 6 1 0 0 0 0 0 0 0 0 0 0 0 0 TYROBP 0 0 0 2 0 0 1 0 1 0 0 0 0 0 0 0 0 0 0 0 14 13 3 10 16 4 13 12 19 12 3 0 4 3 6 7 3 4 5 15 2 0 1 1 0 0 0 0 0 0 11 21 2 5 21 13 16 9 16 17 2 8 6 9 11 14 10 10 6 7 0 0 0 0 0 0 0 0 0 14 ASGR1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 3 2 0 0 1 0 1 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 1 0 0 1 1 0 0 0 0 0 0 0 0 0 0 NFKBIA 0 0 1 1 0 0 0 0 0 4 0 1 1 0 1 0 0 0 1 1 3 13 5 0 11 0 2 3 5 10 0 1 0 1 0 1 0 0 0 0 5 1 1 0 0 0 1 0 1 0 2 2 2 0 2 1 1 1 2 9 2 2 0 1 1 6 1 3 2 4 0 0 0 0 0 0 0 0 0 6 TYMP 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 4 7 1 6 5 1 6 4 5 1 0 0 0 2 0 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 6 5 1 1 6 4 3 2 4 5 1 3 2 5 14 11 3 4 8 4 0 0 0 0 0 0 0 0 0 2 CTSS 1 1 0 0 1 2 0 1 1 1 1 0 1 0 0 0 1 2 0 2 15 9 1 5 7 3 4 4 11 7 0 0 1 0 0 0 1 0 0 4 1 1 0 0 1 0 0 0 0 0 8 8 7 3 10 15 18 19 4 17 5 3 1 5 0 3 6 2 0 3 1 0 0 0 0 0 0 0 0 3 TSPO 0 0 0 0 1 1 1 0 0 1 0 1 0 0 0 1 1 0 0 0 1 2 6 0 36 1 5 0 3 5 1 0 0 0 0 0 1 1 1 0 1 1 0 0 2 0 1 0 0 0 2 4 0 1 2 3 6 4 2 5 1 0 0 4 2 5 10 6 4 2 0 0 0 0 0 2 0 0 0 3 RBP7 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 1 0 2 1 4 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 CTSB 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 4 1 1 7 1 1 2 0 0 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 1 0 4 0 1 0 1 0 3 0 0 2 1 1 0 2 0 2 2 1 0 0 0 0 0 0 1 0 0 0 0 1 LGALS1 1 0 1 2 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 2 14 10 8 11 4 6 7 22 37 3 4 9 6 1 3 14 2 1 4 1 3 0 0 0 0 1 0 1 0 5 12 4 2 16 10 6 2 12 16 8 13 21 9 20 10 23 5 28 13 0 0 0 0 1 0 0 0 0 10 FPR1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 0 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 2 0 1 0 2 1 0 0 0 0 0 2 1 0 0 0 0 0 0 0 0 0 0 0 0 0 VSTM1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 1 0 1 2 0 1 0 0 0 0 0 0 0 0 0 0 
0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 BLVRA 0 0 1 0 0 0 0 0 0 0 0 1 0 0 0 0 1 0 0 0 1 3 1 2 0 1 0 1 1 3 0 0 0 1 0 0 0 0 0 0 0 0 1 0 0 0 0 1 0 0 0 0 0 0 2 2 5 1 2 0 1 0 1 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 MPEG1 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 0 0 0 0 1 1 0 1 1 1 0 0 2 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 0 0 1 0 2 1 0 1 0 0 0 0 0 0 0 1 0 0 0 0 BID 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 2 27 0 1 1 1 0 0 1 1 0 0 0 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 0 3 6 1 2 2 4 2 2 2 2 0 0 3 1 0 2 0 0 0 0 0 0 0 0 0 0 SMCO4 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 0 2 1 0 1 0 2 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 0 1 2 1 1 0 0 0 0 0 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 CFD 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 3 4 2 1 1 0 0 2 15 2 0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 0 0 0 0 0 4 5 2 0 0 5 2 3 2 3 0 0 0 0 1 2 0 0 0 0 0 0 0 0 0 0 0 0 0 3 LINC00936 0 0 0 1 0 0 0 0 1 0 0 1 0 0 0 0 0 0 0 0 5 1 0 0 1 1 0 0 2 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 1 3 1 2 1 1 0 3 1 1 1 1 1 0 0 0 0 0 0 0 0 0 1 LGALS2 0 0 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 12 6 2 1 6 0 0 0 5 2 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 0 0 0 0 3 0 0 0 0 0 0 0 0 0 3 10 1 2 3 4 4 1 3 6 0 0 0 0 0 0 0 0 0 3 MS4A6A 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 2 2 1 3 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 0 0 0 0 4 1 0 7 7 0 2 1 2 0 0 0 0 0 0 0 0 0 0 1 FCGRT 0 1 0 0 0 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 2 0 0 1 14 1 2 0 1 2 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 3 2 0 1 3 1 1 0 1 1 3 1 0 2 2 3 3 1 4 3 0 0 0 0 0 0 0 0 0 2 LGALS3 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 4 0 4 4 1 3 0 2 0 0 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 1 0 0 2 2 0 0 4 2 0 2 1 0 0 5 1 0 6 2 7 2 2 0 0 0 0 0 0 1 0 0 0 1 NUP214 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 3 2 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 2 0 1 2 0 0 0 0 1 1 0 1 0 1 0 0 0 0 0 0 0 0 0 0 0 1 SCO2 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 0 0 0 0 2 0 5 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 1 1 0 0 1 2 0 0 2 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 IL17RA 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 IFI6 1 0 0 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 5 3 0 0 3 0 1 5 0 4 0 0 0 0 1 0 1 1 2 0 0 0 0 0 0 0 0 0 1 0 0 2 0 1 1 3 1 3 0 2 0 4 0 2 6 2 5 1 0 0 0 0 0 0 0 0 0 0 0 4 HLA-DPA1 0 0 0 0 0 0 0 0 0 0 3 8 2 2 5 9 0 5 1 5 0 13 2 1 0 1 0 0 7 6 0 1 0 2 0 0 1 0 0 0 0 0 0 0 1 3 0 0 1 0 12 4 2 1 5 5 7 14 5 11 75 52 11 19 54 23 45 10 23 37 0 0 0 0 0 0 0 0 0 5 FCER1A 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 16 1 2 4 8 5 8 4 7 0 0 0 0 0 0 0 0 0 0 0 CLEC10A 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 5 2 4 2 3 6 4 2 1 0 0 0 0 0 0 0 0 0 1 HLA-DMA 0 0 1 0 0 0 0 0 0 0 0 1 0 0 0 4 1 1 0 1 0 4 1 1 0 0 0 0 1 2 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 0 4 0 0 0 0 6 6 5 4 6 5 6 5 3 5 0 0 0 0 0 0 0 0 0 1 RGS1 0 0 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 3 3 1 3 0 1 3 0 1 2 0 0 0 0 0 0 0 0 0 0 HLA-DPB1 0 0 0 0 0 0 0 0 0 0 4 10 4 4 8 23 7 0 4 6 0 18 1 2 0 3 0 1 7 7 2 4 0 0 0 0 0 0 0 0 0 4 0 0 1 2 0 0 0 0 8 3 5 2 3 7 6 5 9 4 102 78 23 25 69 24 43 8 10 50 1 0 0 0 0 0 0 0 0 5 HLA-DQA1 0 0 0 1 0 0 0 0 0 0 0 4 4 1 0 8 1 5 0 1 1 5 0 0 0 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 0 0 0 0 0 
0 2 0 0 0 1 1 2 0 0 25 39 5 2 16 6 11 3 4 9 0 0 0 0 0 0 0 0 0 0 RNF130 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 2 0 0 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 1 1 2 2 2 2 0 1 1 1 6 3 5 1 0 0 0 0 0 0 0 0 0 0 HLA-DRB5 0 0 0 0 0 0 1 0 0 0 1 4 3 0 4 8 1 2 2 4 0 8 1 1 0 0 0 0 4 1 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 0 4 5 0 0 3 3 6 3 6 2 11 26 5 2 31 21 21 2 3 10 0 0 0 0 0 0 0 0 0 1 HLA-DRB1 0 0 0 0 0 0 0 0 0 0 2 10 6 1 5 16 5 11 5 8 2 12 1 5 1 0 3 0 5 3 0 2 0 1 0 0 0 0 1 0 0 0 0 0 0 3 0 0 0 0 8 4 0 0 7 7 13 6 6 4 50 53 10 9 68 36 49 3 9 26 0 0 0 0 0 0 0 0 0 4 CST3 0 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 1 1 13 28 15 11 13 7 37 5 20 18 1 0 0 0 0 0 1 1 0 0 0 0 0 0 0 0 0 1 0 0 16 32 7 9 11 17 33 10 15 25 61 31 25 14 58 112 37 18 29 125 5 1 0 0 5 1 3 0 0 16 IL1B 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 4 0 0 0 0 1 0 0 2 3 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 1 8 0 3 1 2 3 6 1 0 0 0 1 0 0 0 0 0 0 5 POP7 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 1 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 2 1 0 0 0 1 1 33 0 0 0 0 1 3 0 0 0 0 0 0 0 0 0 0 HLA-DQA2 0 0 0 0 0 0 0 0 0 0 0 2 0 0 1 0 1 1 0 0 0 2 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 2 0 0 7 9 1 0 6 1 4 1 0 5 0 0 0 0 0 0 0 0 0 0 CD1C 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 2 5 0 0 3 3 0 0 0 1 0 0 0 0 0 0 0 0 0 0 GSTP1 0 0 1 0 1 0 0 0 0 0 0 0 0 0 0 3 0 1 0 0 2 3 1 6 5 1 3 1 4 2 1 2 0 1 2 0 1 2 0 1 2 0 0 0 0 0 0 0 1 0 4 1 2 0 1 5 0 0 1 1 9 4 5 7 2 5 12 7 10 18 0 0 0 0 1 0 0 0 0 4 EIF3G 1 1 1 1 2 0 0 1 0 2 0 1 0 0 0 2 0 0 0 0 0 0 1 0 2 0 0 1 2 1 3 0 1 0 3 0 0 1 0 3 1 1 0 0 0 0 2 1 1 1 3 3 0 1 2 2 0 1 2 0 1 0 1 2 1 0 1 1 3 43 0 0 0 0 0 0 0 0 0 3 VPS28 0 0 0 3 0 0 0 0 1 0 0 0 1 0 2 0 0 0 0 0 0 0 0 1 2 0 1 1 1 0 0 0 1 1 1 0 1 1 1 1 0 0 0 0 0 0 0 0 0 0 1 1 0 1 0 0 0 2 3 0 4 3 0 1 0 1 38 0 0 1 0 0 1 0 0 2 0 0 0 2 LY86 0 0 0 0 0 0 0 0 0 0 1 1 0 1 0 0 0 2 1 1 0 0 0 0 2 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 2 0 0 2 0 3 2 3 1 2 0 1 8 1 0 0 0 0 0 0 0 0 0 ZFP36L1 0 0 1 0 1 1 0 0 0 0 0 0 0 0 1 0 0 0 0 1 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 0 0 1 0 0 0 0 0 0 0 1 0 0 0 0 0 0 0 1 1 0 21 0 1 0 0 0 0 0 0 0 0 0 0 0 ZNF330 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 0 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 2 0 0 0 0 0 0 1 32 0 0 0 0 0 0 0 0 0 0 0 ANXA2 0 0 0 0 1 1 0 0 0 1 0 1 1 0 0 1 0 0 0 0 1 3 0 3 1 1 1 0 2 3 1 0 0 4 1 0 4 1 0 1 0 0 1 0 0 0 1 1 0 2 9 3 1 0 4 2 3 2 0 6 5 1 5 1 22 10 9 1 3 3 0 0 0 0 0 0 0 0 0 4 GRN 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 4 1 1 0 1 0 5 1 0 2 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 2 0 1 1 2 3 0 1 1 3 6 1 0 2 5 4 8 2 4 5 0 1 0 0 0 0 0 0 0 0 CFP 0 0 0 0 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 7 1 1 1 0 2 0 2 1 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 2 1 3 0 0 1 1 0 3 2 4 0 2 0 1 39 1 3 5 1 0 0 0 0 0 0 0 0 0 1 HSP90AA1 2 0 1 2 3 2 2 1 0 3 0 0 1 0 0 2 4 0 0 1 0 0 0 0 0 0 0 0 3 3 1 4 5 1 1 0 1 0 0 0 0 0 0 2 0 1 0 1 3 0 3 1 0 0 0 1 0 0 1 1 3 1 0 2 64 2 3 1 1 1 0 0 0 0 0 0 0 0 0 0 FUOM 0 0 0 0 0 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 4 1 0 1 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 LST1 0 0 0 3 2 1 0 0 0 0 0 0 0 0 0 0 0 0 1 0 3 6 1 4 8 3 5 0 7 13 0 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 15 17 8 11 18 13 36 17 12 27 12 7 7 4 8 10 4 2 6 6 0 0 0 0 0 0 0 0 0 7 AIF1 2 0 1 0 0 0 2 1 0 0 0 0 0 0 1 0 0 0 0 1 5 7 6 5 4 3 1 2 10 12 1 0 0 0 0 0 0 0 0 0 0 0 0 0 
2 0 0 0 0 0 7 12 7 6 32 33 12 19 18 29 6 7 1 3 11 7 9 4 1 4 0 0 0 0 1 0 0 0 0 5 PSAP 0 0 2 0 3 2 0 0 0 0 0 0 0 0 2 0 0 0 0 0 6 5 1 5 3 2 1 1 6 4 0 1 2 0 1 0 1 1 0 0 3 1 0 0 1 0 0 1 2 1 8 8 6 2 9 9 10 8 5 10 1 2 1 6 6 4 4 2 2 7 0 0 1 1 1 0 0 0 0 1 YWHAB 0 0 0 1 1 0 0 1 0 1 0 0 2 0 1 0 0 1 1 0 1 0 0 1 2 0 0 1 2 0 2 0 1 1 0 1 0 1 0 2 0 1 1 0 0 1 2 2 1 1 2 2 1 0 50 1 1 1 3 1 5 0 0 0 2 5 4 0 1 3 0 0 0 0 0 0 0 0 0 1 MYO1G 0 0 2 1 0 1 0 0 0 0 0 1 1 0 1 0 0 0 0 0 0 1 1 0 0 0 0 0 2 0 1 0 1 0 0 0 0 0 1 1 1 1 0 0 1 1 0 0 1 0 0 1 0 0 3 3 1 27 1 1 0 0 0 0 0 0 2 2 1 2 0 0 0 0 0 0 0 0 0 1 SAT1 0 1 0 0 0 1 1 1 1 2 0 1 0 0 2 5 0 0 0 0 4 15 8 5 4 2 8 2 11 18 3 0 0 0 0 0 1 0 1 1 0 1 3 0 0 0 0 0 2 1 21 25 6 10 26 26 16 15 11 22 10 5 5 16 2 3 16 3 4 5 3 4 2 6 3 17 3 6 4 3 RGS2 0 0 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 3 0 0 0 3 0 1 1 1 1 0 1 0 0 0 0 0 0 0 0 1 0 1 0 0 0 1 0 0 0 2 3 16 0 1 11 3 5 4 6 8 1 1 0 0 0 1 1 1 2 0 0 0 0 0 0 0 0 0 0 SERPINA1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 0 6 4 0 2 0 0 1 0 3 3 0 0 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 2 1 1 1 3 4 5 5 3 6 1 1 0 3 0 1 0 0 0 1 0 0 0 0 0 0 0 0 0 2 IFITM3 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 5 0 0 0 2 4 1 2 7 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 5 3 4 1 11 9 2 5 7 10 0 12 2 1 3 4 4 0 0 1 0 0 0 0 0 0 0 0 0 1 FCGR3A 0 0 0 0 1 0 0 1 0 0 0 1 0 0 0 0 0 0 0 0 1 1 0 0 0 0 0 0 0 1 6 2 2 1 0 1 2 1 2 6 2 0 0 0 0 0 1 0 0 0 0 5 1 2 14 4 18 9 5 11 0 0 0 0 0 0 1 0 1 0 0 0 0 0 0 0 0 0 0 0 LILRA3 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 3 0 0 1 1 0 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 S100A11 2 0 1 2 1 0 0 1 1 0 0 1 0 0 0 0 0 0 0 0 2 10 4 2 2 2 1 6 5 6 6 0 0 0 0 0 1 1 0 0 1 0 0 0 1 1 0 1 0 0 17 13 1 2 9 12 14 8 7 13 5 4 5 3 11 9 9 4 5 2 0 0 0 0 0 0 0 0 0 1 FCER1G 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 0 6 3 4 6 1 2 4 4 9 8 8 0 3 1 2 5 6 6 1 6 3 0 0 0 0 0 0 0 0 0 12 12 2 4 35 16 24 9 9 30 8 8 3 3 13 8 7 5 8 3 0 0 0 1 0 0 0 1 0 4 TNFRSF1B 0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 0 0 0 0 0 0 1 0 0 1 0 0 0 0 0 0 0 0 0 0 0 1 0 1 0 0 0 0 0 0 0 0 0 0 0 0 2 1 0 3 4 1 2 1 1 0 0 0 0 0 1 0 0 0 2 0 0 0 0 0 0 0 0 0 0 IFITM2 3 0 3 3 1 3 3 1 0 3 0 1 2 0 1 0 0 0 2 3 6 4 0 0 1 1 0 1 3 6 8 2 3 5 2 1 5 1 3 2 7 4 2 2 5 1 1 4 1 2 5 10 1 4 17 8 33 8 14 19 4 7 4 3 2 2 0 1 6 4 0 0 0 0 1 1 0 0 0 1 WARS 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 2 0 0 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 0 1 0 2 2 1 1 0 3 1 1 2 0 0 0 0 0 2 1 0 1 0 0 0 0 0 0 0 0 0 0 0 IFI30 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 2 0 2 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 3 3 0 1 2 1 6 1 5 6 6 0 3 1 1 3 3 0 1 4 0 0 0 0 0 0 0 0 0 1 MS4A7 0 0 0 1 0 0 0 0 0 0 0 2 0 0 0 0 0 0 0 0 0 0 0 0 2 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 5 1 0 2 4 3 1 0 2 0 0 1 0 0 0 0 0 1 1 0 0 0 0 0 0 0 0 0 0 C5AR1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 0 0 0 0 0 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 2 4 2 1 1 3 0 1 0 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 HCK 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 0 1 0 3 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 2 0 1 0 3 1 2 3 5 0 1 0 0 1 1 0 0 1 4 0 0 0 0 0 0 0 0 0 1 COTL1 0 0 4 2 1 2 0 1 1 3 0 2 0 0 0 1 0 1 0 0 6 15 2 4 7 3 6 0 4 20 0 1 1 0 0 0 1 0 0 0 1 2 0 0 5 1 0 0 2 0 9 20 9 3 6 9 91 11 18 18 18 2 9 11 12 11 7 5 4 25 1 2 0 3 0 2 3 0 4 7 LGALS9 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 0 0 0 0 0 0 0 0 0 0 0 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 2 3 0 0 6 0 0 3 0 3 0 0 1 1 0 1 0 1 1 1 0 0 0 0 0 0 0 0 0 0 CD68 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 2 4 0 0 0 3 0 1 3 0 1 0 0 0 0 0 0 0 0 0 
0 0 0 0 0 0 0 0 0 6 4 1 0 4 3 0 4 2 8 0 0 0 0 1 1 0 0 0 1 0 0 0 0 1 0 0 1 1 1 RP11-290F20.3 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 2 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 5 0 0 4 0 5 2 1 4 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 RHOC 0 0 0 1 1 0 0 0 0 0 0 0 0 0 0 1 0 0 0 0 0 1 0 0 0 0 0 0 0 1 2 0 0 1 0 1 3 1 1 2 0 0 0 0 0 0 1 1 0 0 1 6 0 1 1 2 7 2 6 3 2 0 1 0 2 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 CARD16 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 2 1 1 1 1 1 1 0 0 2 0 2 1 0 0 0 0 0 0 0 0 0 0 0 0 1 1 0 0 0 2 2 0 2 2 1 0 6 3 6 1 0 0 1 2 1 0 1 0 1 0 0 0 0 0 0 0 0 0 0 LRRC25 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 0 0 0 1 0 0 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 1 0 1 6 4 1 0 2 0 1 1 0 0 0 1 1 0 2 0 0 0 0 0 0 0 0 0 1 COPS6 0 0 1 0 0 0 1 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 1 0 0 3 0 1 0 0 0 0 0 0 1 0 0 26 0 0 2 2 1 0 0 1 0 0 0 1 1 1 1 0 1 0 0 0 0 0 0 0 0 0 0 1 ADAR 0 0 0 1 1 0 0 0 0 0 0 1 0 1 0 0 0 0 1 0 0 1 1 0 0 0 1 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 0 0 0 0 0 1 1 2 25 0 1 0 0 0 0 0 0 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 PPBP 0 0 1 0 0 0 0 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 43 41 36 55 58 54 66 34 30 6 GPX1 0 0 0 1 1 1 0 1 0 1 0 1 1 0 1 0 1 0 0 0 4 5 3 5 12 1 15 2 3 1 0 1 0 0 0 0 2 0 1 0 0 0 0 1 1 0 1 2 0 0 5 3 0 0 1 1 0 1 1 2 6 7 2 6 24 16 28 3 6 3 18 8 12 18 18 28 11 13 16 9 TPM4 0 0 0 0 1 0 0 1 0 0 0 0 0 0 0 0 0 1 0 0 0 0 0 0 1 0 1 0 0 0 2 1 0 0 0 0 1 1 1 0 0 0 0 0 0 0 0 0 0 0 2 0 0 0 0 0 0 0 0 1 1 0 0 0 1 2 1 0 1 1 4 4 2 2 2 15 2 1 3 2 PF4 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 0 0 0 0 14 11 14 18 23 62 9 14 6 0 SDPR 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 11 3 13 8 8 29 3 6 5 2 NRGN 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 0 0 0 0 0 2 0 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 2 0 0 0 0 1 5 3 3 2 7 3 1 1 2 SPARC 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 8 3 2 2 3 9 3 3 4 2 GNG11 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 6 5 9 10 7 23 12 6 11 1 CLU 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 14 5 8 11 15 6 4 3 5 2 HIST1H2AC 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 5 3 5 5 2 42 2 1 2 1 NCOA4 0 1 0 0 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 1 0 0 2 1 1 0 0 1 0 0 0 0 0 8 2 0 12 8 7 3 2 6 0 GP9 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 3 3 2 3 11 6 5 3 0 FERMT3 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 0 1 0 0 0 0 0 0 1 0 0 0 1 0 1 0 0 0 0 0 0 1 1 0 0 1 0 0 0 0 2 0 0 0 2 0 0 1 0 0 1 2 5 4 4 1 6 0 4 0 1 ODC1 1 0 0 0 0 1 0 0 0 0 0 0 0 0 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 0 0 1 0 0 0 0 0 0 0 2 0 1 0 0 0 3 0 1 2 1 14 2 0 4 1 CD9 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 3 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 
0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 6 4 4 3 4 3 4 20 5 0 RUFY1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 0 0 0 0 0 1 0 0 0 0 0 0 0 0 0 3 3 2 3 2 9 0 0 1 0 TUBB1 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 4 3 5 2 14 32 2 0 8 0 TALDO1 1 2 0 0 2 0 0 0 0 0 0 0 0 0 1 2 0 0 0 0 1 1 2 3 5 1 2 0 3 2 0 0 0 0 0 0 0 0 0 2 1 0 0 0 0 0 0 0 1 0 1 2 0 0 0 2 2 2 1 2 1 0 0 1 3 1 3 1 1 2 2 0 1 2 1 10 37 0 2 3 TREML1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 3 0 2 7 4 0 1 3 5 2 NGFRAP1 0 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 4 1 2 0 2 3 1 2 4 0 PGRMC1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 4 1 0 4 2 6 2 2 0 0 CA2 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 3 1 4 1 3 8 0 13 2 0 ITGA2B 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 5 1 4 2 4 1 4 1 0 0 MYL9 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 4 0 3 4 8 1 2 0 0 1 TMEM40 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 3 1 1 2 1 2 3 0 PARVB 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 1 0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 5 4 1 4 0 0 1 0 0 0 PTCRA 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 2 0 4 0 0 20 2 2 1 0 ACRBP 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 1 0 0 25 0 3 1 1 TSC22D1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 6 1 0 26 1 0 0 1 VDAC3 0 0 0 1 0 0 1 0 0 1 0 29 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 0 1 0 0 0 0 0 0 2 0 0 1 1 0 0 0 0 0 0 0 1 0 0 1 0 0 0 0 0 0 0 2 1 0 0 0 0 0 0 0 0 0 0 41 0 0 2 1 0 1 1 1 GZMB 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 1 0 0 0 0 0 27 2 1 10 8 5 10 7 4 11 3 0 0 0 0 0 6 0 2 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 GZMA 0 0 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 2 5 3 4 10 8 12 10 3 13 1 8 2 1 0 0 0 3 3 2 0 0 0 0 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 GNLY 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 35 0 15 3 29 11 22 15 18 18 10 0 0 3 0 0 4 1 3 0 0 0 0 0 0 0 1 0 0 1 0 0 1 0 0 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 FGFBP2 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 5 3 9 2 6 3 6 8 2 5 4 1 0 0 0 2 9 0 3 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 AKR1C3 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 7 0 1 1 0 1 5 4 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 CCL4 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 0 0 0 0 0 0 5 3 1 0 3 1 1 2 1 1 3 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 
0 0 0 0 0 0 0 0 0 PRF1 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 14 1 4 9 7 10 10 2 4 7 6 13 0 0 0 0 6 0 5 3 0 0 0 0 0 0 0 0 1 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 0 0 GZMH 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 3 5 7 1 0 3 1 0 2 6 0 0 0 0 0 10 0 9 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 XBP1 1 0 1 1 2 0 0 1 0 0 0 1 0 0 1 0 0 0 0 0 1 0 0 0 0 0 0 0 0 0 1 1 2 2 4 1 0 2 1 3 1 0 0 0 0 0 1 1 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 1 0 0 2 0 0 0 0 0 0 0 0 0 0 0 0 0 GZMM 0 1 0 0 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 4 2 1 1 2 3 2 2 6 2 1 0 0 1 0 3 2 0 1 0 0 0 0 0 0 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 PTGDR 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 0 0 1 0 1 51 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 0 IGFBP7 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 1 0 4 0 0 3 0 1 7 4 0 3 1 0 0 0 0 0 0 0 0 0 1 0 0 1 0 0 0 0 1 1 0 0 0 1 3 2 0 0 1 0 0 0 0 0 0 0 0 0 0 0 TTC38 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 2 1 1 1 0 0 1 0 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 KLRD1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 1 0 1 1 0 1 2 2 1 0 1 0 1 0 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 ARHGDIA 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 1 0 0 0 0 0 0 0 1 1 1 0 1 1 1 1 1 0 1 0 1 25 1 0 0 0 0 0 0 0 0 0 2 0 1 0 0 3 2 1 0 0 1 0 1 1 1 2 4 1 0 0 0 0 0 0 0 1 0 0 0 0 IL2RB 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 2 1 1 1 0 3 0 0 0 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 CLIC3 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 3 0 4 0 1 2 3 0 1 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 PPP1R18 0 1 0 0 0 1 0 0 0 1 0 0 1 0 1 0 0 0 1 0 0 2 0 0 0 0 0 1 0 0 2 2 1 1 1 1 3 0 3 1 0 1 2 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 0 0 0 1 0 0 0 0 0 0 0 0 0 1 CD247 0 1 1 0 2 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 3 1 1 3 0 2 2 0 1 1 2 1 0 0 0 0 1 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 ALOX5AP 1 0 0 0 1 0 0 1 0 0 1 0 0 0 0 2 1 0 1 0 0 0 0 0 0 0 0 1 0 0 3 0 2 1 1 3 1 2 1 2 0 2 0 0 0 0 0 1 0 0 0 0 0 0 1 0 0 0 0 0 0 0 0 0 1 1 0 0 1 0 0 0 0 0 0 0 0 0 0 0 XCL2 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 3 2 0 0 0 0 1 2 0 1 0 0 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 C12orf75 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 4 1 0 1 0 0 4 2 1 2 0 1 0 0 0 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 1 0 0 0 0 0 1 0 0 0 0 0 0 2 0 0 0 0 0 0 RARRES3 1 0 0 3 0 1 1 0 1 0 0 2 0 0 1 0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 7 3 2 0 1 3 3 5 0 1 0 2 1 1 0 2 2 0 1 1 0 0 0 0 2 0 0 0 0 0 0 1 0 0 2 1 1 0 0 0 0 0 1 0 0 1 0 0 0 0 PCMT1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 1 0 0 0 2 1 0 58 0 0 1 0 2 1 0 0 1 0 0 0 0 1 0 0 0 0 1 0 0 2 0 1 0 0 0 2 0 0 0 4 2 1 0 3 1 0 0 0 0 0 0 0 0 0 0 LAMP1 1 0 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 2 1 3 2 1 2 1 0 1 0 1 0 0 1 2 0 1 0 1 0 0 0 0 0 0 0 3 0 0 0 0 0 0 1 0 0 0 0 0 0 0 0 1 0 0 0 0 SPON2 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 0 0 3 5 1 3 0 0 1 2 0 2 3 0 0 0 0 0 3 1 3 0 0 0 0 0 0 0 0 0 0 0 1 1 0 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 S100B 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 10 0 0 1 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 
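The table above is the package's bundled raw PBMC example data (inst/extdata/pbmc_raw.txt): a header row of cell barcodes followed by one row per gene of whitespace-separated counts. A minimal sketch of loading it into a Seurat object; variable names here are illustrative and this is not taken from the package's documentation:

```r
# Minimal sketch: read the bundled genes-by-cells count table and build a
# Seurat object from it. read.table() treats the barcode row as the header
# because it has one fewer field than the data rows.
library(Seurat)

pbmc.counts <- read.table(
  file = system.file("extdata", "pbmc_raw.txt", package = "Seurat"),
  as.is = TRUE
)
pbmc.raw <- CreateSeuratObject(counts = as.matrix(pbmc.counts))
pbmc.raw <- NormalizeData(pbmc.raw)
```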
Seurat/inst/CITATION0000644000176200001440000000756214152476164013651 0ustar liggesusers
citHeader("To cite Seurat in publications, please use:")

citEntry(entry = "article",
  author = personList(
    as.person("Yuhan Hao"),
    as.person("Stephanie Hao"),
    as.person("Erica Andersen-Nissen"),
    as.person("William M. Mauck III"),
    as.person("Shiwei Zheng"),
    as.person("Andrew Butler"),
    as.person("Maddie J. Lee"),
    as.person("Aaron J. Wilk"),
    as.person("Charlotte Darby"),
    as.person("Michael Zagar"),
    as.person("Paul Hoffman"),
    as.person("Marlon Stoeckius"),
    as.person("Efthymia Papalexi"),
    as.person("Eleni P. Mimitou"),
    as.person("Jaison Jain"),
    as.person("Avi Srivastava"),
    as.person("Tim Stuart"),
    as.person("Lamar B. Fleming"),
    as.person("Bertrand Yeung"),
    as.person("Angela J. Rogers"),
    as.person("Juliana M. McElrath"),
    as.person("Catherine A. Blish"),
    as.person("Raphael Gottardo"),
    as.person("Peter Smibert"),
    as.person("Rahul Satija")
  ),
  title = "Integrated analysis of multimodal single-cell data",
  journal = "Cell",
  year = "2021",
  doi = "10.1016/j.cell.2021.04.048",
  url = "https://doi.org/10.1016/j.cell.2021.04.048",
  textVersion = "Hao and Hao et al. Integrated analysis of multimodal single-cell data. Cell (2021) [Seurat V4]"
)

citEntry(entry = "article",
  author = personList(
    as.person("Tim Stuart"),
    as.person("Andrew Butler"),
    as.person("Paul Hoffman"),
    as.person("Christoph Hafemeister"),
    as.person("Efthymia Papalexi"),
    as.person("William M Mauck III"),
    as.person("Yuhan Hao"),
    as.person("Marlon Stoeckius"),
    as.person("Peter Smibert"),
    as.person("Rahul Satija")
  ),
  title = "Comprehensive Integration of Single-Cell Data",
  journal = "Cell",
  year = "2019",
  volume = "177",
  pages = "1888-1902",
  doi = "10.1016/j.cell.2019.05.031",
  url = "https://doi.org/10.1016/j.cell.2019.05.031",
  textVersion = "Stuart and Butler et al. Comprehensive Integration of Single-Cell Data. Cell (2019) [Seurat V3]"
)

citEntry(entry = "article",
  author = personList(
    as.person("Andrew Butler"),
    as.person("Paul Hoffman"),
    as.person("Peter Smibert"),
    as.person("Efthymia Papalexi"),
    as.person("Rahul Satija")
  ),
  title = "Integrating single-cell transcriptomic data across different conditions, technologies, and species",
  journal = "Nature Biotechnology",
  year = "2018",
  volume = "36",
  pages = "411-420",
  doi = "10.1038/nbt.4096",
  url = "https://doi.org/10.1038/nbt.4096",
  textVersion = "Butler et al. Integrating single-cell transcriptomic data across different conditions, technologies, and species. Nat Biotechnol (2018) [Seurat V2]"
)

citEntry(entry = "article",
  author = personList(
    as.person("Rahul Satija"),
    as.person("Jeffrey A Farrell"),
    as.person("David Gennert"),
    as.person("Alexander F Schier"),
    as.person("Aviv Regev")
  ),
  title = "Spatial reconstruction of single-cell gene expression data",
  journal = "Nature Biotechnology",
  year = "2015",
  volume = "33",
  pages = "495-502",
  doi = "10.1038/nbt.3192",
  url = "https://doi.org/10.1038/nbt.3192",
  textVersion = "Satija and Farrell et al. Spatial reconstruction of single-cell gene expression data. Nat Biotechnol (2015) [Seurat V1]"
)
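These entries are what R resolves through its standard citation mechanism; from an interactive session the references above can be printed directly:

```r
# Print the package's citation entries (read from the inst/CITATION file shown above)
citation("Seurat")
```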