Seurat/0000755000176200001440000000000014525771316011530 5ustar liggesusersSeurat/NAMESPACE0000644000176200001440000006171314525500037012745 0ustar liggesusers# Generated by roxygen2: do not edit by hand S3method("SCTResults<-",SCTAssay) S3method("SCTResults<-",SCTModel) S3method("[",SlideSeq) S3method("[",VisiumV1) S3method("levels<-",SCTAssay) S3method(.CalcN,IterableMatrix) S3method(AnnotateAnchors,IntegrationAnchorSet) S3method(AnnotateAnchors,TransferAnchorSet) S3method(AnnotateAnchors,default) S3method(Cells,SCTAssay) S3method(Cells,SCTModel) S3method(Cells,STARmap) S3method(Cells,SlideSeq) S3method(Cells,VisiumV1) S3method(Features,SCTAssay) S3method(Features,SCTModel) S3method(FetchData,VisiumV1) S3method(FindClusters,Seurat) S3method(FindClusters,default) S3method(FindMarkers,Assay) S3method(FindMarkers,DimReduc) S3method(FindMarkers,SCTAssay) S3method(FindMarkers,Seurat) S3method(FindMarkers,StdAssay) S3method(FindMarkers,default) S3method(FindNeighbors,Assay) S3method(FindNeighbors,Seurat) S3method(FindNeighbors,default) S3method(FindNeighbors,dist) S3method(FindSpatiallyVariableFeatures,Assay) S3method(FindSpatiallyVariableFeatures,Seurat) S3method(FindSpatiallyVariableFeatures,StdAssay) S3method(FindSpatiallyVariableFeatures,default) S3method(FindVariableFeatures,Assay) S3method(FindVariableFeatures,SCTAssay) S3method(FindVariableFeatures,Seurat) S3method(FindVariableFeatures,StdAssay) S3method(FindVariableFeatures,V3Matrix) S3method(FindVariableFeatures,default) S3method(FoldChange,Assay) S3method(FoldChange,DimReduc) S3method(FoldChange,SCTAssay) S3method(FoldChange,Seurat) S3method(FoldChange,StdAssay) S3method(FoldChange,default) S3method(GetAssay,Seurat) S3method(GetImage,STARmap) S3method(GetImage,SlideSeq) S3method(GetImage,VisiumV1) S3method(GetTissueCoordinates,STARmap) S3method(GetTissueCoordinates,SlideSeq) S3method(GetTissueCoordinates,VisiumV1) S3method(HVFInfo,SCTAssay) S3method(IntegrateEmbeddings,IntegrationAnchorSet) S3method(IntegrateEmbeddings,TransferAnchorSet) S3method(LeverageScore,Assay) S3method(LeverageScore,Seurat) S3method(LeverageScore,StdAssay) S3method(LeverageScore,default) S3method(LogNormalize,IterableMatrix) S3method(LogNormalize,V3Matrix) S3method(LogNormalize,data.frame) S3method(LogNormalize,default) S3method(MappingScore,AnchorSet) S3method(MappingScore,default) S3method(NormalizeData,Assay) S3method(NormalizeData,Seurat) S3method(NormalizeData,StdAssay) S3method(NormalizeData,V3Matrix) S3method(NormalizeData,default) S3method(ProjectCellEmbeddings,Assay) S3method(ProjectCellEmbeddings,IterableMatrix) S3method(ProjectCellEmbeddings,SCTAssay) S3method(ProjectCellEmbeddings,Seurat) S3method(ProjectCellEmbeddings,StdAssay) S3method(ProjectCellEmbeddings,default) S3method(ProjectUMAP,DimReduc) S3method(ProjectUMAP,Seurat) S3method(ProjectUMAP,default) S3method(PseudobulkExpression,Assay) S3method(PseudobulkExpression,Seurat) S3method(PseudobulkExpression,StdAssay) S3method(Radius,STARmap) S3method(Radius,SlideSeq) S3method(Radius,VisiumV1) S3method(RenameCells,SCTAssay) S3method(RenameCells,STARmap) S3method(RenameCells,SlideSeq) S3method(RenameCells,VisiumV1) S3method(RunCCA,Seurat) S3method(RunCCA,default) S3method(RunGraphLaplacian,Seurat) S3method(RunGraphLaplacian,default) S3method(RunICA,Assay) S3method(RunICA,Seurat) S3method(RunICA,default) S3method(RunLDA,Assay) S3method(RunLDA,Seurat) S3method(RunLDA,default) S3method(RunPCA,Assay) S3method(RunPCA,Seurat) S3method(RunPCA,Seurat5) S3method(RunPCA,StdAssay) S3method(RunPCA,default) 
S3method(RunSLSI,Assay) S3method(RunSLSI,Seurat) S3method(RunSLSI,default) S3method(RunSPCA,Assay) S3method(RunSPCA,Assay5) S3method(RunSPCA,Seurat) S3method(RunSPCA,default) S3method(RunTSNE,DimReduc) S3method(RunTSNE,Seurat) S3method(RunTSNE,dist) S3method(RunTSNE,matrix) S3method(RunUMAP,Graph) S3method(RunUMAP,Neighbor) S3method(RunUMAP,Seurat) S3method(RunUMAP,default) S3method(SCTResults,SCTAssay) S3method(SCTResults,SCTModel) S3method(SCTResults,Seurat) S3method(SCTransform,Assay) S3method(SCTransform,IterableMatrix) S3method(SCTransform,Seurat) S3method(SCTransform,StdAssay) S3method(SCTransform,default) S3method(ScaleData,Assay) S3method(ScaleData,IterableMatrix) S3method(ScaleData,Seurat) S3method(ScaleData,StdAssay) S3method(ScaleData,default) S3method(ScaleFactors,VisiumV1) S3method(ScoreJackStraw,DimReduc) S3method(ScoreJackStraw,JackStrawData) S3method(ScoreJackStraw,Seurat) S3method(VST,IterableMatrix) S3method(VST,default) S3method(VST,dgCMatrix) S3method(VST,matrix) S3method(VariableFeatures,SCTAssay) S3method(VariableFeatures,SCTModel) S3method(as.CellDataSet,Seurat) S3method(as.Seurat,CellDataSet) S3method(as.Seurat,SingleCellExperiment) S3method(as.SingleCellExperiment,Seurat) S3method(as.data.frame,Matrix) S3method(as.sparse,H5Group) S3method(as.sparse,IterableMatrix) S3method(components,SCTAssay) S3method(dim,STARmap) S3method(dim,SlideSeq) S3method(dim,VisiumV1) S3method(fortify,Centroids) S3method(fortify,Molecules) S3method(fortify,Segmentation) S3method(levels,SCTAssay) S3method(merge,SCTAssay) S3method(subset,AnchorSet) S3method(subset,SCTAssay) S3method(subset,STARmap) S3method(subset,SlideSeq) S3method(subset,VisiumV1) export("%iff%") export("%||%") export("DefaultAssay<-") export("Idents<-") export("Index<-") export("JS<-") export("Key<-") export("Loadings<-") export("Misc<-") export("Project<-") export("SCTResults<-") export("Tool<-") export("VariableFeatures<-") export(AddAzimuthResults) export(AddMetaData) export(AddModuleScore) export(AggregateExpression) export(AnnotateAnchors) export(Assays) export(AugmentPlot) export(AutoPointSize) export(AverageExpression) export(BGTextColor) export(BarcodeInflectionsPlot) export(BlackAndWhite) export(BlueAndRed) export(BoldTitle) export(BridgeCellsRepresentation) export(BuildClusterTree) export(BuildNicheAssay) export(CCAIntegration) export(CalcPerturbSig) export(CalculateBarcodeInflections) export(CaseMatch) export(CellCycleScoring) export(CellScatter) export(CellSelector) export(Cells) export(CellsByIdentities) export(CenterTitle) export(CollapseEmbeddingOutliers) export(CollapseSpeciesExpressionMatrix) export(ColorDimSplit) export(CombinePlots) export(Command) export(CountSketch) export(CreateAssayObject) export(CreateCategoryMatrix) export(CreateDimReducObject) export(CreateSCTAssayObject) export(CreateSeuratObject) export(CustomDistance) export(CustomPalette) export(DEenrichRPlot) export(DarkTheme) export(DefaultAssay) export(DietSeurat) export(DimHeatmap) export(DimPlot) export(DiscretePalette) export(Distances) export(DoHeatmap) export(DotPlot) export(ElbowPlot) export(Embeddings) export(ExpMean) export(ExpSD) export(ExpVar) export(FastRPCAIntegration) export(FastRowScale) export(FeatureLocator) export(FeaturePlot) export(FeatureScatter) export(FetchData) export(FetchResiduals) export(FilterSlideSeq) export(FindAllMarkers) export(FindBridgeIntegrationAnchors) export(FindBridgeTransferAnchors) export(FindClusters) export(FindConservedMarkers) export(FindIntegrationAnchors) export(FindMarkers) 
export(FindMultiModalNeighbors) export(FindNeighbors) export(FindSpatiallyVariableFeatures) export(FindSubCluster) export(FindTransferAnchors) export(FindVariableFeatures) export(FoldChange) export(FontSize) export(GaussianSketch) export(GeneSymbolThesarus) export(GetAssay) export(GetAssayData) export(GetImage) export(GetIntegrationData) export(GetResidual) export(GetTissueCoordinates) export(GetTransferPredictions) export(GroupCorrelation) export(GroupCorrelationPlot) export(HTODemux) export(HTOHeatmap) export(HVFInfo) export(HarmonyIntegration) export(HoverLocator) export(IFeaturePlot) export(ISpatialDimPlot) export(ISpatialFeaturePlot) export(Idents) export(ImageDimPlot) export(ImageFeaturePlot) export(Images) export(Index) export(Indices) export(IntegrateData) export(IntegrateEmbeddings) export(IntegrateLayers) export(Intensity) export(IsGlobal) export(JS) export(JackStraw) export(JackStrawPlot) export(JointPCAIntegration) export(Key) export(L2CCA) export(L2Dim) export(LabelClusters) export(LabelPoints) export(LeverageScore) export(LinkedDimPlot) export(LinkedFeaturePlot) export(Load10X_Spatial) export(LoadAkoya) export(LoadAnnoyIndex) export(LoadCurioSeeker) export(LoadHuBMAPCODEX) export(LoadNanostring) export(LoadSTARmap) export(LoadVizgen) export(LoadXenium) export(Loadings) export(LocalStruct) export(LogNormalize) export(LogSeuratCommand) export(LogVMR) export(Luminance) export(MULTIseqDemux) export(MapQuery) export(MappingScore) export(MetaFeature) export(MinMax) export(Misc) export(MixingMetric) export(MixscapeHeatmap) export(MixscapeLDA) export(NNPlot) export(NNtoGraph) export(Neighbors) export(NoAxes) export(NoGrid) export(NoLegend) export(NormalizeData) export(PCAPlot) export(PCASigGenes) export(PCHeatmap) export(PercentAbove) export(PercentageFeatureSet) export(PlotClusterTree) export(PlotPerturbScore) export(PolyDimPlot) export(PolyFeaturePlot) export(PredictAssay) export(PrepLDA) export(PrepSCTFindMarkers) export(PrepSCTIntegration) export(PrepareBridgeReference) export(Project) export(ProjectCellEmbeddings) export(ProjectData) export(ProjectDim) export(ProjectDimReduc) export(ProjectIntegration) export(ProjectUMAP) export(PseudobulkExpression) export(PurpleAndYellow) export(RPCAIntegration) export(Radius) export(Read10X) export(Read10X_Image) export(Read10X_h5) export(Read10X_probe_metadata) export(ReadAkoya) export(ReadMtx) export(ReadNanostring) export(ReadParseBio) export(ReadSTARsolo) export(ReadSlideSeq) export(ReadVitessce) export(ReadVizgen) export(ReadXenium) export(Reductions) export(RegroupIdents) export(RelativeCounts) export(RenameCells) export(RenameIdents) export(ReorderIdent) export(RestoreLegend) export(RidgePlot) export(RotatedAxis) export(RowMergeSparseMatrices) export(RunCCA) export(RunGraphLaplacian) export(RunICA) export(RunLDA) export(RunMarkVario) export(RunMixscape) export(RunMoransI) export(RunPCA) export(RunSLSI) export(RunSPCA) export(RunTSNE) export(RunUMAP) export(SCTResults) export(SCTransform) export(SVFInfo) export(SampleUMI) export(SaveAnnoyIndex) export(ScaleData) export(ScaleFactors) export(ScoreJackStraw) export(SelectIntegrationFeatures) export(SelectIntegrationFeatures5) export(SelectSCTIntegrationFeatures) export(SetAssayData) export(SetIdent) export(SetIntegrationData) export(SetQuantile) export(SeuratAxes) export(SeuratTheme) export(SingleCorPlot) export(SingleDimPlot) export(SingleExIPlot) export(SingleImageMap) export(SingleImagePlot) export(SingleRasterMap) export(SingleSpatialPlot) export(SketchData) export(SpatialDimPlot) 
export(SpatialFeaturePlot) export(SpatialPlot) export(SpatialTheme) export(SpatiallyVariableFeatures) export(SplitObject) export(StashIdent) export(Stdev) export(SubsetByBarcodeInflections) export(TSNEPlot) export(Tool) export(TopCells) export(TopFeatures) export(TopNeighbors) export(TransferData) export(TransferSketchLabels) export(UMAPPlot) export(UnSketchEmbeddings) export(UpdateSCTAssays) export(UpdateSeuratObject) export(UpdateSymbolList) export(VST) export(VariableFeaturePlot) export(VariableFeatures) export(VizDimLoadings) export(VlnPlot) export(WhichCells) export(WhiteBackground) export(as.CellDataSet) export(as.Graph) export(as.Neighbor) export(as.Seurat) export(as.SingleCellExperiment) export(as.sparse) export(components) export(scalefactors) exportClasses(AnchorSet) exportClasses(Assay) exportClasses(BridgeReferenceSet) exportClasses(DimReduc) exportClasses(Graph) exportClasses(IntegrationAnchorSet) exportClasses(IntegrationData) exportClasses(JackStrawData) exportClasses(ModalityWeights) exportClasses(Neighbor) exportClasses(Seurat) exportClasses(SeuratCommand) exportClasses(SpatialImage) exportClasses(TransferAnchorSet) exportClasses(VisiumV1) importClassesFrom(Matrix,dgCMatrix) importClassesFrom(SeuratObject,Assay) importClassesFrom(SeuratObject,DimReduc) importClassesFrom(SeuratObject,Graph) importClassesFrom(SeuratObject,JackStrawData) importClassesFrom(SeuratObject,Neighbor) importClassesFrom(SeuratObject,Seurat) importClassesFrom(SeuratObject,SeuratCommand) importClassesFrom(SeuratObject,SpatialImage) importFrom(KernSmooth,bkde) importFrom(MASS,ginv) importFrom(MASS,glm.nb) importFrom(MASS,lda) importFrom(Matrix,Matrix) importFrom(Matrix,as.matrix) importFrom(Matrix,colMeans) importFrom(Matrix,colSums) importFrom(Matrix,crossprod) importFrom(Matrix,diag) importFrom(Matrix,qrR) importFrom(Matrix,readMM) importFrom(Matrix,rowMeans) importFrom(Matrix,rowSums) importFrom(Matrix,sparse.model.matrix) importFrom(Matrix,sparseMatrix) importFrom(Matrix,summary) importFrom(Matrix,t) importFrom(RANN,nn2) importFrom(RColorBrewer,brewer.pal) importFrom(RColorBrewer,brewer.pal.info) importFrom(ROCR,performance) importFrom(ROCR,prediction) importFrom(RSpectra,eigs_sym) importFrom(Rcpp,evalCpp) importFrom(RcppAnnoy,AnnoyAngular) importFrom(RcppAnnoy,AnnoyEuclidean) importFrom(RcppAnnoy,AnnoyHamming) importFrom(RcppAnnoy,AnnoyManhattan) importFrom(RcppHNSW,hnsw_build) importFrom(RcppHNSW,hnsw_search) importFrom(Rtsne,Rtsne) importFrom(SeuratObject,"%!NA%") importFrom(SeuratObject,"%NA%") importFrom(SeuratObject,"%iff%") importFrom(SeuratObject,"%||%") importFrom(SeuratObject,"DefaultAssay<-") importFrom(SeuratObject,"DefaultLayer<-") importFrom(SeuratObject,"Idents<-") importFrom(SeuratObject,"Index<-") importFrom(SeuratObject,"JS<-") importFrom(SeuratObject,"Key<-") importFrom(SeuratObject,"LayerData<-") importFrom(SeuratObject,"Loadings<-") importFrom(SeuratObject,"Misc<-") importFrom(SeuratObject,"Project<-") importFrom(SeuratObject,"Tool<-") importFrom(SeuratObject,"VariableFeatures<-") importFrom(SeuratObject,.CalcN) importFrom(SeuratObject,.CheckFmargin) importFrom(SeuratObject,.FilterObjects) importFrom(SeuratObject,.IsFutureSeurat) importFrom(SeuratObject,.MARGIN) importFrom(SeuratObject,.PropagateList) importFrom(SeuratObject,.SparseSlots) importFrom(SeuratObject,AddMetaData) importFrom(SeuratObject,Assays) importFrom(SeuratObject,AttachDeps) importFrom(SeuratObject,Boundaries) importFrom(SeuratObject,CastAssay) importFrom(SeuratObject,Cells) 
importFrom(SeuratObject,CellsByIdentities) importFrom(SeuratObject,Command) importFrom(SeuratObject,CreateAssayObject) importFrom(SeuratObject,CreateCentroids) importFrom(SeuratObject,CreateDimReducObject) importFrom(SeuratObject,CreateFOV) importFrom(SeuratObject,CreateSegmentation) importFrom(SeuratObject,CreateSeuratObject) importFrom(SeuratObject,DefaultAssay) importFrom(SeuratObject,DefaultBoundary) importFrom(SeuratObject,DefaultDimReduc) importFrom(SeuratObject,DefaultFOV) importFrom(SeuratObject,DefaultLayer) importFrom(SeuratObject,Distances) importFrom(SeuratObject,Embeddings) importFrom(SeuratObject,EmptyDF) importFrom(SeuratObject,Features) importFrom(SeuratObject,FetchData) importFrom(SeuratObject,GetAssayData) importFrom(SeuratObject,GetImage) importFrom(SeuratObject,GetTissueCoordinates) importFrom(SeuratObject,HVFInfo) importFrom(SeuratObject,Idents) importFrom(SeuratObject,Images) importFrom(SeuratObject,Index) importFrom(SeuratObject,Indices) importFrom(SeuratObject,IsGlobal) importFrom(SeuratObject,IsSparse) importFrom(SeuratObject,JS) importFrom(SeuratObject,JoinLayers) importFrom(SeuratObject,Key) importFrom(SeuratObject,Keys) importFrom(SeuratObject,LayerData) importFrom(SeuratObject,Layers) importFrom(SeuratObject,Loadings) importFrom(SeuratObject,LogSeuratCommand) importFrom(SeuratObject,Misc) importFrom(SeuratObject,Molecules) importFrom(SeuratObject,Neighbors) importFrom(SeuratObject,Overlay) importFrom(SeuratObject,PackageCheck) importFrom(SeuratObject,Project) importFrom(SeuratObject,Radius) importFrom(SeuratObject,Reductions) importFrom(SeuratObject,RenameAssays) importFrom(SeuratObject,RenameCells) importFrom(SeuratObject,RenameIdents) importFrom(SeuratObject,ReorderIdent) importFrom(SeuratObject,RowMergeSparseMatrices) importFrom(SeuratObject,SVFInfo) importFrom(SeuratObject,SetAssayData) importFrom(SeuratObject,SetIdent) importFrom(SeuratObject,SparseEmptyMatrix) importFrom(SeuratObject,SpatiallyVariableFeatures) importFrom(SeuratObject,StashIdent) importFrom(SeuratObject,Stdev) importFrom(SeuratObject,StitchMatrix) importFrom(SeuratObject,Tool) importFrom(SeuratObject,UpdateSeuratObject) importFrom(SeuratObject,UpdateSlots) importFrom(SeuratObject,VariableFeatures) importFrom(SeuratObject,WhichCells) importFrom(SeuratObject,as.Graph) importFrom(SeuratObject,as.Neighbor) importFrom(SeuratObject,as.Seurat) importFrom(SeuratObject,as.sparse) importFrom(cluster,clara) importFrom(cowplot,get_legend) importFrom(cowplot,plot_grid) importFrom(cowplot,theme_cowplot) importFrom(fastDummies,dummy_cols) importFrom(fitdistrplus,fitdist) importFrom(future,nbrOfWorkers) importFrom(future,plan) importFrom(future.apply,future_lapply) importFrom(future.apply,future_sapply) importFrom(generics,components) importFrom(ggplot2,Geom) importFrom(ggplot2,GeomPolygon) importFrom(ggplot2,GeomViolin) importFrom(ggplot2,aes) importFrom(ggplot2,aes_string) importFrom(ggplot2,alpha) importFrom(ggplot2,annotation_raster) importFrom(ggplot2,coord_cartesian) importFrom(ggplot2,coord_fixed) importFrom(ggplot2,coord_flip) importFrom(ggplot2,cut_number) importFrom(ggplot2,discrete_scale) importFrom(ggplot2,draw_key_point) importFrom(ggplot2,dup_axis) importFrom(ggplot2,element_blank) importFrom(ggplot2,element_line) importFrom(ggplot2,element_rect) importFrom(ggplot2,element_text) importFrom(ggplot2,facet_grid) importFrom(ggplot2,facet_wrap) importFrom(ggplot2,fortify) importFrom(ggplot2,geom_abline) importFrom(ggplot2,geom_bar) importFrom(ggplot2,geom_blank) importFrom(ggplot2,geom_boxplot) 
importFrom(ggplot2,geom_density) importFrom(ggplot2,geom_hline) importFrom(ggplot2,geom_jitter) importFrom(ggplot2,geom_label) importFrom(ggplot2,geom_line) importFrom(ggplot2,geom_point) importFrom(ggplot2,geom_polygon) importFrom(ggplot2,geom_raster) importFrom(ggplot2,geom_rect) importFrom(ggplot2,geom_smooth) importFrom(ggplot2,geom_text) importFrom(ggplot2,geom_tile) importFrom(ggplot2,geom_violin) importFrom(ggplot2,geom_vline) importFrom(ggplot2,ggplot) importFrom(ggplot2,ggplot_build) importFrom(ggplot2,ggproto) importFrom(ggplot2,ggproto_parent) importFrom(ggplot2,ggsave) importFrom(ggplot2,ggtitle) importFrom(ggplot2,guide_colorbar) importFrom(ggplot2,guide_legend) importFrom(ggplot2,guides) importFrom(ggplot2,labs) importFrom(ggplot2,layer) importFrom(ggplot2,layer_scales) importFrom(ggplot2,margin) importFrom(ggplot2,position_dodge) importFrom(ggplot2,position_jitterdodge) importFrom(ggplot2,scale_alpha) importFrom(ggplot2,scale_alpha_manual) importFrom(ggplot2,scale_alpha_ordinal) importFrom(ggplot2,scale_color_brewer) importFrom(ggplot2,scale_color_distiller) importFrom(ggplot2,scale_color_gradient) importFrom(ggplot2,scale_color_gradientn) importFrom(ggplot2,scale_color_identity) importFrom(ggplot2,scale_color_manual) importFrom(ggplot2,scale_fill_brewer) importFrom(ggplot2,scale_fill_continuous) importFrom(ggplot2,scale_fill_gradient) importFrom(ggplot2,scale_fill_gradientn) importFrom(ggplot2,scale_fill_manual) importFrom(ggplot2,scale_fill_viridis_c) importFrom(ggplot2,scale_radius) importFrom(ggplot2,scale_size) importFrom(ggplot2,scale_x_continuous) importFrom(ggplot2,scale_x_log10) importFrom(ggplot2,scale_y_continuous) importFrom(ggplot2,scale_y_discrete) importFrom(ggplot2,scale_y_log10) importFrom(ggplot2,stat_density2d) importFrom(ggplot2,stat_qq) importFrom(ggplot2,sym) importFrom(ggplot2,theme) importFrom(ggplot2,theme_classic) importFrom(ggplot2,theme_void) importFrom(ggplot2,transform_position) importFrom(ggplot2,unit) importFrom(ggplot2,vars) importFrom(ggplot2,waiver) importFrom(ggplot2,xlab) importFrom(ggplot2,xlim) importFrom(ggplot2,ylab) importFrom(ggplot2,ylim) importFrom(ggrepel,geom_label_repel) importFrom(ggrepel,geom_text_repel) importFrom(ggridges,geom_density_ridges) importFrom(ggridges,theme_ridges) importFrom(grDevices,as.raster) importFrom(grDevices,col2rgb) importFrom(grDevices,colorRampPalette) importFrom(grDevices,rgb) importFrom(graphics,axis) importFrom(graphics,image) importFrom(graphics,locator) importFrom(graphics,par) importFrom(graphics,plot) importFrom(graphics,plot.new) importFrom(graphics,smoothScatter) importFrom(graphics,title) importFrom(grid,addGrob) importFrom(grid,editGrob) importFrom(grid,gTree) importFrom(grid,gpar) importFrom(grid,grobName) importFrom(grid,grobTree) importFrom(grid,nullGrob) importFrom(grid,pointsGrob) importFrom(grid,rasterGrob) importFrom(grid,unit) importFrom(grid,viewport) importFrom(httr,GET) importFrom(httr,accept_json) importFrom(httr,build_url) importFrom(httr,content) importFrom(httr,parse_url) importFrom(httr,status_code) importFrom(httr,timeout) importFrom(ica,icafast) importFrom(ica,icaimax) importFrom(ica,icajade) importFrom(igraph,E) importFrom(igraph,graph.adjacency) importFrom(igraph,graph_from_adj_list) importFrom(igraph,graph_from_adjacency_matrix) importFrom(igraph,plot.igraph) importFrom(irlba,irlba) importFrom(jsonlite,fromJSON) importFrom(jsonlite,read_json) importFrom(leiden,leiden) importFrom(lifecycle,deprecate_soft) importFrom(lifecycle,deprecate_stop) 
importFrom(lifecycle,deprecate_warn) importFrom(lifecycle,deprecated) importFrom(lifecycle,is_present) importFrom(lmtest,lrtest) importFrom(matrixStats,rowAnyNAs) importFrom(matrixStats,rowMeans2) importFrom(matrixStats,rowSds) importFrom(matrixStats,rowSums2) importFrom(methods,"slot<-") importFrom(methods,.hasSlot) importFrom(methods,as) importFrom(methods,getMethod) importFrom(methods,is) importFrom(methods,new) importFrom(methods,setAs) importFrom(methods,setClass) importFrom(methods,setClassUnion) importFrom(methods,setGeneric) importFrom(methods,setMethod) importFrom(methods,setOldClass) importFrom(methods,setValidity) importFrom(methods,signature) importFrom(methods,slot) importFrom(methods,slotNames) importFrom(miniUI,gadgetTitleBar) importFrom(miniUI,miniButtonBlock) importFrom(miniUI,miniContentPanel) importFrom(miniUI,miniPage) importFrom(miniUI,miniTitleBarButton) importFrom(patchwork,wrap_plots) importFrom(pbapply,pbapply) importFrom(pbapply,pblapply) importFrom(pbapply,pbsapply) importFrom(plotly,add_annotations) importFrom(plotly,layout) importFrom(plotly,plot_ly) importFrom(plotly,raster2uri) importFrom(png,readPNG) importFrom(progressr,progressor) importFrom(purrr,imap) importFrom(reticulate,import) importFrom(reticulate,py_module_available) importFrom(reticulate,py_set_seed) importFrom(rlang,"!!!") importFrom(rlang,"!!") importFrom(rlang,abort) importFrom(rlang,arg_match) importFrom(rlang,arg_match0) importFrom(rlang,as_label) importFrom(rlang,as_name) importFrom(rlang,caller_env) importFrom(rlang,check_installed) importFrom(rlang,enquo) importFrom(rlang,exec) importFrom(rlang,inform) importFrom(rlang,is_integerish) importFrom(rlang,is_na) importFrom(rlang,is_quosure) importFrom(rlang,is_scalar_character) importFrom(rlang,is_scalar_integerish) importFrom(rlang,quo_get_env) importFrom(rlang,quo_get_expr) importFrom(rlang,sym) importFrom(rlang,warn) importFrom(scales,brewer_pal) importFrom(scales,hue_pal) importFrom(scales,rescale) importFrom(scales,squish_infinite) importFrom(scales,zero_range) importFrom(scattermore,geom_scattermore) importFrom(sctransform,correct_counts) importFrom(sctransform,get_residual_var) importFrom(sctransform,get_residuals) importFrom(sctransform,vst) importFrom(shiny,brushOpts) importFrom(shiny,brushedPoints) importFrom(shiny,clickOpts) importFrom(shiny,fillRow) importFrom(shiny,hoverOpts) importFrom(shiny,nearPoints) importFrom(shiny,observe) importFrom(shiny,observeEvent) importFrom(shiny,plotOutput) importFrom(shiny,reactiveValues) importFrom(shiny,renderPlot) importFrom(shiny,renderPrint) importFrom(shiny,runGadget) importFrom(shiny,selectInput) importFrom(shiny,sidebarPanel) importFrom(shiny,sliderInput) importFrom(shiny,stopApp) importFrom(shiny,updateSelectInput) importFrom(shiny,verbatimTextOutput) importFrom(spatstat.explore,markvario) importFrom(spatstat.geom,ppp) importFrom(stats,aggregate) importFrom(stats,anova) importFrom(stats,approxfun) importFrom(stats,as.dist) importFrom(stats,as.formula) importFrom(stats,ave) importFrom(stats,coef) importFrom(stats,complete.cases) importFrom(stats,cor) importFrom(stats,dist) importFrom(stats,dnorm) importFrom(stats,glm) importFrom(stats,hclust) importFrom(stats,kmeans) importFrom(stats,lm) importFrom(stats,loess) importFrom(stats,median) importFrom(stats,na.omit) importFrom(stats,p.adjust) importFrom(stats,pchisq) importFrom(stats,pnbinom) importFrom(stats,poisson) importFrom(stats,prcomp) importFrom(stats,predict) importFrom(stats,prop.test) importFrom(stats,quantile) importFrom(stats,qunif) 
importFrom(stats,relevel) importFrom(stats,residuals) importFrom(stats,rnorm) importFrom(stats,runif) importFrom(stats,sd) importFrom(stats,setNames) importFrom(stats,t.test) importFrom(stats,var) importFrom(stats,wilcox.test) importFrom(tibble,tibble) importFrom(tools,file_ext) importFrom(tools,file_path_sans_ext) importFrom(utils,argsAnywhere) importFrom(utils,capture.output) importFrom(utils,file_test) importFrom(utils,globalVariables) importFrom(utils,head) importFrom(utils,isS3method) importFrom(utils,isS3stdGeneric) importFrom(utils,lsf.str) importFrom(utils,methods) importFrom(utils,packageVersion) importFrom(utils,read.csv) importFrom(utils,read.delim) importFrom(utils,read.table) importFrom(utils,setTxtProgressBar) importFrom(utils,tail) importFrom(utils,txtProgressBar) importFrom(utils,write.table) importFrom(uwot,umap) importFrom(uwot,umap_transform) importMethodsFrom(Matrix,t) useDynLib(Seurat)
Seurat/LICENSE0000644000176200001440000000005414525500037012522 0ustar liggesusers
YEAR: 2021
COPYRIGHT HOLDER: Seurat authors
Seurat/README.md0000644000176200001440000000300714525500037012775 0ustar liggesusers
[![Build Status](https://travis-ci.com/satijalab/seurat.svg?branch=master)](https://app.travis-ci.com:443/github/satijalab/seurat) [![AppVeyor build status](https://ci.appveyor.com/api/projects/status/github/satijalab/seurat?branch=master&svg=true)](https://ci.appveyor.com/project/satijalab/seurat) [![CRAN Version](https://www.r-pkg.org/badges/version/Seurat)](https://cran.r-project.org/package=Seurat) [![CRAN Downloads](https://cranlogs.r-pkg.org/badges/Seurat)](https://cran.r-project.org/package=Seurat)

# Seurat v5

Seurat is an R toolkit for single-cell genomics, developed and maintained by the Satija Lab at NYGC. We are excited to release Seurat v5! This update introduces new functionality for spatial, multimodal, and scalable single-cell analysis.

Seurat v5 is backwards-compatible with previous versions, so users will continue to be able to re-run existing workflows.

Instructions, documentation, and tutorials can be found at:

* https://satijalab.org/seurat

Seurat is also hosted on GitHub; you can view and clone the repository at

* https://github.com/satijalab/seurat

Seurat has been successfully installed on Mac OS X, Linux, and Windows, using the devtools package to install directly from GitHub.

Improvements and new features will be added on a regular basis; please post on the [GitHub page](https://github.com/satijalab/seurat) with any questions or if you would like to contribute.

For a version history/changelog, please see the [NEWS file](https://github.com/satijalab/seurat/blob/master/NEWS.md).
Seurat/data/0000755000176200001440000000000014525500037012427 5ustar liggesusers
Seurat/data/cc.genes.rda0000644000176200001440000000101014525500037014604 0ustar liggesusers
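The README above notes that Seurat can be installed directly from GitHub using the devtools package. A minimal installation sketch follows; it is not part of the package sources, and the exact call is an assumption based on that note.

# Install the development version from GitHub (assumption based on the README note above)
install.packages("devtools")
devtools::install_github("satijalab/seurat")
library(Seurat)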
Seurat/man/0000755000176200001440000000000014525500056012272 5ustar liggesusersSeurat/man/Load10X_Spatial.Rd0000644000176200001440000000260214525500037015405 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/preprocessing.R \name{Load10X_Spatial} \alias{Load10X_Spatial} \title{Load a 10x Genomics Visium Spatial Experiment into a \code{Seurat} object} \usage{ Load10X_Spatial( data.dir, filename = "filtered_feature_bc_matrix.h5", assay = "Spatial", slice = "slice1", filter.matrix = TRUE, to.upper = FALSE, image = NULL, ... ) } \arguments{ \item{data.dir}{Directory containing the H5 file specified by \code{filename} and the image data in a subdirectory called \code{spatial}} \item{filename}{Name of H5 file containing the feature barcode matrix} \item{assay}{Name of the initial assay} \item{slice}{Name for the stored image of the tissue slice} \item{filter.matrix}{Only keep spots that have been determined to be over tissue} \item{to.upper}{Converts all feature names to upper case. Can be useful when analyses require comparisons between human and mouse gene names for example.} \item{image}{Name of image to pull the coordinates from} \item{...}{Arguments passed to \code{\link{Read10X_h5}}} } \value{ A \code{Seurat} object } \description{ Load a 10x Genomics Visium Spatial Experiment into a \code{Seurat} object } \examples{ \dontrun{ data_dir <- 'path/to/data/directory' list.files(data_dir) # Should show filtered_feature_bc_matrix.h5 Load10X_Spatial(data.dir = data_dir) } } \concept{preprocessing} Seurat/man/DimHeatmap.Rd0000644000176200001440000000463314525500037014577 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/visualization.R, R/convenience.R \name{DimHeatmap} \alias{DimHeatmap} \alias{PCHeatmap} \title{Dimensional reduction heatmap} \usage{ DimHeatmap( object, dims = 1, nfeatures = 30, cells = NULL, reduction = "pca", disp.min = -2.5, disp.max = NULL, balanced = TRUE, projected = FALSE, ncol = NULL, fast = TRUE, raster = TRUE, slot = "scale.data", assays = NULL, combine = TRUE ) PCHeatmap(object, ...) } \arguments{ \item{object}{Seurat object} \item{dims}{Dimensions to plot} \item{nfeatures}{Number of genes to plot} \item{cells}{A list of cells to plot. If numeric, just plots the top cells.} \item{reduction}{Which dimensional reduction to use} \item{disp.min}{Minimum display value (all values below are clipped)} \item{disp.max}{Maximum display value (all values above are clipped); defaults to 2.5 if \code{slot} is 'scale.data', 6 otherwise} \item{balanced}{Plot an equal number of genes with both + and - scores.} \item{projected}{Use the full projected dimensional reduction} \item{ncol}{Number of columns to plot} \item{fast}{If true, use \code{image} to generate plots; faster than using ggplot2, but not customizable} \item{raster}{If true, plot with geom_raster, else use geom_tile. geom_raster may look blurry on some viewing applications such as Preview due to how the raster is interpolated. Set this to FALSE if you are encountering that issue (note that plots may take longer to produce/render).} \item{slot}{Data slot to use, choose from 'raw.data', 'data', or 'scale.data'} \item{assays}{A vector of assays to pull data from} \item{combine}{Combine plots into a single \code{\link[patchwork]{patchwork}ed} ggplot object. If \code{FALSE}, return a list of ggplot objects} \item{...}{Extra parameters passed to \code{DimHeatmap}} } \value{ No return value by default. 
If using fast = FALSE, will return a \code{\link[patchwork]{patchwork}ed} ggplot object if combine = TRUE, otherwise returns a list of ggplot objects } \description{ Draws a heatmap focusing on a principal component. Both cells and genes are sorted by their principal component scores. Allows for nice visualization of sources of heterogeneity in the dataset. } \examples{ data("pbmc_small") DimHeatmap(object = pbmc_small) } \seealso{ \code{\link[graphics]{image}} \code{\link[ggplot2]{geom_raster}} } \concept{convenience} \concept{visualization} Seurat/man/ExpMean.Rd0000644000176200001440000000072214525500037014116 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/utilities.R \name{ExpMean} \alias{ExpMean} \title{Calculate the mean of logged values} \usage{ ExpMean(x, ...) } \arguments{ \item{x}{A vector of values} \item{...}{Other arguments (not used)} } \value{ Returns the mean in log-space } \description{ Calculate mean of logged values in non-log space (return answer in log-space) } \examples{ ExpMean(x = c(1, 2, 3)) } \concept{utilities} Seurat/man/SingleRasterMap.Rd0000644000176200001440000000201614525500037015617 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/visualization.R \name{SingleRasterMap} \alias{SingleRasterMap} \title{A single heatmap from ggplot2 using geom_raster} \usage{ SingleRasterMap( data, raster = TRUE, cell.order = NULL, feature.order = NULL, colors = PurpleAndYellow(), disp.min = -2.5, disp.max = 2.5, limits = NULL, group.by = NULL ) } \arguments{ \item{data}{A matrix or data frame with data to plot} \item{raster}{switch between geom_raster and geom_tile} \item{cell.order}{...} \item{feature.order}{...} \item{colors}{A vector of colors to use} \item{disp.min}{Minimum display value (all values below are clipped)} \item{disp.max}{Maximum display value (all values above are clipped)} \item{limits}{A two-length numeric vector with the limits for colors on the plot} \item{group.by}{A vector to group cells by, should be one grouping identity per cell} } \value{ A ggplot2 object } \description{ A single heatmap from ggplot2 using geom_raster } \keyword{internal} Seurat/man/JackStrawData-class.Rd0000644000176200001440000000062214525500037016346 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/reexports.R \docType{class} \name{JackStrawData-class} \alias{JackStrawData-class} \title{The JackStrawData Class} \description{ For more details, please see the documentation in \code{\link[SeuratObject:JackStrawData]{SeuratObject}} } \seealso{ \code{\link[SeuratObject:JackStrawData]{SeuratObject::JackStrawData-class}} } Seurat/man/cc.genes.updated.2019.Rd0000644000176200001440000000237614525500037016274 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/data.R \docType{data} \name{cc.genes.updated.2019} \alias{cc.genes.updated.2019} \title{Cell cycle genes: 2019 update} \format{ A list of two vectors \describe{ \item{s.genes}{Genes associated with S-phase} \item{g2m.genes}{Genes associated with G2M-phase} } } \source{ \url{https://www.science.org/doi/abs/10.1126/science.aad0501} } \usage{ cc.genes.updated.2019 } \description{ A list of genes used in cell-cycle regression, updated with 2019 symbols } \section{Updated symbols}{ The following symbols were updated from \code{\link{cc.genes}} \describe{ \item{s.genes}{ \itemize{ \item \emph{MCM2}: \emph{MCM7} \item \emph{MLF1IP}: \emph{CENPU} \item 
\emph{RPA2}: \emph{POLR1B} \item \emph{BRIP1}: \emph{MRPL36} } } \item{g2m.genes}{ \itemize{ \item \emph{FAM64A}: \emph{PIMREG} \item \emph{HN1}: \emph{JPT1} } } } } \examples{ \dontrun{ cc.genes.updated.2019 <- cc.genes cc.genes.updated.2019$s.genes <- UpdateSymbolList(symbols = cc.genes.updated.2019$s.genes) cc.genes.updated.2019$g2m.genes <- UpdateSymbolList(symbols = cc.genes.updated.2019$g2m.genes) } } \seealso{ \code{\link{cc.genes}} } \concept{data} \keyword{datasets} Seurat/man/NNtoGraph.Rd0000644000176200001440000000106414525500037014421 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/integration.R \name{NNtoGraph} \alias{NNtoGraph} \title{Convert Neighbor class to an asymmetrical Graph class} \usage{ NNtoGraph(nn.object, col.cells = NULL, weighted = FALSE) } \arguments{ \item{nn.object}{A Neighbor class object} \item{col.cells}{Cell names of the neighbors; the cell names in nn.object are used by default} \item{weighted}{Determine whether to use distances as edge weights in the Graph} } \value{ Returns a Graph object } \description{ Convert Neighbor class to an asymmetrical Graph class } Seurat/man/ProjectDim.Rd0000644000176200001440000000262714525500037014627 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/dimensional_reduction.R \name{ProjectDim} \alias{ProjectDim} \title{Project Dimensional reduction onto full dataset} \usage{ ProjectDim( object, reduction = "pca", assay = NULL, dims.print = 1:5, nfeatures.print = 20, overwrite = FALSE, do.center = FALSE, verbose = TRUE ) } \arguments{ \item{object}{Seurat object} \item{reduction}{Reduction to use} \item{assay}{Assay to use} \item{dims.print}{Number of dims to print features for} \item{nfeatures.print}{Number of features with highest/lowest loadings to print for each dimension} \item{overwrite}{Replace the existing data in feature.loadings} \item{do.center}{Center the dataset prior to projection (should be set to TRUE)} \item{verbose}{Print top genes associated with the projected dimensions} } \value{ Returns Seurat object with the projected values } \description{ Takes a pre-computed dimensional reduction (typically calculated on a subset of genes) and projects this onto the entire dataset (all genes). Note that the cell loadings will remain unchanged, but now there are gene loadings for all genes. } \examples{ data("pbmc_small") pbmc_small pbmc_small <- ProjectDim(object = pbmc_small, reduction = "pca") # Visualize top projected genes in a heatmap DimHeatmap(object = pbmc_small, reduction = "pca", dims = 1, balanced = TRUE) } \concept{dimensional_reduction} Seurat/man/GetAssay.Rd0000644000176200001440000000114114525500037014275 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/generics.R, R/objects.R \name{GetAssay} \alias{GetAssay} \alias{GetAssay.Seurat} \title{Get an Assay object from a given Seurat object.} \usage{ GetAssay(object, ...) \method{GetAssay}{Seurat}(object, assay = NULL, ...) } \arguments{ \item{object}{An object} \item{...}{Arguments passed to other methods} \item{assay}{Assay to get} } \value{ Returns an Assay object } \description{ Get an Assay object from a given Seurat object. 
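NNtoGraph(), documented earlier on this page, ships without a usage example. The sketch below is not from the package documentation; it assumes a typical workflow in which FindNeighbors(..., return.neighbor = TRUE) has stored a Neighbor object under the default name "RNA.nn" (an assumption, not stated in the .Rd file above).

# Minimal sketch: convert a stored Neighbor object into an asymmetrical Graph
library(Seurat)
data("pbmc_small")
pbmc_small <- FindNeighbors(pbmc_small, dims = 1:10, return.neighbor = TRUE)
nn.obj <- pbmc_small[["RNA.nn"]]  # assumed default Neighbor name
nn.graph <- NNtoGraph(nn.object = nn.obj, weighted = FALSE)
class(nn.graph)  # expected to be a Graph object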
} \examples{ data("pbmc_small") GetAssay(object = pbmc_small, assay = "RNA") } \concept{objects} Seurat/man/CalcDispersion.Rd0000644000176200001440000000136014525500037015462 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/preprocessing5.R \name{CalcDispersion} \alias{CalcDispersion} \title{Calculate dispersion of features} \usage{ CalcDispersion( object, mean.function = FastExpMean, dispersion.function = FastLogVMR, num.bin = 20, binning.method = "equal_width", verbose = TRUE, ... ) } \arguments{ \item{object}{Data matrix} \item{mean.function}{Function to calculate mean} \item{dispersion.function}{Function to calculate dispersion} \item{num.bin}{Number of bins to use} \item{binning.method}{Method to use for binning. Options are 'equal_width' or 'equal_frequency'} \item{verbose}{Display progress} } \description{ Calculate dispersion of features } \keyword{internal} Seurat/man/FindSubCluster.Rd0000644000176200001440000000205314525500037015454 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/clustering.R \name{FindSubCluster} \alias{FindSubCluster} \title{Find subclusters under one cluster} \usage{ FindSubCluster( object, cluster, graph.name, subcluster.name = "sub.cluster", resolution = 0.5, algorithm = 1 ) } \arguments{ \item{object}{An object} \item{cluster}{The cluster to be sub-clustered} \item{graph.name}{Name of graph to use for the clustering algorithm} \item{subcluster.name}{The name of the sub-cluster column added to the meta.data} \item{resolution}{Value of the resolution parameter, use a value above (below) 1.0 if you want to obtain a larger (smaller) number of communities.} \item{algorithm}{Algorithm for modularity optimization (1 = original Louvain algorithm; 2 = Louvain algorithm with multilevel refinement; 3 = SLM algorithm; 4 = Leiden algorithm). Leiden requires the leidenalg python module.} } \value{ Returns an object with sub-cluster labels stored in the \code{subcluster.name} metadata column } \description{ Find subclusters under one cluster } \concept{clustering} Seurat/man/PolyDimPlot.Rd0000644000176200001440000000151614525500037014777 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/visualization.R \name{PolyDimPlot} \alias{PolyDimPlot} \title{Polygon DimPlot} \usage{ PolyDimPlot( object, group.by = NULL, cells = NULL, poly.data = "spatial", flip.coords = FALSE ) } \arguments{ \item{object}{Seurat object} \item{group.by}{A grouping variable present in the metadata. Default is to use the groupings present in the current cell identities (\code{Idents(object = object)})} \item{cells}{Vector of cells to plot (default is all cells)} \item{poly.data}{Name of the polygon dataframe in the misc slot} \item{flip.coords}{Flip x and y coordinates} } \value{ Returns a ggplot object } \description{ Plot cells as polygons, rather than single points. Color cells by identity, or a categorical variable in metadata } \concept{visualization} Seurat/man/SubsetByBarcodeInflections.Rd0000644000176200001440000000203514525500037017776 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/preprocessing.R \name{SubsetByBarcodeInflections} \alias{SubsetByBarcodeInflections} \title{Subset a Seurat Object based on the Barcode Distribution Inflection Points} \usage{ SubsetByBarcodeInflections(object) } \arguments{ \item{object}{Seurat object} } \value{ Returns a subsetted Seurat object. 
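FindSubCluster(), documented just above, has no example in this section. A minimal sketch follows (not from the package documentation); the graph name "RNA_snn" and the choice of cluster "1" are assumptions about a typical object processed with FindNeighbors() and FindClusters().

# Minimal sketch: sub-cluster one cluster using the shared nearest neighbor graph
library(Seurat)
data("pbmc_small")
pbmc_small <- FindNeighbors(pbmc_small, dims = 1:10)  # creates an "RNA_snn" graph (assumed default name)
pbmc_small <- FindSubCluster(
  object          = pbmc_small,
  cluster         = "1",        # hypothetical cluster assumed to be present in Idents(pbmc_small)
  graph.name      = "RNA_snn",
  subcluster.name = "sub.cluster",
  resolution      = 0.5,
  algorithm       = 1
)
head(pbmc_small$sub.cluster)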
} \description{ This convenience function subsets a Seurat object based on calculated inflection points. } \details{ See [CalculateBarcodeInflections()] to calculate inflection points and [BarcodeInflectionsPlot()] to visualize and test inflection point calculations. } \examples{ data("pbmc_small") pbmc_small <- CalculateBarcodeInflections( object = pbmc_small, group.column = 'groups', threshold.low = 20, threshold.high = 30 ) SubsetByBarcodeInflections(object = pbmc_small) } \seealso{ \code{\link{CalculateBarcodeInflections}} \code{\link{BarcodeInflectionsPlot}} } \author{ Robert A. Amezquita, \email{robert.amezquita@fredhutch.org} } \concept{preprocessing} Seurat/man/subset.AnchorSet.Rd0000644000176200001440000000265714525500037015764 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/objects.R \name{subset.AnchorSet} \alias{subset.AnchorSet} \title{Subset an AnchorSet object} \usage{ \method{subset}{AnchorSet}( x, score.threshold = NULL, disallowed.dataset.pairs = NULL, dataset.matrix = NULL, group.by = NULL, disallowed.ident.pairs = NULL, ident.matrix = NULL, ... ) } \arguments{ \item{x}{object to be subsetted.} \item{score.threshold}{Only anchor pairs with scores greater than this value are retained.} \item{disallowed.dataset.pairs}{Remove any anchors formed between the provided pairs. E.g. \code{list(c(1, 5), c(1, 2))} filters out any anchors between datasets 1 and 5 and datasets 1 and 2.} \item{dataset.matrix}{Provide a binary matrix specifying whether a dataset pair is allowable (1) or not (0). Should be a dataset x dataset matrix.} \item{group.by}{Grouping variable to determine allowable ident pairs} \item{disallowed.ident.pairs}{Remove any anchors formed between provided ident pairs. E.g. \code{list(c("CD4", "CD8"), c("B-cell", "T-cell"))}} \item{ident.matrix}{Provide a binary matrix specifying whether an ident pair is allowable (1) or not (0). Should be an ident x ident symmetric matrix} \item{...}{further arguments to be passed to or from other methods.} } \value{ Returns an \code{\link{AnchorSet}} object with specified anchors filtered out } \description{ Subset an AnchorSet object } \concept{objects} Seurat/man/Read10X.Rd0000644000176200001440000000357414525500037013735 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/preprocessing.R \name{Read10X} \alias{Read10X} \title{Load in data from 10X} \usage{ Read10X( data.dir, gene.column = 2, cell.column = 1, unique.features = TRUE, strip.suffix = FALSE ) } \arguments{ \item{data.dir}{Directory containing the matrix.mtx, genes.tsv (or features.tsv), and barcodes.tsv files provided by 10X. A vector or named vector can be given in order to load several data directories. If a named vector is given, the cell barcode names will be prefixed with the name.} \item{gene.column}{Specify which column of genes.tsv or features.tsv to use for gene names; default is 2} \item{cell.column}{Specify which column of barcodes.tsv to use for cell names; default is 1} \item{unique.features}{Make feature names unique (default TRUE)} \item{strip.suffix}{Remove trailing "-1" if present in all cell barcodes.} } \value{ If features.csv indicates the data has multiple data types, a list containing a sparse matrix of the data from each type will be returned. Otherwise a sparse matrix containing the expression data will be returned. } \description{ Enables easy loading of sparse data matrices provided by 10X genomics. 
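The subset() method for AnchorSet objects described above also lacks an example here. The sketch below is illustrative only: obj1, obj2, and obj3 are hypothetical, preprocessed Seurat objects, and the score threshold is arbitrary; none of this is taken from the .Rd file.

# Minimal sketch: filter an AnchorSet after anchor finding (obj1/obj2/obj3 are hypothetical)
library(Seurat)
anchors <- FindIntegrationAnchors(object.list = list(obj1, obj2, obj3), dims = 1:30)
# Keep only anchor pairs scoring above 0.5
anchors.filtered <- subset(anchors, score.threshold = 0.5)
# Drop anchors formed between datasets 1 and 3
anchors.no13 <- subset(anchors, disallowed.dataset.pairs = list(c(1, 3)))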
} \examples{ \dontrun{ # For output from CellRanger < 3.0 data_dir <- 'path/to/data/directory' list.files(data_dir) # Should show barcodes.tsv, genes.tsv, and matrix.mtx expression_matrix <- Read10X(data.dir = data_dir) seurat_object = CreateSeuratObject(counts = expression_matrix) # For output from CellRanger >= 3.0 with multiple data types data_dir <- 'path/to/data/directory' list.files(data_dir) # Should show barcodes.tsv.gz, features.tsv.gz, and matrix.mtx.gz data <- Read10X(data.dir = data_dir) seurat_object = CreateSeuratObject(counts = data$`Gene Expression`) seurat_object[['Protein']] = CreateAssayObject(counts = data$`Antibody Capture`) } } \concept{preprocessing} Seurat/man/GetTissueCoordinates.Rd0000644000176200001440000000200114525500037016660 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/objects.R \name{GetTissueCoordinates.SlideSeq} \alias{GetTissueCoordinates.SlideSeq} \alias{GetTissueCoordinates.STARmap} \alias{GetTissueCoordinates.VisiumV1} \title{Get Tissue Coordinates} \usage{ \method{GetTissueCoordinates}{SlideSeq}(object, ...) \method{GetTissueCoordinates}{STARmap}(object, qhulls = FALSE, ...) \method{GetTissueCoordinates}{VisiumV1}( object, scale = "lowres", cols = c("imagerow", "imagecol"), ... ) } \arguments{ \item{object}{An object} \item{...}{Arguments passed to other methods} \item{qhulls}{return qhulls instead of centroids} \item{scale}{A factor to scale the coordinates by; choose from: 'tissue', 'fiducial', 'hires', 'lowres', or \code{NULL} for no scaling} \item{cols}{Columns of tissue coordinates data.frame to pull} } \description{ Get Tissue Coordinates } \seealso{ \code{\link[SeuratObject:GetTissueCoordinates]{SeuratObject::GetTissueCoordinates}} } \concept{objects} \concept{spatial} Seurat/man/RunMarkVario.Rd0000644000176200001440000000117114525500037015140 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/preprocessing.R \name{RunMarkVario} \alias{RunMarkVario} \title{Run the mark variogram computation on a given position matrix and expression matrix.} \usage{ RunMarkVario(spatial.location, data, ...) } \arguments{ \item{spatial.location}{A 2 column matrix giving the spatial locations of each of the data points also in data} \item{data}{Matrix containing the data used as "marks" (e.g. gene expression)} \item{...}{Arguments passed to markvario} } \description{ Wraps the functionality of markvario from the spatstat package. } \concept{preprocessing} Seurat/man/FoldChange.Rd0000644000176200001440000000670714525500037014564 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/generics.R, R/differential_expression.R \name{FoldChange} \alias{FoldChange} \alias{FoldChange.default} \alias{FoldChange.Assay} \alias{FoldChange.SCTAssay} \alias{FoldChange.DimReduc} \alias{FoldChange.Seurat} \title{Fold Change} \usage{ FoldChange(object, ...) \method{FoldChange}{default}(object, cells.1, cells.2, mean.fxn, fc.name, features = NULL, ...) \method{FoldChange}{Assay}( object, cells.1, cells.2, features = NULL, slot = "data", pseudocount.use = 1, fc.name = NULL, mean.fxn = NULL, base = 2, norm.method = NULL, ... ) \method{FoldChange}{SCTAssay}( object, cells.1, cells.2, features = NULL, slot = "data", pseudocount.use = 1, fc.name = NULL, mean.fxn = NULL, base = 2, ... ) \method{FoldChange}{DimReduc}( object, cells.1, cells.2, features = NULL, slot = NULL, pseudocount.use = 1, fc.name = NULL, mean.fxn = NULL, ... 
) \method{FoldChange}{Seurat}( object, ident.1 = NULL, ident.2 = NULL, group.by = NULL, subset.ident = NULL, assay = NULL, slot = "data", reduction = NULL, features = NULL, pseudocount.use = 1, mean.fxn = NULL, base = 2, fc.name = NULL, ... ) } \arguments{ \item{object}{A Seurat object} \item{...}{Arguments passed to other methods} \item{cells.1}{Vector of cell names belonging to group 1} \item{cells.2}{Vector of cell names belonging to group 2} \item{mean.fxn}{Function to use for fold change or average difference calculation} \item{fc.name}{Name of the fold change, average difference, or custom function column in the output data.frame} \item{features}{Features to calculate fold change for. If NULL, use all features} \item{slot}{Slot to pull data from} \item{pseudocount.use}{Pseudocount to add to averaged expression values when calculating logFC.} \item{base}{The base with respect to which logarithms are computed.} \item{norm.method}{Normalization method for mean function selection when \code{slot} is \dQuote{\code{data}}} \item{ident.1}{Identity class to calculate fold change for; pass an object of class \code{phylo} or 'clustertree' to calculate fold change for a node in a cluster tree; passing 'clustertree' requires \code{\link{BuildClusterTree}} to have been run} \item{ident.2}{A second identity class for comparison; if \code{NULL}, use all other cells for comparison; if an object of class \code{phylo} or 'clustertree' is passed to \code{ident.1}, must pass a node to calculate fold change for} \item{group.by}{Regroup cells into a different identity class prior to calculating fold change (see example in \code{\link{FindMarkers}})} \item{subset.ident}{Subset a particular identity class prior to regrouping. Only relevant if group.by is set (see example in \code{\link{FindMarkers}})} \item{assay}{Assay to use in fold change calculation} \item{reduction}{Reduction to use - will calculate average difference on cell embeddings} } \value{ Returns a data.frame } \description{ Calculate log fold change and percentage of cells expressing each feature for different identity classes. } \details{ If the slot is \code{scale.data} or a reduction is specified, average difference is returned instead of log fold change and the column is named "avg_diff". Otherwise, log2 fold change is returned with column named "avg_log2_FC". } \examples{ \dontrun{ data("pbmc_small") FoldChange(pbmc_small, ident.1 = 1) } } \seealso{ \code{FindMarkers} } \concept{differential_expression} Seurat/man/CustomDistance.Rd0000644000176200001440000000147114525500037015510 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/utilities.R \name{CustomDistance} \alias{CustomDistance} \title{Run a custom distance function on an input data matrix} \usage{ CustomDistance(my.mat, my.function, ...) 
} \arguments{ \item{my.mat}{A matrix to calculate distance on} \item{my.function}{A function to calculate distance} \item{...}{Extra parameters to my.function} } \value{ A distance matrix } \description{ Run a custom distance function on an input data matrix } \examples{ data("pbmc_small") # Define custom distance matrix manhattan.distance <- function(x, y) return(sum(abs(x-y))) input.data <- GetAssayData(pbmc_small, assay.type = "RNA", slot = "scale.data") cell.manhattan.dist <- CustomDistance(input.data, manhattan.distance) } \author{ Jean Fan } \concept{utilities} Seurat/man/AverageExpression.Rd0000644000176200001440000000467114525500037016222 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/utilities.R \name{AverageExpression} \alias{AverageExpression} \title{Averaged feature expression by identity class} \usage{ AverageExpression( object, assays = NULL, features = NULL, return.seurat = FALSE, group.by = "ident", add.ident = NULL, layer = "data", slot = deprecated(), verbose = TRUE, ... ) } \arguments{ \item{object}{Seurat object} \item{assays}{Which assays to use. Default is all assays} \item{features}{Features to analyze. Default is all features in the assay} \item{return.seurat}{Whether to return the data as a Seurat object. Default is FALSE} \item{group.by}{Category (or vector of categories) for grouping (e.g, ident, replicate, celltype); 'ident' by default To use multiple categories, specify a vector, such as c('ident', 'replicate', 'celltype')} \item{add.ident}{(Deprecated). Place an additional label on each cell prior to pseudobulking} \item{layer}{Layer(s) to use; if multiple layers are given, assumed to follow the order of 'assays' (if specified) or object's assays} \item{slot}{(Deprecated). Slots(s) to use} \item{verbose}{Print messages and show progress bar} \item{...}{Arguments to be passed to methods such as \code{\link{CreateSeuratObject}}} } \value{ Returns a matrix with genes as rows, identity classes as columns. If return.seurat is TRUE, returns an object of class \code{\link{Seurat}}. } \description{ Returns averaged expression values for each identity class. } \details{ If layer is set to 'data', this function assumes that the data has been log normalized and therefore feature values are exponentiated prior to averaging so that averaging is done in non-log space. Otherwise, if layer is set to either 'counts' or 'scale.data', no exponentiation is performed prior to averaging. If \code{return.seurat = TRUE} and layer is not 'scale.data', averaged values are placed in the 'counts' layer of the returned object and 'log1p' is run on the averaged counts and placed in the 'data' layer \code{\link{ScaleData}} is then run on the default assay before returning the object. If \code{return.seurat = TRUE} and layer is 'scale.data', the 'counts' layer contains average counts and 'scale.data' is set to the averaged values of 'scale.data'. 
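As a complement to the details above on how AverageExpression() handles layers, the short sketch below (not part of the generated examples) illustrates the return.seurat = TRUE behaviour: averaged values land in the 'counts' layer and their log1p transform in the 'data' layer of the returned object.

# Minimal sketch illustrating the layer handling described above
library(Seurat)
data("pbmc_small")
avg <- AverageExpression(pbmc_small, return.seurat = TRUE, layer = "data")
avg
head(GetAssayData(avg, layer = "counts"))  # averaged (non-log) values per identity class
head(GetAssayData(avg, layer = "data"))    # log1p of the averaged values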
} \examples{ data("pbmc_small") head(AverageExpression(object = pbmc_small)$RNA) head(AverageExpression(object = pbmc_small, group.by = c('ident', 'groups'))$RNA) } \concept{utilities} Seurat/man/PredictAssay.Rd0000644000176200001440000000255614525500037015163 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/clustering.R \name{PredictAssay} \alias{PredictAssay} \title{Predict value from nearest neighbors} \usage{ PredictAssay( object, nn.idx, assay, reduction = NULL, dims = NULL, return.assay = TRUE, slot = "scale.data", features = NULL, mean.function = rowMeans, seed = 4273, verbose = TRUE ) } \arguments{ \item{object}{The object used to calculate the knn} \item{nn.idx}{k nearest neighbor indices; a cells x k matrix} \item{assay}{Assay used for prediction} \item{reduction}{Cell embedding of the reduction used for prediction} \item{dims}{Number of dimensions of the cell embedding} \item{return.assay}{Return an assay or a predicted matrix} \item{slot}{Slot used for prediction} \item{features}{Features used for prediction} \item{mean.function}{The function used to calculate row means} \item{seed}{Sets the random seed used when checking whether a cell's nearest neighbor is the query cell itself} \item{verbose}{Print progress} } \value{ Returns an assay containing predicted expression values in the data slot } \description{ This function predicts expression values or cell embeddings from a k-nearest-neighbor index. For each cell, it averages the values of its k neighbors to obtain an imputed value. It can average expression values in assays and cell embeddings from dimensional reductions. } \concept{integration} Seurat/man/Radius.Rd0000644000176200001440000000076414525500037014016 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/objects.R \name{Radius.SlideSeq} \alias{Radius.SlideSeq} \alias{Radius.STARmap} \alias{Radius.VisiumV1} \title{Get Spot Radius} \usage{ \method{Radius}{SlideSeq}(object) \method{Radius}{STARmap}(object) \method{Radius}{VisiumV1}(object) } \arguments{ \item{object}{An image object} } \description{ Get Spot Radius } \seealso{ \code{\link[SeuratObject:Radius]{SeuratObject::Radius}} } \concept{objects} \concept{spatial} Seurat/man/SCTAssay-class.Rd0000644000176200001440000000561414525500037015323 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/objects.R \docType{class} \name{SCTAssay-class} \alias{SCTAssay-class} \alias{SCTModel} \alias{SCTAssay} \alias{levels.SCTAssay} \alias{levels<-.SCTAssay} \title{The SCTModel Class} \usage{ \method{levels}{SCTAssay}(x) \method{levels}{SCTAssay}(x) <- value } \arguments{ \item{x}{An \code{SCTAssay} object} \item{value}{New levels, must be in the same order as the levels present} } \value{ \code{levels}: SCT model names \code{levels<-}: \code{x} with updated SCT model names } \description{ The SCTModel object stores the model and parameters from SCTransform. It can be used to calculate Pearson residuals for new genes. The SCTAssay object contains all the information found in an \code{\link{Assay}} object, with extra information from the results of \code{\link{SCTransform}} } \section{Slots}{ \describe{ \item{\code{feature.attributes}}{A data.frame with feature attributes in SCTransform} \item{\code{cell.attributes}}{A data.frame with cell attributes in SCTransform} \item{\code{clips}}{A list of two numeric vectors of length two specifying the min and max values the Pearson residuals will be clipped to: 
One for vst and one for SCTransform} \item{\code{umi.assay}}{Name of the assay in the Seurat object containing the UMI matrix; the default is RNA} \item{\code{model}}{A formula used in SCTransform} \item{\code{arguments}}{Other information used in SCTransform} \item{\code{median_umi}}{Median UMI (or scale factor) used to calculate corrected counts} \item{\code{SCTModel.list}}{A list containing SCT models} }} \section{Get and set SCT model names}{ SCT results are named by the initial run of \code{\link{SCTransform}} in order to keep SCT parameters straight between runs. When working with merged \code{SCTAssay} objects, these model names are important. \code{levels} allows querying the models present. \code{levels<-} allows the changing of the names of the models present, useful when merging \code{SCTAssay} objects. Note: unlike normal \code{\link[base]{levels<-}}, \code{levels<-.SCTAssay} allows complete changing of model names, not reordering. } \section{Creating an \code{SCTAssay} from an \code{Assay}}{ Conversion from an \code{Assay} object to an \code{SCTAssay} object is done by adding the additional slots to the object. If \code{from} has results generated by \code{\link{SCTransform}} from Seurat v3.0.0 to v3.1.1, the conversion will automagically fill the new slots with the data. } \examples{ \dontrun{ # SCTAssay objects are generated from SCTransform pbmc_small <- SCTransform(pbmc_small) } \dontrun{ # SCTAssay objects are generated from SCTransform pbmc_small <- SCTransform(pbmc_small) pbmc_small[["SCT"]] } \dontrun{ # Query and change SCT model names levels(pbmc_small[['SCT']]) levels(pbmc_small[['SCT']]) <- '3' levels(pbmc_small[['SCT']]) } } \seealso{ \code{\link{Assay}} } \concept{objects} Seurat/man/NNPlot.Rd0000644000176200001440000000366114525500037013740 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/visualization.R \name{NNPlot} \alias{NNPlot} \title{Highlight Neighbors in DimPlot} \usage{ NNPlot( object, reduction, nn.idx, query.cells, dims = 1:2, label = FALSE, label.size = 4, repel = FALSE, sizes.highlight = 2, pt.size = 1, cols.highlight = c("#377eb8", "#e41a1c"), na.value = "#bdbdbd", order = c("self", "neighbors", "other"), show.all.cells = TRUE, ... ) } \arguments{ \item{object}{Seurat object} \item{reduction}{Which dimensionality reduction to use. If not specified, first searches for umap, then tsne, then pca} \item{nn.idx}{The neighbor indices of all cells} \item{query.cells}{Cells used to find their neighbors} \item{dims}{Dimensions to plot, must be a two-length numeric vector specifying x- and y-dimensions} \item{label}{Whether to label the clusters} \item{label.size}{Sets size of labels} \item{repel}{Repel labels} \item{sizes.highlight}{Size of highlighted cells; will repeat to the length groups in cells.highlight. If \code{sizes.highlight = TRUE} size of all points will be this value.} \item{pt.size}{Adjust point size for plotting} \item{cols.highlight}{A vector of colors to highlight the cells as; will repeat to the length groups in cells.highlight} \item{na.value}{Color value for NA points when using custom scale} \item{order}{Specify the order of plotting for the idents. This can be useful for crowded plots if points of interest are being buried.
Provide either a full list of valid idents or a subset to be plotted last (on top)} \item{show.all.cells}{Show all cells or only query and neighbor cells} \item{...}{Extra parameters passed to \code{DimPlot}} } \value{ A \code{\link[patchwork]{patchwork}ed} ggplot object if \code{combine = TRUE}; otherwise, a list of ggplot objects } \description{ It will color the query cells and the neighbors of the query cells in the DimPlot } \concept{visualization} Seurat/man/SingleCorPlot.Rd0000644000176200001440000000244014525500037015304 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/visualization.R \name{SingleCorPlot} \alias{SingleCorPlot} \title{A single correlation plot} \usage{ SingleCorPlot( data, col.by = NULL, cols = NULL, pt.size = NULL, smooth = FALSE, rows.highlight = NULL, legend.title = NULL, na.value = "grey50", span = NULL, raster = NULL, raster.dpi = NULL, plot.cor = TRUE, jitter = TRUE ) } \arguments{ \item{data}{A data frame with two columns to be plotted} \item{col.by}{A vector or factor of values to color the plot by} \item{cols}{An optional vector of colors to use} \item{pt.size}{Point size for the plot} \item{smooth}{Make a smoothed scatter plot} \item{rows.highlight}{A vector of rows to highlight (like cells.highlight in \code{\link{SingleDimPlot}})} \item{legend.title}{Optional legend title} \item{raster}{Convert points to raster format, default is \code{NULL} which will automatically use raster if the number of points plotted is greater than 100,000} \item{raster.dpi}{the pixel resolution for rastered plots, passed to geom_scattermore(). Default is c(512, 512)} \item{plot.cor}{...} \item{jitter}{Jitter for easier visualization of crowded points} } \value{ A ggplot2 object } \description{ A single correlation plot } \keyword{internal} Seurat/man/RunUMAP.Rd0000644000176200001440000002266714525500037014024 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/generics.R, R/dimensional_reduction.R \name{RunUMAP} \alias{RunUMAP} \alias{RunUMAP.default} \alias{RunUMAP.Graph} \alias{RunUMAP.Neighbor} \alias{RunUMAP.Seurat} \title{Run UMAP} \usage{ RunUMAP(object, ...) \method{RunUMAP}{default}( object, reduction.key = "UMAP_", assay = NULL, reduction.model = NULL, return.model = FALSE, umap.method = "uwot", n.neighbors = 30L, n.components = 2L, metric = "cosine", n.epochs = NULL, learning.rate = 1, min.dist = 0.3, spread = 1, set.op.mix.ratio = 1, local.connectivity = 1L, repulsion.strength = 1, negative.sample.rate = 5, a = NULL, b = NULL, uwot.sgd = FALSE, seed.use = 42, metric.kwds = NULL, angular.rp.forest = FALSE, densmap = FALSE, dens.lambda = 2, dens.frac = 0.3, dens.var.shift = 0.1, verbose = TRUE, ... ) \method{RunUMAP}{Graph}( object, assay = NULL, umap.method = "umap-learn", n.components = 2L, metric = "correlation", n.epochs = 0L, learning.rate = 1, min.dist = 0.3, spread = 1, repulsion.strength = 1, negative.sample.rate = 5L, a = NULL, b = NULL, uwot.sgd = FALSE, seed.use = 42L, metric.kwds = NULL, densmap = FALSE, densmap.kwds = NULL, verbose = TRUE, reduction.key = "UMAP_", ... ) \method{RunUMAP}{Neighbor}(object, reduction.model, ...) 
\method{RunUMAP}{Seurat}( object, dims = NULL, reduction = "pca", features = NULL, graph = NULL, assay = DefaultAssay(object = object), nn.name = NULL, slot = "data", umap.method = "uwot", reduction.model = NULL, return.model = FALSE, n.neighbors = 30L, n.components = 2L, metric = "cosine", n.epochs = NULL, learning.rate = 1, min.dist = 0.3, spread = 1, set.op.mix.ratio = 1, local.connectivity = 1L, repulsion.strength = 1, negative.sample.rate = 5L, a = NULL, b = NULL, uwot.sgd = FALSE, seed.use = 42L, metric.kwds = NULL, angular.rp.forest = FALSE, densmap = FALSE, dens.lambda = 2, dens.frac = 0.3, dens.var.shift = 0.1, verbose = TRUE, reduction.name = "umap", reduction.key = NULL, ... ) } \arguments{ \item{object}{An object} \item{...}{Arguments passed to other methods and UMAP} \item{reduction.key}{Dimensional reduction key, specifies the string before the number for the dimension names. UMAP by default} \item{assay}{Assay to pull data for when using \code{features}, or assay used to construct Graph if running UMAP on a Graph} \item{reduction.model}{\code{DimReduc} object that contains the umap model} \item{return.model}{Whether UMAP will return the uwot model} \item{umap.method}{UMAP implementation to run. Can be \describe{ \item{\code{uwot}:}{Runs umap via the uwot R package} \item{\code{uwot-learn}:}{Runs umap via the uwot R package and returns the learned umap model} \item{\code{umap-learn}:}{Run the Seurat wrapper of the python umap-learn package} }} \item{n.neighbors}{This determines the number of neighboring points used in local approximations of manifold structure. Larger values will result in more global structure being preserved at the loss of detailed local structure. In general this parameter should often be in the range 5 to 50.} \item{n.components}{The dimension of the space to embed into.} \item{metric}{This determines the choice of metric used to measure distance in the input space. A wide variety of metrics are already coded, and a user-defined function can be passed as long as it has been JITd by numba.} \item{n.epochs}{The number of training epochs to be used in optimizing the low dimensional embedding. Larger values result in more accurate embeddings. If NULL is specified, a value will be selected based on the size of the input dataset (200 for large datasets, 500 for small).} \item{learning.rate}{The initial learning rate for the embedding optimization.} \item{min.dist}{This controls how tightly the embedding is allowed to compress points together. Larger values ensure embedded points are more evenly distributed, while smaller values allow the algorithm to optimise more accurately with regard to local structure. Sensible values are in the range 0.001 to 0.5.} \item{spread}{The effective scale of embedded points. In combination with min.dist this determines how clustered/clumped the embedded points are.} \item{set.op.mix.ratio}{Interpolate between (fuzzy) union and intersection as the set operation used to combine local fuzzy simplicial sets to obtain a global fuzzy simplicial set. Both fuzzy set operations use the product t-norm. The value of this parameter should be between 0.0 and 1.0; a value of 1.0 will use a pure fuzzy union, while 0.0 will use a pure fuzzy intersection.} \item{local.connectivity}{The local connectivity required - i.e. the number of nearest neighbors that should be assumed to be connected at a local level. The higher this value the more connected the manifold becomes locally.
In practice this should not be more than the local intrinsic dimension of the manifold.} \item{repulsion.strength}{Weighting applied to negative samples in low dimensional embedding optimization. Values higher than one will result in greater weight being given to negative samples.} \item{negative.sample.rate}{The number of negative samples to select per positive sample in the optimization process. Increasing this value will result in greater repulsive force being applied, greater optimization cost, but slightly more accuracy.} \item{a}{More specific parameters controlling the embedding. If NULL, these values are set automatically as determined by min.dist and spread. Parameter of differentiable approximation of right adjoint functor.} \item{b}{More specific parameters controlling the embedding. If NULL, these values are set automatically as determined by min.dist and spread. Parameter of differentiable approximation of right adjoint functor.} \item{uwot.sgd}{Set \code{uwot::umap(fast_sgd = TRUE)}; see \code{\link[uwot]{umap}} for more details} \item{seed.use}{Set a random seed. By default, sets the seed to 42. Setting NULL will not set a seed} \item{metric.kwds}{A dictionary of arguments to pass on to the metric, such as the p value for Minkowski distance. If NULL then no arguments are passed on.} \item{angular.rp.forest}{Whether to use an angular random projection forest to initialise the approximate nearest neighbor search. This can be faster, but is mostly only useful for metrics that use an angular style distance such as cosine, correlation etc. In the case of those metrics angular forests will be chosen automatically.} \item{densmap}{Whether to use the density-augmented objective of densMAP. Turning on this option generates an embedding where the local densities are encouraged to be correlated with those in the original space. Parameters below with the prefix ‘dens’ further control the behavior of this extension. Default is FALSE. Only compatible with the 'umap-learn' method and versions of umap-learn >= 0.5.0} \item{dens.lambda}{Specific parameter which controls the regularization weight of the density correlation term in densMAP. Higher values prioritize density preservation over the UMAP objective, and vice versa for values closer to zero. Setting this parameter to zero is equivalent to running the original UMAP algorithm. Default value is 2.} \item{dens.frac}{Specific parameter which controls the fraction of epochs (between 0 and 1) where the density-augmented objective is used in densMAP. The first (1 - dens_frac) fraction of epochs optimize the original UMAP objective before introducing the density correlation term. Default is 0.3.} \item{dens.var.shift}{Specific parameter which specifies a small constant added to the variance of local radii in the embedding when calculating the density correlation objective to prevent numerical instability from dividing by a small number. Default is 0.1.} \item{verbose}{Controls verbosity} \item{densmap.kwds}{A dictionary of arguments to pass on to the densMAP optimization.} \item{dims}{Which dimensions to use as input features, used only if \code{features} is NULL} \item{reduction}{Which dimensional reduction (PCA or ICA) to use for the UMAP input. Default is PCA} \item{features}{If set, run UMAP on this subset of features (instead of running on a set of reduced dimensions).
Not set (NULL) by default; \code{dims} must be NULL to run on features} \item{graph}{Name of graph on which to run UMAP} \item{nn.name}{Name of knn output on which to run UMAP} \item{slot}{The slot used to pull data from when using \code{features}; the 'data' slot is used by default.} \item{reduction.name}{Name to store dimensional reduction under in the Seurat object} } \value{ Returns a Seurat object containing a UMAP representation } \description{ Runs the Uniform Manifold Approximation and Projection (UMAP) dimensional reduction technique. To run using \code{umap.method="umap-learn"}, you must first install the umap-learn python package (e.g. via \code{pip install umap-learn}). Details on this package can be found here: \url{https://github.com/lmcinnes/umap}. For a more in depth discussion of the mathematics underlying UMAP, see the ArXiv paper here: \url{https://arxiv.org/abs/1802.03426}. } \examples{ \dontrun{ data("pbmc_small") pbmc_small # Run UMAP on the first 5 PCs pbmc_small <- RunUMAP(object = pbmc_small, dims = 1:5) # Plot results DimPlot(object = pbmc_small, reduction = 'umap') } } \references{ McInnes, L, Healy, J, UMAP: Uniform Manifold Approximation and Projection for Dimension Reduction, ArXiv e-prints 1802.03426, 2018 } \concept{dimensional_reduction} Seurat/man/ScaleFactors.Rd0000644000176200001440000000161314525500037015132 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/generics.R, R/objects.R \name{ScaleFactors} \alias{ScaleFactors} \alias{scalefactors} \alias{ScaleFactors.VisiumV1} \title{Get image scale factors} \usage{ ScaleFactors(object, ...) scalefactors(spot, fiducial, hires, lowres) \method{ScaleFactors}{VisiumV1}(object, ...) } \arguments{ \item{object}{An object to get scale factors from} \item{...}{Arguments passed to other methods} \item{spot}{Spot full resolution scale factor} \item{fiducial}{Fiducial full resolution scale factor} \item{hires}{High resolution scale factor} \item{lowres}{Low resolution scale factor} } \value{ An object of class \code{scalefactors} } \description{ Get image scale factors } \note{ \code{scalefactors} objects can be created with \code{scalefactors()} } \concept{objects} \concept{spatial} Seurat/man/ExpSD.Rd0000644000176200001440000000066714525500037013554 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/utilities.R \name{ExpSD} \alias{ExpSD} \title{Calculate the standard deviation of logged values} \usage{ ExpSD(x) } \arguments{ \item{x}{A vector of values} } \value{ Returns the standard deviation in log-space } \description{ Calculate SD of logged values in non-log space (return answer in log-space) } \examples{ ExpSD(x = c(1, 2, 3)) } \concept{utilities} Seurat/man/DISP.Rd0000644000176200001440000000071214525500037013319 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/preprocessing5.R \name{DISP} \alias{DISP} \title{Find variable features based on dispersion} \usage{ DISP(data, nselect = 2000L, verbose = TRUE, ...)
} \arguments{ \item{data}{Data matrix} \item{nselect}{Number of top features to select based on dispersion values} \item{verbose}{Display progress} } \description{ Find variable features based on dispersion } \keyword{internal} Seurat/man/FindMultiModalNeighbors.Rd0000644000176200001440000000451014525500037017271 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/clustering.R \name{FindMultiModalNeighbors} \alias{FindMultiModalNeighbors} \title{Construct weighted nearest neighbor graph} \usage{ FindMultiModalNeighbors( object, reduction.list, dims.list, k.nn = 20, l2.norm = TRUE, knn.graph.name = "wknn", snn.graph.name = "wsnn", weighted.nn.name = "weighted.nn", modality.weight.name = NULL, knn.range = 200, prune.SNN = 1/15, sd.scale = 1, cross.contant.list = NULL, smooth = FALSE, return.intermediate = FALSE, modality.weight = NULL, verbose = TRUE ) } \arguments{ \item{object}{A Seurat object} \item{reduction.list}{A list of two dimensional reductions, one for each of the modalities to be integrated} \item{dims.list}{A list containing the dimensions for each reduction to use} \item{k.nn}{the number of multimodal neighbors to compute. 20 by default} \item{l2.norm}{Perform L2 normalization on the cell embeddings after dimensional reduction. TRUE by default.} \item{knn.graph.name}{Multimodal knn graph name} \item{snn.graph.name}{Multimodal snn graph name} \item{weighted.nn.name}{Multimodal neighbor object name} \item{modality.weight.name}{Variable name to store modality weight in object meta data} \item{knn.range}{The number of approximate neighbors to compute} \item{prune.SNN}{Cutoff not to discard edge in SNN graph} \item{sd.scale}{The scaling factor for kernel width. 1 by default} \item{cross.contant.list}{Constant used to avoid divide-by-zero errors. 1e-4 by default} \item{smooth}{Smoothing modality score across each individual modality neighbors. FALSE by default} \item{return.intermediate}{Store intermediate results in misc} \item{modality.weight}{A \code{\link{ModalityWeights}} object generated by \code{FindModalityWeights}} \item{verbose}{Print progress bars and output} } \value{ Seurat object containing a nearest-neighbor object, KNN graph, and SNN graph - each based on a weighted combination of modalities. } \description{ This function will construct a weighted nearest neighbor (WNN) graph. For each cell, we identify the nearest neighbors based on a weighted combination of two modalities. Takes as input two dimensional reductions, one computed for each modality.Other parameters are listed for debugging, but can be left as default values. } \concept{clustering} Seurat/man/HoverLocator.Rd0000644000176200001440000000167414525500037015177 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/visualization.R \name{HoverLocator} \alias{HoverLocator} \title{Hover Locator} \usage{ HoverLocator(plot, information = NULL, axes = TRUE, dark.theme = FALSE, ...) 
} \arguments{ \item{plot}{A ggplot2 plot} \item{information}{An optional dataframe or matrix of extra information to be displayed on hover} \item{axes}{Display or hide x- and y-axes} \item{dark.theme}{Plot using a dark theme?} \item{...}{Extra parameters to be passed to \code{\link[plotly]{layout}}} } \description{ Get quick information from a scatterplot by hovering over points } \examples{ \dontrun{ data("pbmc_small") plot <- DimPlot(object = pbmc_small) HoverLocator(plot = plot, information = FetchData(object = pbmc_small, vars = 'percent.mito')) } } \seealso{ \code{\link[plotly]{layout}} \code{\link[ggplot2]{ggplot_build}} \code{\link{DimPlot}} \code{\link{FeaturePlot}} } \concept{visualization} Seurat/man/AddAzimuthScores.Rd0000644000176200001440000000112214525500037015765 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/utilities.R \name{AddAzimuthScores} \alias{AddAzimuthScores} \title{Add Azimuth Scores} \usage{ AddAzimuthScores(object, filename) } \arguments{ \item{object}{A \code{\link[SeuratObject]{Seurat}} object} \item{filename}{Path to Azimuth mapping scores file} } \value{ \code{object} with the mapping scores added } \description{ Add mapping and prediction scores from Azimuth to a \code{\link[SeuratObject]{Seurat}} object } \examples{ \dontrun{ object <- AddAzimuthScores(object, filename = "azimuth_pred.tsv") } } Seurat/man/LeverageScore.Rd0000644000176200001440000000510014525500037015302 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/generics.R, R/sketching.R \name{LeverageScore} \alias{LeverageScore} \alias{LeverageScore.default} \alias{LeverageScore.StdAssay} \alias{LeverageScore.Assay} \alias{LeverageScore.Seurat} \title{Leverage Score Calculation} \usage{ LeverageScore(object, ...) \method{LeverageScore}{default}( object, nsketch = 5000L, ndims = NULL, method = CountSketch, eps = 0.5, seed = 123L, verbose = TRUE, ... ) \method{LeverageScore}{StdAssay}( object, nsketch = 5000L, ndims = NULL, method = CountSketch, vf.method = NULL, layer = "data", eps = 0.5, seed = 123L, verbose = TRUE, ... ) \method{LeverageScore}{Assay}( object, nsketch = 5000L, ndims = NULL, method = CountSketch, vf.method = NULL, layer = "data", eps = 0.5, seed = 123L, verbose = TRUE, ... ) \method{LeverageScore}{Seurat}( object, assay = NULL, nsketch = 5000L, ndims = NULL, var.name = "leverage.score", over.write = FALSE, method = CountSketch, vf.method = NULL, layer = "data", eps = 0.5, seed = 123L, verbose = TRUE, ... ) } \arguments{ \item{object}{A matrix-like object} \item{...}{Arguments passed to other methods} \item{nsketch}{A positive integer. The number of sketches to be used in the approximation. Default is 5000.} \item{ndims}{A positive integer or NULL. The number of dimensions to use. If NULL, the number of dimensions will default to the number of columns in the object.} \item{method}{The sketching method to use, defaults to CountSketch.} \item{eps}{A numeric. The error tolerance for the approximation in Johnson–Lindenstrauss embeddings, defaults to 0.5.} \item{seed}{A positive integer. The seed for the random number generator, defaults to 123.} \item{verbose}{Print progress and diagnostic messages} \item{vf.method}{VariableFeatures method} \item{layer}{layer to use} \item{assay}{assay to use} \item{var.name}{name of slot to store leverage scores} \item{over.write}{whether to overwrite slot that currently stores leverage scores. 
Defaults to FALSE, in which case the 'var.name' is modified if it already exists in the object} } \description{ This function computes the leverage scores for a given object It uses the concept of sketching and random projections. The function provides an approximation to the leverage scores using a scalable method suitable for large matrices. } \references{ Clarkson, K. L. & Woodruff, D. P. Low-rank approximation and regression in input sparsity time. JACM 63, 1–45 (2017). \url{https://dl.acm.org/doi/10.1145/3019134}; } Seurat/man/SingleSpatialPlot.Rd0000644000176200001440000000404014525500037016154 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/visualization.R \name{SingleSpatialPlot} \alias{SingleSpatialPlot} \title{Base plotting function for all Spatial plots} \usage{ SingleSpatialPlot( data, image, cols = NULL, image.alpha = 1, pt.alpha = NULL, crop = TRUE, pt.size.factor = NULL, stroke = 0.25, col.by = NULL, alpha.by = NULL, cells.highlight = NULL, cols.highlight = c("#DE2D26", "grey50"), geom = c("spatial", "interactive", "poly"), na.value = "grey50" ) } \arguments{ \item{data}{Data.frame with info to be plotted} \item{image}{\code{SpatialImage} object to be plotted} \item{cols}{Vector of colors, each color corresponds to an identity class. This may also be a single character or numeric value corresponding to a palette as specified by \code{\link[RColorBrewer]{brewer.pal.info}}. By default, ggplot2 assigns colors} \item{image.alpha}{Adjust the opacity of the background images. Set to 0 to remove.} \item{pt.alpha}{Adjust the opacity of the points if plotting a \code{SpatialDimPlot}} \item{crop}{Crop the plot in to focus on points plotted. Set to \code{FALSE} to show entire background image.} \item{pt.size.factor}{Sets the size of the points relative to spot.radius} \item{stroke}{Control the width of the border around the spots} \item{col.by}{Mapping variable for the point color} \item{alpha.by}{Mapping variable for the point alpha value} \item{cells.highlight}{A list of character or numeric vectors of cells to highlight. If only one group of cells desired, can simply pass a vector instead of a list. 
If set, colors selected cells to the color(s) in cols.highlight} \item{cols.highlight}{A vector of colors to highlight the cells as; ordered the same as the groups in cells.highlight; last color corresponds to unselected cells.} \item{geom}{Switch between normal spatial geom and geom to enable hover functionality} \item{na.value}{Color for spots with NA values} } \value{ A ggplot2 object } \description{ Base plotting function for all Spatial plots } \keyword{internal} Seurat/man/cc.genes.Rd0000644000176200001440000000075414525500037014253 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/data.R \docType{data} \name{cc.genes} \alias{cc.genes} \title{Cell cycle genes} \format{ A list of two vectors \describe{ \item{s.genes}{Genes associated with S-phase} \item{g2m.genes}{Genes associated with G2M-phase} } } \source{ \url{https://www.science.org/doi/abs/10.1126/science.aad0501} } \usage{ cc.genes } \description{ A list of genes used in cell-cycle regression } \concept{data} \keyword{datasets} Seurat/man/SingleDimPlot.Rd0000644000176200001440000000507614525500037015302 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/visualization.R \name{SingleDimPlot} \alias{SingleDimPlot} \title{Plot a single dimension} \usage{ SingleDimPlot( data, dims, col.by = NULL, cols = NULL, pt.size = NULL, shape.by = NULL, alpha = 1, alpha.by = NULL, order = NULL, label = FALSE, repel = FALSE, label.size = 4, cells.highlight = NULL, cols.highlight = "#DE2D26", sizes.highlight = 1, na.value = "grey50", raster = NULL, raster.dpi = NULL ) } \arguments{ \item{data}{Data to plot} \item{dims}{A two-length numeric vector with dimensions to use} \item{col.by}{...} \item{cols}{Vector of colors, each color corresponds to an identity class. This may also be a single character or numeric value corresponding to a palette as specified by \code{\link[RColorBrewer]{brewer.pal.info}}.By default, ggplot2 assigns colors} \item{pt.size}{Adjust point size for plotting} \item{shape.by}{If NULL, all points are circles (default). You can specify any cell attribute (that can be pulled with \code{\link{FetchData}}) allowing for both different colors and different shapes on cells.} \item{alpha}{Alpha value for plotting (default is 1)} \item{alpha.by}{Mapping variable for the point alpha value} \item{order}{Specify the order of plotting for the idents. This can be useful for crowded plots if points of interest are being buried. Provide either a full list of valid idents or a subset to be plotted last (on top).} \item{label}{Whether to label the clusters} \item{repel}{Repel labels} \item{label.size}{Sets size of labels} \item{cells.highlight}{A list of character or numeric vectors of cells to highlight. If only one group of cells desired, can simply pass a vector instead of a list. 
If set, colors selected cells to the color(s) in \code{cols.highlight} and other cells black (white if dark.theme = TRUE); will also resize to the size(s) passed to \code{sizes.highlight}} \item{cols.highlight}{A vector of colors to highlight the cells as; will repeat to the length groups in cells.highlight} \item{sizes.highlight}{Size of highlighted cells; will repeat to the length groups in cells.highlight} \item{na.value}{Color value for NA points when using custom scale.} \item{raster}{Convert points to raster format, default is \code{NULL} which will automatically use raster if the number of points plotted is greater than 100,000} \item{raster.dpi}{the pixel resolution for rastered plots, passed to geom_scattermore(). Default is c(512, 512)} } \value{ A ggplot2 object } \description{ Plot a single dimension } \keyword{internal} Seurat/man/SampleUMI.Rd0000644000176200001440000000145514525500037014361 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/preprocessing.R \name{SampleUMI} \alias{SampleUMI} \title{Sample UMI} \usage{ SampleUMI(data, max.umi = 1000, upsample = FALSE, verbose = FALSE) } \arguments{ \item{data}{Matrix with the raw count data} \item{max.umi}{Number of UMIs to sample to} \item{upsample}{Upsamples all cells with fewer than max.umi} \item{verbose}{Display the progress bar} } \value{ Matrix with downsampled data } \description{ Downsample each cell to a specified number of UMIs. Includes an option to upsample cells below specified UMI as well. } \examples{ data("pbmc_small") counts = as.matrix(x = GetAssayData(object = pbmc_small, assay = "RNA", slot = "counts")) downsampled = SampleUMI(data = counts) head(x = downsampled) } \concept{preprocessing} Seurat/man/SingleImageMap.Rd0000644000176200001440000000113514525500037015402 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/visualization.R \name{SingleImageMap} \alias{SingleImageMap} \title{A single heatmap from base R using \code{\link[graphics]{image}}} \usage{ SingleImageMap(data, order = NULL, title = NULL) } \arguments{ \item{data}{matrix of data to plot} \item{order}{optional vector of cell names to specify order in plot} \item{title}{Title for plot} } \value{ No return, generates a base-R heatmap using \code{\link[graphics]{image}} } \description{ A single heatmap from base R using \code{\link[graphics]{image}} } \keyword{internal} Seurat/man/HTOHeatmap.Rd0000644000176200001440000000316614525500037014520 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/visualization.R \name{HTOHeatmap} \alias{HTOHeatmap} \title{Hashtag oligo heatmap} \usage{ HTOHeatmap( object, assay = "HTO", classification = paste0(assay, "_classification"), global.classification = paste0(assay, "_classification.global"), ncells = 5000, singlet.names = NULL, raster = TRUE ) } \arguments{ \item{object}{Seurat object. Assumes that the hash tag oligo (HTO) data has been added and normalized, and demultiplexing has been run with HTODemux().} \item{assay}{Hashtag assay name.} \item{classification}{The naming for metadata column with classification result from HTODemux().} \item{global.classification}{The slot for metadata column specifying a cell as singlet/doublet/negative.} \item{ncells}{Number of cells to plot. Default is to choose 5000 cells by random subsampling, to avoid having to draw exceptionally large heatmaps.} \item{singlet.names}{Namings for the singlets. 
Default is to use the same names as HTOs.} \item{raster}{If TRUE, plot with geom_raster, else use geom_tile. geom_raster may look blurry on some viewing applications such as Preview due to how the raster is interpolated. Set this to FALSE if you are encountering that issue (note that plots may take longer to produce/render).} } \value{ Returns a ggplot2 plot object. } \description{ Draws a heatmap of hashtag oligo signals across singlets/doublets/negative cells. Allows for the visualization of HTO demultiplexing results. } \examples{ \dontrun{ object <- HTODemux(object) HTOHeatmap(object) } } \seealso{ \code{\link{HTODemux}} } \concept{visualization} Seurat/man/ISpatialFeaturePlot.Rd0000644000176200001440000000155014525500037016442 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/visualization.R \name{ISpatialFeaturePlot} \alias{ISpatialFeaturePlot} \title{Visualize features spatially and interactively} \usage{ ISpatialFeaturePlot( object, feature, image = NULL, slot = "data", alpha = c(0.1, 1) ) } \arguments{ \item{object}{A Seurat object} \item{feature}{Feature to visualize} \item{image}{Name of the image to use in the plot} \item{slot}{If plotting a feature, which data slot to pull from (counts, data, or scale.data)} \item{alpha}{Controls opacity of spots. Provide as a vector specifying the min and max for SpatialFeaturePlot. For SpatialDimPlot, provide a single alpha value for each plot.} } \value{ Returns final plot as a ggplot object } \description{ Visualize features spatially and interactively } \concept{spatial} \concept{visualization} Seurat/man/Assay-class.Rd0000644000176200001440000000062714525500037014750 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/reexports.R \docType{class} \name{Assay-class} \alias{Assay-class} \title{The Assay Class} \description{ The \code{Assay} object is the basic unit of Seurat; for more details, please see the documentation in \code{\link[SeuratObject:Assay]{SeuratObject}} } \seealso{ \code{\link[SeuratObject:Assay]{SeuratObject::Assay-class}} } Seurat/man/MixscapeLDA.Rd0000644000176200001440000000323014525500037014650 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/mixscape.R \name{MixscapeLDA} \alias{MixscapeLDA} \title{Linear discriminant analysis on pooled CRISPR screen data.} \usage{ MixscapeLDA( object, assay = NULL, ndims.print = 1:5, nfeatures.print = 30, reduction.key = "LDA_", seed = 42, pc.assay = "PRTB", labels = "gene", nt.label = "NT", npcs = 10, verbose = TRUE, logfc.threshold = 0.25 ) } \arguments{ \item{object}{An object of class Seurat.} \item{assay}{Assay to use for performing Linear Discriminant Analysis (LDA).} \item{ndims.print}{Number of LDA dimensions to print.} \item{nfeatures.print}{Number of features to print for each LDA component.} \item{reduction.key}{Reduction key name.} \item{seed}{Value for random seed} \item{pc.assay}{Assay to use for running principal component analysis.} \item{labels}{Meta data column with target gene class labels.} \item{nt.label}{Name of non-targeting cell class.} \item{npcs}{Number of principal components to use.} \item{verbose}{Print progress bar.} \item{logfc.threshold}{Limit testing to genes which show, on average, at least X-fold difference (log-scale) between the two groups of cells.
Default is 0.25. Increasing logfc.threshold speeds up the function, but can miss weaker signals.} } \value{ Returns a Seurat object with LDA added in the reduction slot. } \description{ This function performs unsupervised PCA on each mixscape class separately and projects each subspace onto all cells in the data. Finally, it uses the first 10 principal components from each projection as input to lda in the MASS package together with mixscape class labels. } \concept{mixscape} Seurat/man/IntegrationData-class.Rd0000644000176200001440000000165514525500037016747 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/objects.R \docType{class} \name{IntegrationData-class} \alias{IntegrationData-class} \alias{IntegrationData} \title{The IntegrationData Class} \description{ The IntegrationData object is an intermediate storage container used internally throughout the integration procedure to hold bits of data that are useful downstream. } \section{Slots}{ \describe{ \item{\code{neighbors}}{List of neighborhood information for cells (outputs of \code{RANN::nn2})} \item{\code{weights}}{Anchor weight matrix} \item{\code{integration.matrix}}{Integration matrix} \item{\code{anchors}}{Anchor matrix} \item{\code{offsets}}{The offsets used to enable cell look up in downstream functions} \item{\code{objects.ncell}}{Number of cells in each object in the object.list} \item{\code{sample.tree}}{Sample tree used for ordering multi-dataset integration} }} \concept{objects} Seurat/man/ProjectCellEmbeddings.Rd0000644000176200001440000000554714525500037016763 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/generics.R, R/integration.R \name{ProjectCellEmbeddings} \alias{ProjectCellEmbeddings} \alias{ProjectCellEmbeddings.Seurat} \alias{ProjectCellEmbeddings.Assay} \alias{ProjectCellEmbeddings.SCTAssay} \alias{ProjectCellEmbeddings.StdAssay} \alias{ProjectCellEmbeddings.default} \alias{ProjectCellEmbeddings.IterableMatrix} \title{Project query data to the reference dimensional reduction} \usage{ ProjectCellEmbeddings(query, ...) \method{ProjectCellEmbeddings}{Seurat}( query, reference, query.assay = NULL, reference.assay = NULL, reduction = "pca", dims = 1:50, normalization.method = c("LogNormalize", "SCT"), scale = TRUE, verbose = TRUE, nCount_UMI = NULL, feature.mean = NULL, feature.sd = NULL, ... ) \method{ProjectCellEmbeddings}{Assay}( query, reference, reference.assay = NULL, reduction = "pca", dims = 1:50, scale = TRUE, normalization.method = NULL, verbose = TRUE, nCount_UMI = NULL, feature.mean = NULL, feature.sd = NULL, ... ) \method{ProjectCellEmbeddings}{SCTAssay}( query, reference, reference.assay = NULL, reduction = "pca", dims = 1:50, scale = TRUE, normalization.method = NULL, verbose = TRUE, nCount_UMI = NULL, feature.mean = NULL, feature.sd = NULL, ... ) \method{ProjectCellEmbeddings}{StdAssay}( query, reference, reference.assay = NULL, reduction = "pca", dims = 1:50, scale = TRUE, normalization.method = NULL, verbose = TRUE, nCount_UMI = NULL, feature.mean = NULL, feature.sd = NULL, ... ) \method{ProjectCellEmbeddings}{default}( query, reference, reference.assay = NULL, reduction = "pca", dims = 1:50, scale = TRUE, normalization.method = NULL, verbose = TRUE, features = NULL, nCount_UMI = NULL, feature.mean = NULL, feature.sd = NULL, ...
) \method{ProjectCellEmbeddings}{IterableMatrix}( query, reference, reference.assay = NULL, reduction = "pca", dims = 1:50, scale = TRUE, normalization.method = NULL, verbose = TRUE, features = features, nCount_UMI = NULL, feature.mean = NULL, feature.sd = NULL, block.size = 10000, ... ) } \arguments{ \item{query}{An object for query cells} \item{reference}{An object for reference cells} \item{query.assay}{Assay name for query object} \item{reference.assay}{Assay name for reference object} \item{reduction}{Name of dimensional reduction from reference object} \item{dims}{Dimensions used for reference dimensional reduction} \item{scale}{Determine if scale query data based on reference data variance} \item{verbose}{Print progress} \item{feature.mean}{Mean of features in reference} \item{feature.sd}{Standard variance of features in reference} } \value{ A matrix with projected cell embeddings } \description{ Project query data to the reference dimensional reduction } \keyword{internal} Seurat/man/RunMixscape.Rd0000644000176200001440000000603714525500037015024 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/mixscape.R \name{RunMixscape} \alias{RunMixscape} \title{Run Mixscape} \usage{ RunMixscape( object, assay = "PRTB", slot = "scale.data", labels = "gene", nt.class.name = "NT", new.class.name = "mixscape_class", min.de.genes = 5, min.cells = 5, de.assay = "RNA", logfc.threshold = 0.25, iter.num = 10, verbose = FALSE, split.by = NULL, fine.mode = FALSE, fine.mode.labels = "guide_ID", prtb.type = "KO" ) } \arguments{ \item{object}{An object of class Seurat.} \item{assay}{Assay to use for mixscape classification.} \item{slot}{Assay data slot to use.} \item{labels}{metadata column with target gene labels.} \item{nt.class.name}{Classification name of non-targeting gRNA cells.} \item{new.class.name}{Name of mixscape classification to be stored in metadata.} \item{min.de.genes}{Required number of genes that are differentially expressed for method to separate perturbed and non-perturbed cells.} \item{min.cells}{Minimum number of cells in target gene class. If fewer than this many cells are assigned to a target gene class during classification, all are assigned NP.} \item{de.assay}{Assay to use when performing differential expression analysis. Usually RNA.} \item{logfc.threshold}{Limit testing to genes which show, on average, at least X-fold difference (log-scale) between the two groups of cells. Default is 0.25 Increasing logfc.threshold speeds up the function, but can miss weaker signals.} \item{iter.num}{Number of normalmixEM iterations to run if convergence does not occur.} \item{verbose}{Display messages} \item{split.by}{metadata column with experimental condition/cell type classification information. This is meant to be used to account for cases a perturbation is condition/cell type -specific.} \item{fine.mode}{When this is equal to TRUE, DE genes for each target gene class will be calculated for each gRNA separately and pooled into one DE list for calculating the perturbation score of every cell and their subsequent classification.} \item{fine.mode.labels}{metadata column with gRNA ID labels.} \item{prtb.type}{specify type of CRISPR perturbation expected for labeling mixscape classifications. 
Default is KO.} } \value{ Returns Seurat object with with the following information in the meta data and tools slots: \describe{ \item{mixscape_class}{Classification result with cells being either classified as perturbed (KO, by default) or non-perturbed (NP) based on their target gene class.} \item{mixscape_class.global}{Global classification result (perturbed, NP or NT)} \item{p_ko}{Posterior probabilities used to determine if a cell is KO (default). Name of this item will change to match prtb.type parameter setting. (>0.5) or NP} \item{perturbation score}{Perturbation scores for every cell calculated in the first iteration of the function.} } } \description{ Function to identify perturbed and non-perturbed gRNA expressing cells that accounts for multiple treatments/conditions/chemical perturbations. } \concept{mixscape} Seurat/man/Cells.Rd0000644000176200001440000000110614525500037013620 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/objects.R \name{Cells.SCTModel} \alias{Cells.SCTModel} \alias{Cells.SlideSeq} \alias{Cells.STARmap} \alias{Cells.VisiumV1} \title{Get Cell Names} \usage{ \method{Cells}{SCTModel}(x, ...) \method{Cells}{SlideSeq}(x, ...) \method{Cells}{STARmap}(x, ...) \method{Cells}{VisiumV1}(x, ...) } \arguments{ \item{x}{An object} \item{...}{Arguments passed to other methods} } \description{ Get Cell Names } \seealso{ \code{\link[SeuratObject:Cells]{SeuratObject::Cells}} } \concept{objects} \concept{spatial} Seurat/man/RunMoransI.Rd0000644000176200001440000000073314525500037014620 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/preprocessing.R \name{RunMoransI} \alias{RunMoransI} \title{Compute Moran's I value.} \usage{ RunMoransI(data, pos, verbose = TRUE) } \arguments{ \item{data}{Expression matrix} \item{pos}{Position matrix} \item{verbose}{Display messages/progress} } \description{ Wraps the functionality of the Moran.I function from the ape package. Weights are computed as 1/distance. } \concept{preprocessing} Seurat/man/fortify-Spatial.Rd0000644000176200001440000000166714525500037015647 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/visualization.R \name{fortify-Spatial} \alias{fortify-Spatial} \alias{fortify.Centroids} \alias{fortify} \alias{fortify.Molecules} \alias{fortify.Segmentation} \title{Prepare Coordinates for Spatial Plots} \usage{ \method{fortify}{Centroids}(model, data, ...) \method{fortify}{Molecules}(model, data, nmols = NULL, seed = NA_integer_, ...) \method{fortify}{Segmentation}(model, data, ...) 
} \arguments{ \item{model}{A \code{\linkS4class{Segmentation}}, \code{\linkS4class{Centroids}}, or \code{\linkS4class{Molecules}} object} \item{data}{Extra data to be used for annotating the cell segmentations; the easiest way to pass data is a one-column \code{\link[base:data.frame]{data frame}} with the values to color by and the cell names are rownames} \item{...}{Arguments passed to other methods} } \description{ Prepare Coordinates for Spatial Plots } \keyword{internal} Seurat/man/VisiumV1-class.Rd0000644000176200001440000000137714525500037015356 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/objects.R \docType{class} \name{VisiumV1-class} \alias{VisiumV1-class} \alias{VisiumV1} \title{The VisiumV1 class} \description{ The VisiumV1 class represents spatial information from the 10X Genomics Visium platform } \section{Slots}{ \describe{ \item{\code{image}}{A three-dimensional array with PNG image data, see \code{\link[png]{readPNG}} for more details} \item{\code{scale.factors}}{An object of class \code{\link{scalefactors}}; see \code{\link{scalefactors}} for more information} \item{\code{coordinates}}{A data frame with tissue coordinate information} \item{\code{spot.radius}}{Single numeric value giving the radius of the spots} }} \concept{objects} \concept{spatial} Seurat/man/Read10X_h5.Rd0000644000176200001440000000132614525500037014322 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/preprocessing.R \name{Read10X_h5} \alias{Read10X_h5} \title{Read 10X hdf5 file} \usage{ Read10X_h5(filename, use.names = TRUE, unique.features = TRUE) } \arguments{ \item{filename}{Path to h5 file} \item{use.names}{Label row names with feature names rather than ID numbers.} \item{unique.features}{Make feature names unique (default TRUE)} } \value{ Returns a sparse matrix with rows and columns labeled. If multiple genomes are present, returns a list of sparse matrices (one per genome). } \description{ Read count matrix from 10X CellRanger hdf5 file. This can be used to read both scATAC-seq and scRNA-seq matrices. } \concept{preprocessing} Seurat/man/SeuratTheme.Rd0000644000176200001440000000602114525500037015005 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/visualization.R \name{SeuratTheme} \alias{SeuratTheme} \alias{CenterTitle} \alias{DarkTheme} \alias{FontSize} \alias{NoAxes} \alias{NoLegend} \alias{NoGrid} \alias{SeuratAxes} \alias{SpatialTheme} \alias{RestoreLegend} \alias{RotatedAxis} \alias{BoldTitle} \alias{WhiteBackground} \title{Seurat Themes} \usage{ SeuratTheme() CenterTitle(...) DarkTheme(...) FontSize( x.text = NULL, y.text = NULL, x.title = NULL, y.title = NULL, main = NULL, ... ) NoAxes(..., keep.text = FALSE, keep.ticks = FALSE) NoLegend(...) NoGrid(...) SeuratAxes(...) SpatialTheme(...) RestoreLegend(..., position = "right") RotatedAxis(...) BoldTitle(...) WhiteBackground(...) 
} \arguments{ \item{...}{Extra parameters to be passed to \code{theme}} \item{x.text, y.text}{X and Y axis text sizes} \item{x.title, y.title}{X and Y axis title sizes} \item{main}{Plot title size} \item{keep.text}{Keep axis text} \item{keep.ticks}{Keep axis ticks} \item{position}{A position to restore the legend to} } \value{ A ggplot2 theme object } \description{ Various themes to be applied to ggplot2-based plots \describe{ \item{\code{SeuratTheme}}{The curated Seurat theme, consists of ...} \item{\code{DarkTheme}}{A dark theme, axes and text turn to white, the background becomes black} \item{\code{NoAxes}}{Removes axis lines, text, and ticks} \item{\code{NoLegend}}{Removes the legend} \item{\code{FontSize}}{Sets axis and title font sizes} \item{\code{NoGrid}}{Removes grid lines} \item{\code{SeuratAxes}}{Set Seurat-style axes} \item{\code{SpatialTheme}}{A theme designed for spatial visualizations (eg \code{\link{PolyFeaturePlot}}, \code{\link{PolyDimPlot}})} \item{\code{RestoreLegend}}{Restore a legend after removal} \item{\code{RotatedAxis}}{Rotate X axis text 45 degrees} \item{\code{BoldTitle}}{Enlarges and emphasizes the title} } } \examples{ # Generate a plot with a dark theme library(ggplot2) df <- data.frame(x = rnorm(n = 100, mean = 20, sd = 2), y = rbinom(n = 100, size = 100, prob = 0.2)) p <- ggplot(data = df, mapping = aes(x = x, y = y)) + geom_point(mapping = aes(color = 'red')) p + DarkTheme(legend.position = 'none') # Generate a plot with no axes library(ggplot2) df <- data.frame(x = rnorm(n = 100, mean = 20, sd = 2), y = rbinom(n = 100, size = 100, prob = 0.2)) p <- ggplot(data = df, mapping = aes(x = x, y = y)) + geom_point(mapping = aes(color = 'red')) p + NoAxes() # Generate a plot with no legend library(ggplot2) df <- data.frame(x = rnorm(n = 100, mean = 20, sd = 2), y = rbinom(n = 100, size = 100, prob = 0.2)) p <- ggplot(data = df, mapping = aes(x = x, y = y)) + geom_point(mapping = aes(color = 'red')) p + NoLegend() # Generate a plot with no grid lines library(ggplot2) df <- data.frame(x = rnorm(n = 100, mean = 20, sd = 2), y = rbinom(n = 100, size = 100, prob = 0.2)) p <- ggplot(data = df, mapping = aes(x = x, y = y)) + geom_point(mapping = aes(color = 'red')) p + NoGrid() } \seealso{ \code{\link[ggplot2]{theme}} } \concept{visualization} Seurat/man/BridgeCellsRepresentation.Rd0000644000176200001440000000304014525500037017657 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/integration.R \name{BridgeCellsRepresentation} \alias{BridgeCellsRepresentation} \title{Construct a dictionary representation for each unimodal dataset} \usage{ BridgeCellsRepresentation( object.list, bridge.object, object.reduction, bridge.reduction, laplacian.reduction = "lap", laplacian.dims = 1:50, bridge.assay.name = "Bridge", return.all.assays = FALSE, l2.norm = TRUE, verbose = TRUE ) } \arguments{ \item{object.list}{A list of Seurat objects} \item{bridge.object}{A multi-omic bridge Seurat which is used as the basis to represent unimodal datasets} \item{object.reduction}{A list of dimensional reductions from object.list used to be reconstructed by bridge.object} \item{bridge.reduction}{A list of dimensional reductions from bridge.object used to reconstruct object.reduction} \item{laplacian.reduction}{Name of bridge graph laplacian dimensional reduction} \item{laplacian.dims}{Dimensions used for bridge graph laplacian dimensional reduction} \item{bridge.assay.name}{Assay name used for bridge object reconstruction value (default is 'Bridge')} 
\item{return.all.assays}{Whether to return all assays in the object.list. Only bridge assay is returned by default.} \item{l2.norm}{Whether to l2 normalize the dictionary representation} \item{verbose}{Print messages and progress} } \value{ Returns a object list in which each object has a bridge cell derived assay } \description{ Construct a dictionary representation for each unimodal dataset } Seurat/man/RPCAIntegration.Rd0000644000176200001440000001040314525500037015507 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/integration5.R \name{RPCAIntegration} \alias{RPCAIntegration} \title{Seurat-RPCA Integration} \usage{ RPCAIntegration( object = NULL, assay = NULL, layers = NULL, orig = NULL, new.reduction = "integrated.dr", reference = NULL, features = NULL, normalization.method = c("LogNormalize", "SCT"), dims = 1:30, k.filter = NA, scale.layer = "scale.data", dims.to.integrate = NULL, k.weight = 100, weight.reduction = NULL, sd.weight = 1, sample.tree = NULL, preserve.order = FALSE, verbose = TRUE, ... ) } \arguments{ \item{object}{A \code{Seurat} object} \item{assay}{Name of \code{Assay} in the \code{Seurat} object} \item{layers}{Names of layers in \code{assay}} \item{orig}{A \link[SeuratObject:DimReduc]{dimensional reduction} to correct} \item{new.reduction}{Name of new integrated dimensional reduction} \item{reference}{A reference \code{Seurat} object} \item{features}{A vector of features to use for integration} \item{normalization.method}{Name of normalization method used: LogNormalize or SCT} \item{dims}{Dimensions of dimensional reduction to use for integration} \item{k.filter}{Number of anchors to filter} \item{scale.layer}{Name of scaled layer in \code{Assay}} \item{dims.to.integrate}{Number of dimensions to return integrated values for} \item{k.weight}{Number of neighbors to consider when weighting anchors} \item{weight.reduction}{Dimension reduction to use when calculating anchor weights. This can be one of: \itemize{ \item{A string, specifying the name of a dimension reduction present in all objects to be integrated} \item{A vector of strings, specifying the name of a dimension reduction to use for each object to be integrated} \item{A vector of \code{\link{DimReduc}} objects, specifying the object to use for each object in the integration} \item{NULL, in which case the full corrected space is used for computing anchor weights.} }} \item{sd.weight}{Controls the bandwidth of the Gaussian kernel for weighting} \item{sample.tree}{Specify the order of integration. Order of integration should be encoded in a matrix, where each row represents one of the pairwise integration steps. Negative numbers specify a dataset, positive numbers specify the integration results from a given row (the format of the merge matrix included in the \code{\link{hclust}} function output). For example: \code{matrix(c(-2, 1, -3, -1), ncol = 2)} gives: \if{html}{\out{
}}\preformatted{ [,1] [,2] [1,] -2 -3 [2,] 1 -1 }\if{html}{\out{
}} Which would cause datasets 2 and 3 to be integrated first, then the resulting object integrated with dataset 1. If NULL, the sample tree will be computed automatically.} \item{preserve.order}{Do not reorder objects based on size for each pairwise integration.} \item{verbose}{Print progress} \item{...}{Arguments passed on to \code{FindIntegrationAnchors}} } \description{ Seurat-RPCA Integration } \examples{ \dontrun{ # Preprocessing obj <- SeuratData::LoadData("pbmcsca") obj[["RNA"]] <- split(obj[["RNA"]], f = obj$Method) obj <- NormalizeData(obj) obj <- FindVariableFeatures(obj) obj <- ScaleData(obj) obj <- RunPCA(obj) # After preprocessing, we run integration obj <- IntegrateLayers(object = obj, method = RPCAIntegration, orig.reduction = "pca", new.reduction = 'integrated.rpca', verbose = FALSE) # Reference-based Integration # Here, we use the first layer as a reference for integration # Thus, we only identify anchors between the reference and the rest of the datasets, # saving computational resources obj <- IntegrateLayers(object = obj, method = RPCAIntegration, orig.reduction = "pca", new.reduction = 'integrated.rpca', reference = 1, verbose = FALSE) # Modifying parameters # We can also specify parameters such as `k.anchor` to increase the strength of # integration obj <- IntegrateLayers(object = obj, method = RPCAIntegration, orig.reduction = "pca", new.reduction = 'integrated.rpca', k.anchor = 20, verbose = FALSE) # Integrating SCTransformed data obj <- SCTransform(object = obj) obj <- IntegrateLayers(object = obj, method = RPCAIntegration, orig.reduction = "pca", new.reduction = 'integrated.rpca', assay = "SCT", verbose = FALSE) } } Seurat/man/ProjectUMAP.Rd0000644000176200001440000000611314525500037014652 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/generics.R, R/dimensional_reduction.R \name{ProjectUMAP} \alias{ProjectUMAP} \alias{ProjectUMAP.default} \alias{ProjectUMAP.DimReduc} \alias{ProjectUMAP.Seurat} \title{Project query into UMAP coordinates of a reference} \usage{ ProjectUMAP(query, ...) \method{ProjectUMAP}{default}( query, query.dims = NULL, reference, reference.dims = NULL, k.param = 30, nn.method = "annoy", n.trees = 50, annoy.metric = "cosine", l2.norm = FALSE, cache.index = TRUE, index = NULL, neighbor.name = "query_ref.nn", reduction.model, ... ) \method{ProjectUMAP}{DimReduc}( query, query.dims = NULL, reference, reference.dims = NULL, k.param = 30, nn.method = "annoy", n.trees = 50, annoy.metric = "cosine", l2.norm = FALSE, cache.index = TRUE, index = NULL, neighbor.name = "query_ref.nn", reduction.model, ... ) \method{ProjectUMAP}{Seurat}( query, query.reduction, query.dims = NULL, reference, reference.reduction, reference.dims = NULL, k.param = 30, nn.method = "annoy", n.trees = 50, annoy.metric = "cosine", l2.norm = FALSE, cache.index = TRUE, index = NULL, neighbor.name = "query_ref.nn", reduction.model, reduction.name = "ref.umap", reduction.key = "refUMAP_", ... ) } \arguments{ \item{query}{Query dataset} \item{...}{Additional parameters to \code{\link{RunUMAP}}} \item{query.dims}{Dimensions (columns) to use from query} \item{reference}{Reference dataset} \item{reference.dims}{Dimensions (columns) to use from reference} \item{k.param}{Defines k for the k-nearest neighbor algorithm} \item{nn.method}{Method for nearest neighbor finding.
Options include: rann, annoy} \item{n.trees}{More trees gives higher precision when using annoy approximate nearest neighbor search} \item{annoy.metric}{Distance metric for annoy. Options include: euclidean, cosine, manhattan, and hamming} \item{l2.norm}{Take L2Norm of the data} \item{cache.index}{Include cached index in returned Neighbor object (only relevant if return.neighbor = TRUE)} \item{index}{Precomputed index. Useful if querying new data against existing index to avoid recomputing.} \item{neighbor.name}{Name to store neighbor information in the query} \item{reduction.model}{\code{DimReduc} object that contains the umap model} \item{query.reduction}{Name of reduction to use from the query for neighbor finding} \item{reference.reduction}{Name of reduction to use from the reference for neighbor finding} \item{reduction.name}{Name of projected UMAP to store in the query} \item{reduction.key}{Value for the projected UMAP key} } \description{ This function will take a query dataset and project it into the coordinates of a provided reference UMAP. This is essentially a wrapper around two steps: \itemize{ \item{FindNeighbors - Find the nearest reference cell neighbors and their distances for each query cell.} \item{RunUMAP - Perform umap projection by providing the neighbor set calculated above and the umap model previously computed in the reference.} } } \concept{dimensional_reduction} Seurat/man/FindNeighbors.Rd0000644000176200001440000001247014525500037015305 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/generics.R, R/clustering.R \name{FindNeighbors} \alias{FindNeighbors} \alias{FindNeighbors.default} \alias{FindNeighbors.Assay} \alias{FindNeighbors.dist} \alias{FindNeighbors.Seurat} \title{(Shared) Nearest-neighbor graph construction} \usage{ FindNeighbors(object, ...) \method{FindNeighbors}{default}( object, query = NULL, distance.matrix = FALSE, k.param = 20, return.neighbor = FALSE, compute.SNN = !return.neighbor, prune.SNN = 1/15, nn.method = "annoy", n.trees = 50, annoy.metric = "euclidean", nn.eps = 0, verbose = TRUE, l2.norm = FALSE, cache.index = FALSE, index = NULL, ... ) \method{FindNeighbors}{Assay}( object, features = NULL, k.param = 20, return.neighbor = FALSE, compute.SNN = !return.neighbor, prune.SNN = 1/15, nn.method = "annoy", n.trees = 50, annoy.metric = "euclidean", nn.eps = 0, verbose = TRUE, l2.norm = FALSE, cache.index = FALSE, ... ) \method{FindNeighbors}{dist}( object, k.param = 20, return.neighbor = FALSE, compute.SNN = !return.neighbor, prune.SNN = 1/15, nn.method = "annoy", n.trees = 50, annoy.metric = "euclidean", nn.eps = 0, verbose = TRUE, l2.norm = FALSE, cache.index = FALSE, ... ) \method{FindNeighbors}{Seurat}( object, reduction = "pca", dims = 1:10, assay = NULL, features = NULL, k.param = 20, return.neighbor = FALSE, compute.SNN = !return.neighbor, prune.SNN = 1/15, nn.method = "annoy", n.trees = 50, annoy.metric = "euclidean", nn.eps = 0, verbose = TRUE, do.plot = FALSE, graph.name = NULL, l2.norm = FALSE, cache.index = FALSE, ... ) } \arguments{ \item{object}{An object} \item{...}{Arguments passed to other methods} \item{query}{Matrix of data to query against object. 
If missing, defaults to object.} \item{distance.matrix}{Boolean value of whether the provided matrix is a distance matrix; note, for objects of class \code{dist}, this parameter will be set automatically} \item{k.param}{Defines k for the k-nearest neighbor algorithm} \item{return.neighbor}{Return result as \code{\link{Neighbor}} object. Not used with distance matrix input.} \item{compute.SNN}{also compute the shared nearest neighbor graph} \item{prune.SNN}{Sets the cutoff for acceptable Jaccard index when computing the neighborhood overlap for the SNN construction. Any edges with values less than or equal to this will be set to 0 and removed from the SNN graph. Essentially sets the stringency of pruning (0 --- no pruning, 1 --- prune everything).} \item{nn.method}{Method for nearest neighbor finding. Options include: rann, annoy} \item{n.trees}{More trees gives higher precision when using annoy approximate nearest neighbor search} \item{annoy.metric}{Distance metric for annoy. Options include: euclidean, cosine, manhattan, and hamming} \item{nn.eps}{Error bound when performing nearest neighbor search using RANN; default of 0.0 implies exact nearest neighbor search} \item{verbose}{Whether or not to print output to the console} \item{l2.norm}{Take L2Norm of the data} \item{cache.index}{Include cached index in returned Neighbor object (only relevant if return.neighbor = TRUE)} \item{index}{Precomputed index. Useful if querying new data against existing index to avoid recomputing.} \item{features}{Features to use as input for building the (S)NN; used only when \code{dims} is \code{NULL}} \item{reduction}{Reduction to use as input for building the (S)NN} \item{dims}{Dimensions of reduction to use as input} \item{assay}{Assay to use in construction of (S)NN; used only when \code{dims} is \code{NULL}} \item{do.plot}{Plot SNN graph on tSNE coordinates} \item{graph.name}{Optional naming parameter for stored (S)NN graph (or Neighbor object, if return.neighbor = TRUE). Default is assay.name_(s)nn. To store both the neighbor graph and the shared nearest neighbor (SNN) graph, you must supply a vector containing two names to the \code{graph.name} parameter. The first element in the vector will be used to store the nearest neighbor (NN) graph, and the second element used to store the SNN graph. If only one name is supplied, only the NN graph is stored.} } \value{ This function can either return a \code{\link{Neighbor}} object with the KNN information or a list of \code{\link{Graph}} objects with the KNN and SNN depending on the settings of \code{return.neighbor} and \code{compute.SNN}. When running on a \code{\link{Seurat}} object, this returns the \code{\link{Seurat}} object with the Graphs or Neighbor objects stored in their respective slots. Names of the Graph or Neighbor object can be found with \code{\link{Graphs}} or \code{\link{Neighbors}}. } \description{ Computes the \code{k.param} nearest neighbors for a given dataset. Can also optionally (via \code{compute.SNN}), construct a shared nearest neighbor graph by calculating the neighborhood overlap (Jaccard index) between every cell and its \code{k.param} nearest neighbors. } \examples{ data("pbmc_small") pbmc_small # Compute an SNN on the gene expression level pbmc_small <- FindNeighbors(pbmc_small, features = VariableFeatures(object = pbmc_small)) # More commonly, we build the SNN on a dimensionally reduced form of the data # such as the first 10 principal components. 
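# (this assumes a "pca" reduction is already present in the object, e.g. computed via RunPCA)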
pbmc_small <- FindNeighbors(pbmc_small, reduction = "pca", dims = 1:10) } \concept{clustering} Seurat/man/Seurat-class.Rd0000644000176200001440000000066414525500037015134 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/reexports.R \docType{class} \name{Seurat-class} \alias{Seurat-class} \title{The Seurat Class} \description{ The Seurat object is a representation of single-cell expression data for R; for more details, please see the documentation in \code{\link[SeuratObject:Seurat]{SeuratObject}} } \seealso{ \code{\link[SeuratObject:Seurat]{SeuratObject::Seurat-class}} } Seurat/man/GroupCorrelationPlot.Rd0000644000176200001440000000152214525500037016715 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/visualization.R \name{GroupCorrelationPlot} \alias{GroupCorrelationPlot} \title{Boxplot of correlation of a variable (e.g. number of UMIs) with expression data} \usage{ GroupCorrelationPlot( object, assay = NULL, feature.group = "feature.grp", cor = "nCount_RNA_cor" ) } \arguments{ \item{object}{Seurat object} \item{assay}{Assay where the feature grouping info and correlations are stored} \item{feature.group}{Name of the column in meta.features where the feature grouping info is stored} \item{cor}{Name of the column in meta.features where correlation info is stored} } \value{ Returns a ggplot boxplot of correlations split by group } \description{ Boxplot of correlation of a variable (e.g. number of UMIs) with expression data } \concept{visualization} Seurat/man/BuildNicheAssay.Rd0000644000176200001440000000161414525500037015571 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/utilities.R \name{BuildNicheAssay} \alias{BuildNicheAssay} \title{Construct an assay for spatial niche analysis} \usage{ BuildNicheAssay( object, fov, group.by, assay = "niche", neighbors.k = 20, niches.k = 4 ) } \arguments{ \item{object}{A Seurat object} \item{fov}{FOV object to gather cell positions from} \item{group.by}{Cell classifications to count in spatial neighborhood} \item{assay}{Name for spatial neighborhoods assay} \item{neighbors.k}{Number of neighbors to consider for each cell} \item{niches.k}{Number of clusters to return based on the niche assay} } \value{ Seurat object containing a new assay } \description{ This function will construct a new assay where each feature is a cell label. The values represent the sum of a particular cell label neighboring a given cell. 
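}
\examples{
\dontrun{
# A hypothetical sketch, not part of the original documentation: assumes a
# spatial Seurat object 'obj' with an FOV named "fov" and cell type labels
# stored in obj$celltype
obj <- BuildNicheAssay(
  object = obj,
  fov = "fov",
  group.by = "celltype",
  neighbors.k = 20,
  niches.k = 4
)
# the new "niche" assay records, for each cell, how many of its spatial
# neighbors carry each cell type label; niches.k controls how many niche
# clusters are called from that assay
}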
} \concept{clustering} Seurat/man/LoadSTARmap.Rd0000644000176200001440000000201114525500037014625 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/preprocessing.R \name{LoadSTARmap} \alias{LoadSTARmap} \title{Load STARmap data} \usage{ LoadSTARmap( data.dir, counts.file = "cell_barcode_count.csv", gene.file = "genes.csv", qhull.file = "qhulls.tsv", centroid.file = "centroids.tsv", assay = "Spatial", image = "image" ) } \arguments{ \item{data.dir}{location of data directory that contains the counts matrix, gene name, qhull, and centroid files.} \item{counts.file}{name of file containing the counts matrix (csv)} \item{gene.file}{name of file containing the gene names (csv)} \item{qhull.file}{name of file containing the hull coordinates (tsv)} \item{centroid.file}{name of file containing the centroid positions (tsv)} \item{assay}{Name of assay to associate spatial data to} \item{image}{Name of "image" object storing spatial coordinates} } \value{ A \code{\link{Seurat}} object } \description{ Load STARmap data } \seealso{ \code{\link{STARmap}} } \concept{preprocessing} Seurat/man/DoHeatmap.Rd0000644000176200001440000000474014525500037014427 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/visualization.R \name{DoHeatmap} \alias{DoHeatmap} \title{Feature expression heatmap} \usage{ DoHeatmap( object, features = NULL, cells = NULL, group.by = "ident", group.bar = TRUE, group.colors = NULL, disp.min = -2.5, disp.max = NULL, slot = "scale.data", assay = NULL, label = TRUE, size = 5.5, hjust = 0, vjust = 0, angle = 45, raster = TRUE, draw.lines = TRUE, lines.width = NULL, group.bar.height = 0.02, combine = TRUE ) } \arguments{ \item{object}{Seurat object} \item{features}{A vector of features to plot, defaults to \code{VariableFeatures(object = object)}} \item{cells}{A vector of cells to plot} \item{group.by}{A vector of variables to group cells by; pass 'ident' to group by cell identity classes} \item{group.bar}{Add a color bar showing group status for cells} \item{group.colors}{Colors to use for the color bar} \item{disp.min}{Minimum display value (all values below are clipped)} \item{disp.max}{Maximum display value (all values above are clipped); defaults to 2.5 if \code{slot} is 'scale.data', 6 otherwise} \item{slot}{Data slot to use, choose from 'raw.data', 'data', or 'scale.data'} \item{assay}{Assay to pull from} \item{label}{Label the cell identities above the color bar} \item{size}{Size of text above color bar} \item{hjust}{Horizontal justification of text above color bar} \item{vjust}{Vertical justification of text above color bar} \item{angle}{Angle of text above color bar} \item{raster}{If true, plot with geom_raster, else use geom_tile. geom_raster may look blurry on some viewing applications such as Preview due to how the raster is interpolated. Set this to FALSE if you are encountering that issue (note that plots may take longer to produce/render).} \item{draw.lines}{Include white lines to separate the groups} \item{lines.width}{Integer number to adjust the width of the separating white lines. Corresponds to the number of "cells" between each group.} \item{group.bar.height}{Scale the height of the color bar} \item{combine}{Combine plots into a single \code{\link[patchwork]{patchwork}ed} ggplot object. 
If \code{FALSE}, return a list of ggplot objects} } \value{ A \code{\link[patchwork]{patchwork}ed} ggplot object if \code{combine = TRUE}; otherwise, a list of ggplot objects } \description{ Draws a heatmap of single cell feature expression. } \examples{ data("pbmc_small") DoHeatmap(object = pbmc_small) } \concept{visualization} Seurat/man/CalculateBarcodeInflections.Rd0000644000176200001440000000455614525500037020145 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/preprocessing.R \name{CalculateBarcodeInflections} \alias{CalculateBarcodeInflections} \title{Calculate the Barcode Distribution Inflection} \usage{ CalculateBarcodeInflections( object, barcode.column = "nCount_RNA", group.column = "orig.ident", threshold.low = NULL, threshold.high = NULL ) } \arguments{ \item{object}{Seurat object} \item{barcode.column}{Column to use as proxy for barcodes ("nCount_RNA" by default)} \item{group.column}{Column to group by ("orig.ident" by default)} \item{threshold.low}{Ignore barcodes of rank below this threshold in inflection calculation} \item{threshold.high}{Ignore barcodes of rank above this threshold in inflection calculation} } \value{ Returns Seurat object with a new list in the `tools` slot, `CalculateBarcodeInflections` with values: * `barcode_distribution` - contains the full barcode distribution across the entire dataset * `inflection_points` - the calculated inflection points within the thresholds * `threshold_values` - the provided (or default) threshold values to search within for inflections * `cells_pass` - the cells that pass the inflection point calculation } \description{ This function calculates an adaptive inflection point ("knee") of the barcode distribution for each sample group. This is useful for determining a threshold for removing low-quality samples. } \details{ The function operates by calculating the slope of the barcode number vs. rank distribution, and then finding the point at which the distribution changes most steeply (the "knee"). Of note, this calculation often must be restricted as to the range at which it performs, so `threshold` parameters are provided to restrict the range of the calculation based on the rank of the barcodes. [BarcodeInflectionsPlot()] is provided as a convenience function to visualize and test different thresholds and thus provide more sensible end results. See [BarcodeInflectionsPlot()] to visualize the calculated inflection points and [SubsetByBarcodeInflections()] to subsequently subset the Seurat object. } \examples{ data("pbmc_small") CalculateBarcodeInflections(pbmc_small, group.column = 'groups') } \seealso{ \code{\link{BarcodeInflectionsPlot}} \code{\link{SubsetByBarcodeInflections}} } \author{ Robert A. Amezquita, \email{robert.amezquita@fredhutch.org} } \concept{preprocessing} Seurat/man/FetchResiduals_reference.Rd0000644000176200001440000000136614525500037017511 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/preprocessing5.R \name{FetchResiduals_reference} \alias{FetchResiduals_reference} \title{Temporary function to get residuals from reference} \usage{ FetchResiduals_reference( object, reference.SCT.model = NULL, features = NULL, nCount_UMI = NULL, verbose = FALSE ) } \arguments{ \item{object}{A seurat object} \item{reference.SCT.model}{a reference SCT model that should be used for calculating the residuals} \item{features}{Names of features to compute} \item{nCount_UMI}{UMI counts. 
If not specified, defaults to column sums of object} \item{verbose}{Whether to print messages and progress bars} } \description{ Temporary function to get residuals from reference } Seurat/man/CollapseSpeciesExpressionMatrix.Rd0000644000176200001440000000275214525500037021111 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/utilities.R \name{CollapseSpeciesExpressionMatrix} \alias{CollapseSpeciesExpressionMatrix} \title{Slim down a multi-species expression matrix, when only one species is primarily of interest.} \usage{ CollapseSpeciesExpressionMatrix( object, prefix = "HUMAN_", controls = "MOUSE_", ncontrols = 100 ) } \arguments{ \item{object}{A UMI count matrix. Should contain rownames that start with the ensuing arguments prefix.1 or prefix.2} \item{prefix}{The prefix denoting rownames for the species of interest. Default is "HUMAN_". These rownames will have this prefix removed in the returned matrix.} \item{controls}{The prefix denoting rownames for the species of 'negative control' cells. Default is "MOUSE_".} \item{ncontrols}{How many of the most highly expressed (average) negative control features (by default, 100 mouse genes), should be kept? All other rownames starting with prefix.2 are discarded.} } \value{ A UMI count matrix. Rownames that started with \code{prefix} have this prefix discarded. For rownames starting with \code{controls}, only the \code{ncontrols} most highly expressed features are kept, and the prefix is kept. All other rows are retained. } \description{ Valuable for CITE-seq analyses, where we typically spike in rare populations of 'negative control' cells from a different species. } \examples{ \dontrun{ cbmc.rna.collapsed <- CollapseSpeciesExpressionMatrix(cbmc.rna) } } \concept{utilities} Seurat/man/PCASigGenes.Rd0000644000176200001440000000206414525500037014612 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/dimensional_reduction.R \name{PCASigGenes} \alias{PCASigGenes} \title{Significant genes from a PCA} \usage{ PCASigGenes( object, pcs.use, pval.cut = 0.1, use.full = FALSE, max.per.pc = NULL ) } \arguments{ \item{object}{Seurat object} \item{pcs.use}{PCs to use.} \item{pval.cut}{P-value cutoff} \item{use.full}{Use the full list of genes (from the projected PCA). Assumes that \code{ProjectDim} has been run. Currently, must be set to FALSE.} \item{max.per.pc}{Maximum number of genes to return per PC. Used to avoid genes from one PC dominating the entire analysis.} } \value{ A vector of genes whose p-values are statistically significant for at least one of the given PCs. } \description{ Returns a set of genes, based on the JackStraw analysis, that have statistically significant associations with a set of PCs. } \examples{ data("pbmc_small") PCASigGenes(pbmc_small, pcs.use = 1:2) } \seealso{ \code{\link{ProjectDim}} \code{\link{JackStraw}} } \concept{dimensional_reduction} Seurat/man/RunCCA.Rd0000644000176200001440000000500314525500037013631 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/generics.R, R/dimensional_reduction.R \name{RunCCA} \alias{RunCCA} \alias{RunCCA.default} \alias{RunCCA.Seurat} \title{Perform Canonical Correlation Analysis} \usage{ RunCCA(object1, object2, ...) \method{RunCCA}{default}( object1, object2, standardize = TRUE, num.cc = 20, seed.use = 42, verbose = FALSE, ... 
) \method{RunCCA}{Seurat}( object1, object2, assay1 = NULL, assay2 = NULL, num.cc = 20, features = NULL, renormalize = FALSE, rescale = FALSE, compute.gene.loadings = TRUE, add.cell.id1 = NULL, add.cell.id2 = NULL, verbose = TRUE, ... ) } \arguments{ \item{object1}{First Seurat object} \item{object2}{Second Seurat object.} \item{...}{Extra parameters (passed onto MergeSeurat in case with two objects passed, passed onto ScaleData in case with single object and rescale.groups set to TRUE)} \item{standardize}{Standardize matrices - scales columns to have unit variance and mean 0} \item{num.cc}{Number of canonical vectors to calculate} \item{seed.use}{Random seed to set. If NULL, does not set a seed} \item{verbose}{Show progress messages} \item{assay1, assay2}{Assays to pull from in the first and second objects, respectively} \item{features}{Set of genes to use in CCA. Default is the union of both the variable features sets present in both objects.} \item{renormalize}{Renormalize raw data after merging the objects. If FALSE, merge the data matrices also.} \item{rescale}{Rescale the datasets prior to CCA. If FALSE, uses existing data in the scale data slots.} \item{compute.gene.loadings}{Also compute the gene loadings. NOTE - this will scale every gene in the dataset which may impose a high memory cost.} \item{add.cell.id1, add.cell.id2}{Add ...} } \value{ Returns a combined Seurat object with the CCA results stored. } \description{ Runs a canonical correlation analysis using a diagonal implementation of CCA. For details about stored CCA calculation parameters, see \code{PrintCCAParams}. } \examples{ \dontrun{ data("pbmc_small") pbmc_small # As CCA requires two datasets, we will split our test object into two just for this example pbmc1 <- subset(pbmc_small, cells = colnames(pbmc_small)[1:40]) pbmc2 <- subset(pbmc_small, cells = colnames(x = pbmc_small)[41:80]) pbmc1[["group"]] <- "group1" pbmc2[["group"]] <- "group2" pbmc_cca <- RunCCA(object1 = pbmc1, object2 = pbmc2) # Print results print(x = pbmc_cca[["cca"]]) } } \seealso{ \code{\link{merge.Seurat}} } \concept{dimensional_reduction} Seurat/man/GetIntegrationData.Rd0000644000176200001440000000100014525500037016264 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/objects.R \name{GetIntegrationData} \alias{GetIntegrationData} \title{Get integration data} \usage{ GetIntegrationData(object, integration.name, slot) } \arguments{ \item{object}{Seurat object} \item{integration.name}{Name of integration object} \item{slot}{Which slot in integration object to get} } \value{ Returns data from the requested slot within the integrated object } \description{ Get integration data } \concept{objects} Seurat/man/LoadCurioSeeker.Rd0000644000176200001440000000103514525500037015577 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/preprocessing.R \name{LoadCurioSeeker} \alias{LoadCurioSeeker} \title{Load Curio Seeker data} \usage{ LoadCurioSeeker(data.dir, assay = "Spatial") } \arguments{ \item{data.dir}{location of data directory that contains the counts matrix, gene names, barcodes/beads, and barcodes/bead location files.} \item{assay}{Name of assay to associate spatial data to} } \value{ A \code{\link{Seurat}} object } \description{ Load Curio Seeker data } \concept{preprocessing} Seurat/man/DiscretePalette.Rd0000644000176200001440000000207314525500037015643 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in 
R/visualization.R \name{DiscretePalette} \alias{DiscretePalette} \title{Discrete colour palettes from pals} \usage{ DiscretePalette(n, palette = NULL, shuffle = FALSE) } \arguments{ \item{n}{Number of colours to be generated.} \item{palette}{Options are "alphabet", "alphabet2", "glasbey", "polychrome", "stepped", and "parade". Can be omitted and the function will use the one based on the requested n.} \item{shuffle}{Shuffle the colors in the selected palette.} } \value{ A vector of colors } \description{ These are included here because pals depends on a number of compiled packages, and this can lead to increases in run time for Travis, and generally should be avoided when possible. } \details{ These palettes are a much better default for data with many classes than the default ggplot2 palette. Many thanks to Kevin Wright for writing the pals package. Taken from the pals package (Licence: GPL-3). \url{https://cran.r-project.org/package=pals} Credit: Kevin Wright } \concept{visualization} Seurat/man/SetIntegrationData.Rd0000644000176200001440000000102314525500037016305 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/objects.R \name{SetIntegrationData} \alias{SetIntegrationData} \title{Set integration data} \usage{ SetIntegrationData(object, integration.name, slot, new.data) } \arguments{ \item{object}{Seurat object} \item{integration.name}{Name of integration object} \item{slot}{Which slot in integration object to set} \item{new.data}{New data to insert} } \value{ Returns a \code{\link{Seurat}} object } \description{ Set integration data } \concept{objects} Seurat/man/HarmonyIntegration.Rd0000644000176200001440000000562114525500037016405 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/integration5.R \name{HarmonyIntegration} \alias{HarmonyIntegration} \title{Harmony Integration} \usage{ HarmonyIntegration( object, orig, features = NULL, scale.layer = "scale.data", new.reduction = "harmony", layers = NULL, npcs = 50L, key = "harmony_", theta = NULL, lambda = NULL, sigma = 0.1, nclust = NULL, tau = 0, block.size = 0.05, max.iter.harmony = 10L, max.iter.cluster = 20L, epsilon.cluster = 1e-05, epsilon.harmony = 1e-04, verbose = TRUE, ... ) } \arguments{ \item{object}{An \code{\link[SeuratObject]{Assay5}} object} \item{orig}{A \link[SeuratObject:DimReduc]{dimensional reduction} to correct} \item{features}{Ignored} \item{scale.layer}{Ignored} \item{new.reduction}{Name of new integrated dimensional reduction} \item{layers}{Ignored} \item{npcs}{If doing PCA on input matrix, number of PCs to compute} \item{key}{Key for Harmony dimensional reduction} \item{theta}{Diversity clustering penalty parameter} \item{lambda}{Ridge regression penalty parameter} \item{sigma}{Width of soft kmeans clusters} \item{nclust}{Number of clusters in model} \item{tau}{Protection against overclustering small datasets with large ones} \item{block.size}{What proportion of cells to update during clustering} \item{max.iter.harmony}{Maximum number of rounds to run Harmony} \item{max.iter.cluster}{Maximum number of rounds to run clustering at each round of Harmony} \item{epsilon.cluster}{Convergence tolerance for clustering round of Harmony} \item{epsilon.harmony}{Convergence tolerance for Harmony} \item{verbose}{Whether to print progress messages. TRUE to print, FALSE to suppress} \item{...}{Ignored} } \value{ ... 
} \description{ Harmony Integration } \note{ This function requires the \href{https://cran.r-project.org/package=harmony}{\pkg{harmony}} package to be installed } \examples{ \dontrun{ # Preprocessing obj <- SeuratData::LoadData("pbmcsca") obj[["RNA"]] <- split(obj[["RNA"]], f = obj$Method) obj <- NormalizeData(obj) obj <- FindVariableFeatures(obj) obj <- ScaleData(obj) obj <- RunPCA(obj) # After preprocessing, we integrate layers with added parameters specific to Harmony: obj <- IntegrateLayers(object = obj, method = HarmonyIntegration, orig.reduction = "pca", new.reduction = 'harmony', verbose = FALSE) # Modifying Parameters # We can also add arguments specific to Harmony such as theta, to give more diverse clusters obj <- IntegrateLayers(object = obj, method = HarmonyIntegration, orig.reduction = "pca", new.reduction = 'harmony', verbose = FALSE, theta = 3) # Integrating SCTransformed data obj <- SCTransform(object = obj) obj <- IntegrateLayers(object = obj, method = HarmonyIntegration, orig.reduction = "pca", new.reduction = 'harmony', assay = "SCT", verbose = FALSE) } } \seealso{ \code{\link[harmony:HarmonyMatrix]{harmony::HarmonyMatrix}()} } \concept{integration} Seurat/man/RelativeCounts.Rd0000644000176200001440000000143214525500037015527 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/preprocessing.R \name{RelativeCounts} \alias{RelativeCounts} \title{Normalize raw data to fractions} \usage{ RelativeCounts(data, scale.factor = 1, verbose = TRUE) } \arguments{ \item{data}{Matrix with the raw count data} \item{scale.factor}{Scale the result. Default is 1} \item{verbose}{Print progress} } \value{ Returns a matrix with the relative counts } \description{ Normalize count data to relative counts per cell by dividing by the total per cell. Optionally use a scale factor, e.g. for counts per million (CPM) use \code{scale.factor = 1e6}. } \examples{ mat <- matrix(data = rbinom(n = 25, size = 5, prob = 0.2), nrow = 5) mat mat_norm <- RelativeCounts(data = mat) mat_norm } \concept{preprocessing} Seurat/man/TopFeatures.Rd0000644000176200001440000000172614525500037015027 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/objects.R \name{TopFeatures} \alias{TopFeatures} \title{Find features with highest scores for a given dimensional reduction technique} \usage{ TopFeatures( object, dim = 1, nfeatures = 20, projected = FALSE, balanced = FALSE, ... ) } \arguments{ \item{object}{DimReduc object} \item{dim}{Dimension to use} \item{nfeatures}{Number of features to return} \item{projected}{Use the projected feature loadings} \item{balanced}{Return an equal number of features with both + and - scores.} \item{...}{Extra parameters passed to \code{\link{Loadings}}} } \value{ Returns a vector of features } \description{ Return a list of features with the strongest contribution to a set of components } \examples{ data("pbmc_small") pbmc_small TopFeatures(object = pbmc_small[["pca"]], dim = 1) # After projection: TopFeatures(object = pbmc_small[["pca"]], dim = 1, projected = TRUE) } \concept{objects} Seurat/man/RunSLSI.Rd0000644000176200001440000000336014525500037014021 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/generics.R, R/dimensional_reduction.R \name{RunSLSI} \alias{RunSLSI} \alias{RunSLSI.default} \alias{RunSLSI.Assay} \alias{RunSLSI.Seurat} \title{Run Supervised Latent Semantic Indexing} \usage{ RunSLSI(object, ...) 
\method{RunSLSI}{default}( object, assay = NULL, n = 50, reduction.key = "SLSI_", graph = NULL, verbose = TRUE, seed.use = 42, ... ) \method{RunSLSI}{Assay}( object, assay = NULL, features = NULL, n = 50, reduction.key = "SLSI_", graph = NULL, verbose = TRUE, seed.use = 42, ... ) \method{RunSLSI}{Seurat}( object, assay = NULL, features = NULL, n = 50, reduction.name = "slsi", reduction.key = "SLSI_", graph = NULL, verbose = TRUE, seed.use = 42, ... ) } \arguments{ \item{object}{An object} \item{...}{Arguments passed to the IRLBA \code{irlba} function} \item{assay}{Name of Assay SLSI is being run on} \item{n}{Total Number of SLSI components to compute and store} \item{reduction.key}{dimensional reduction key, specifies the string before the number for the dimension names} \item{graph}{Graph used to supervise SLSI} \item{verbose}{Display messages} \item{seed.use}{Set a random seed. Setting NULL will not set a seed.} \item{features}{Features to compute SLSI on. If NULL, SLSI will be run using the variable features for the Assay.} \item{reduction.name}{dimensional reduction name} } \value{ Returns Seurat object with the SLSI calculation stored in the reductions slot } \description{ Run a supervised LSI (SLSI) dimensionality reduction supervised by a cell-cell kernel. SLSI is used to capture a linear transformation of peaks that maximizes its dependency on the given cell-cell kernel. } \concept{dimensional_reduction} Seurat/man/IFeaturePlot.Rd0000644000176200001440000000144514525500037015127 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/visualization.R \name{IFeaturePlot} \alias{IFeaturePlot} \title{Visualize features in dimensional reduction space interactively} \usage{ IFeaturePlot(object, feature, dims = c(1, 2), reduction = NULL, slot = "data") } \arguments{ \item{object}{Seurat object} \item{feature}{Feature to plot} \item{dims}{Dimensions to plot, must be a two-length numeric vector specifying x- and y-dimensions} \item{reduction}{Which dimensionality reduction to use. If not specified, first searches for umap, then tsne, then pca} \item{slot}{Which slot to pull expression data from?} } \value{ Returns the final plot as a ggplot object } \description{ Visualize features in dimensional reduction space interactively } \concept{visualization} Seurat/man/ExpVar.Rd0000644000176200001440000000065514525500037013773 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/utilities.R \name{ExpVar} \alias{ExpVar} \title{Calculate the variance of logged values} \usage{ ExpVar(x) } \arguments{ \item{x}{A vector of values} } \value{ Returns the variance in log-space } \description{ Calculate variance of logged values in non-log space (return answer in log-space) } \examples{ ExpVar(x = c(1, 2, 3)) } \concept{utilities} Seurat/man/SelectIntegrationFeatures.Rd0000644000176200001440000000353714525500037017712 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/integration.R \name{SelectIntegrationFeatures} \alias{SelectIntegrationFeatures} \title{Select integration features} \usage{ SelectIntegrationFeatures( object.list, nfeatures = 2000, assay = NULL, verbose = TRUE, fvf.nfeatures = 2000, ... 
) } \arguments{ \item{object.list}{List of seurat objects} \item{nfeatures}{Number of features to return} \item{assay}{Name or vector of assay names (one for each object) from which to pull the variable features.} \item{verbose}{Print messages} \item{fvf.nfeatures}{nfeatures for \code{\link{FindVariableFeatures}}. Used if \code{VariableFeatures} have not been set for any object in \code{object.list}.} \item{...}{Additional parameters to \code{\link{FindVariableFeatures}}} } \value{ A vector of selected features } \description{ Choose the features to use when integrating multiple datasets. This function ranks features by the number of datasets they are deemed variable in, breaking ties by the median variable feature rank across datasets. It returns the top scoring features by this ranking. } \details{ If for any assay in the list, \code{\link{FindVariableFeatures}} hasn't been run, this method will try to run it using the \code{fvf.nfeatures} parameter and any additional ones specified through the \dots. } \examples{ \dontrun{ # to install the SeuratData package see https://github.com/satijalab/seurat-data library(SeuratData) data("panc8") # panc8 is a merged Seurat object containing 8 separate pancreas datasets # split the object by dataset and take the first 2 pancreas.list <- SplitObject(panc8, split.by = "tech")[1:2] # perform SCTransform normalization pancreas.list <- lapply(X = pancreas.list, FUN = SCTransform) # select integration features features <- SelectIntegrationFeatures(pancreas.list) } } \concept{integration} Seurat/man/CCAIntegration.Rd0000644000176200001440000000761014525500037015356 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/integration5.R \name{CCAIntegration} \alias{CCAIntegration} \title{Seurat-CCA Integration} \usage{ CCAIntegration( object = NULL, assay = NULL, layers = NULL, orig = NULL, new.reduction = "integrated.dr", reference = NULL, features = NULL, normalization.method = c("LogNormalize", "SCT"), dims = 1:30, k.filter = NA, scale.layer = "scale.data", dims.to.integrate = NULL, k.weight = 100, weight.reduction = NULL, sd.weight = 1, sample.tree = NULL, preserve.order = FALSE, verbose = TRUE, ... ) } \arguments{ \item{object}{A \code{Seurat} object} \item{assay}{Name of \code{Assay} in the \code{Seurat} object} \item{layers}{Names of layers in \code{assay}} \item{orig}{A \link[SeuratObject:DimReduc]{dimensional reduction} to correct} \item{new.reduction}{Name of new integrated dimensional reduction} \item{reference}{A reference \code{Seurat} object} \item{features}{A vector of features to use for integration} \item{normalization.method}{Name of normalization method used: LogNormalize or SCT} \item{dims}{Dimensions of dimensional reduction to use for integration} \item{k.filter}{Number of anchors to filter} \item{scale.layer}{Name of scaled layer in \code{Assay}} \item{dims.to.integrate}{Number of dimensions to return integrated values for} \item{k.weight}{Number of neighbors to consider when weighting anchors} \item{weight.reduction}{Dimension reduction to use when calculating anchor weights. 
This can be one of: \itemize{ \item{A string, specifying the name of a dimension reduction present in all objects to be integrated} \item{A vector of strings, specifying the name of a dimension reduction to use for each object to be integrated} \item{A vector of \code{\link{DimReduc}} objects, specifying the object to use for each object in the integration} \item{NULL, in which case the full corrected space is used for computing anchor weights.} }} \item{sd.weight}{Controls the bandwidth of the Gaussian kernel for weighting} \item{sample.tree}{Specify the order of integration. Order of integration should be encoded in a matrix, where each row represents one of the pairwise integration steps. Negative numbers specify a dataset, positive numbers specify the integration results from a given row (the format of the merge matrix included in the \code{\link{hclust}} function output). For example: \code{matrix(c(-2, 1, -3, -1), ncol = 2)} gives: \if{html}{\out{
}}\preformatted{     [,1] [,2]
[1,]   -2   -3
[2,]    1   -1
}\if{html}{\out{
}} Which would cause dataset 2 and 3 to be integrated first, then the resulting object integrated with dataset 1. If NULL, the sample tree will be computed automatically.} \item{preserve.order}{Do not reorder objects based on size for each pairwise integration.} \item{verbose}{Print progress} \item{...}{Arguments passed on to \code{FindIntegrationAnchors}} } \description{ Seurat-CCA Integration } \examples{ \dontrun{ # Preprocessing obj <- SeuratData::LoadData("pbmcsca") obj[["RNA"]] <- split(obj[["RNA"]], f = obj$Method) obj <- NormalizeData(obj) obj <- FindVariableFeatures(obj) obj <- ScaleData(obj) obj <- RunPCA(obj) # After preprocessing, we integrate layers. obj <- IntegrateLayers(object = obj, method = CCAIntegration, orig.reduction = "pca", new.reduction = "integrated.cca", verbose = FALSE) # Modifying parameters # We can also specify parameters such as `k.anchor` to increase the strength of integration obj <- IntegrateLayers(object = obj, method = CCAIntegration, orig.reduction = "pca", new.reduction = "integrated.cca", k.anchor = 20, verbose = FALSE) # Integrating SCTransformed data obj <- SCTransform(object = obj) obj <- IntegrateLayers(object = obj, method = CCAIntegration, orig.reduction = "pca", new.reduction = "integrated.cca", assay = "SCT", verbose = FALSE) } } Seurat/man/CustomPalette.Rd0000644000176200001440000000223514525500037015353 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/visualization.R \name{BlackAndWhite} \alias{BlackAndWhite} \alias{BlueAndRed} \alias{CustomPalette} \alias{PurpleAndYellow} \title{Create a custom color palette} \usage{ BlackAndWhite(mid = NULL, k = 50) BlueAndRed(k = 50) CustomPalette(low = "white", high = "red", mid = NULL, k = 50) PurpleAndYellow(k = 50) } \arguments{ \item{mid}{middle color. 
Optional.} \item{k}{number of steps (color levels) to include between low and high values} \item{low}{low color} \item{high}{high color} } \value{ A color palette for plotting } \description{ Creates a custom color palette based on low, middle, and high color values } \examples{ df <- data.frame(x = rnorm(n = 100, mean = 20, sd = 2), y = rbinom(n = 100, size = 100, prob = 0.2)) plot(df, col = BlackAndWhite()) df <- data.frame(x = rnorm(n = 100, mean = 20, sd = 2), y = rbinom(n = 100, size = 100, prob = 0.2)) plot(df, col = BlueAndRed()) myPalette <- CustomPalette() myPalette df <- data.frame(x = rnorm(n = 100, mean = 20, sd = 2), y = rbinom(n = 100, size = 100, prob = 0.2)) plot(df, col = PurpleAndYellow()) } \concept{visualization} Seurat/man/PrepareBridgeReference.Rd0000644000176200001440000000610514525500037017114 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/integration.R \name{PrepareBridgeReference} \alias{PrepareBridgeReference} \title{Prepare the bridge and reference datasets} \usage{ PrepareBridgeReference( reference, bridge, reference.reduction = "pca", reference.dims = 1:50, normalization.method = c("SCT", "LogNormalize"), reference.assay = NULL, bridge.ref.assay = "RNA", bridge.query.assay = "ATAC", supervised.reduction = c("slsi", "spca", NULL), bridge.query.reduction = NULL, bridge.query.features = NULL, laplacian.reduction.name = "lap", laplacian.reduction.key = "lap_", laplacian.reduction.dims = 1:50, verbose = TRUE ) } \arguments{ \item{reference}{A reference Seurat object} \item{bridge}{A multi-omic bridge Seurat object} \item{reference.reduction}{Name of dimensional reduction of the reference object (default is 'pca')} \item{reference.dims}{Number of dimensions used for the reference.reduction (default is 50)} \item{normalization.method}{Name of normalization method used: LogNormalize or SCT} \item{reference.assay}{Assay name for reference (default is \code{\link{DefaultAssay}})} \item{bridge.ref.assay}{Assay name for bridge used for reference mapping. RNA by default} \item{bridge.query.assay}{Assay name for bridge used for query mapping. ATAC by default} \item{supervised.reduction}{Type of supervised dimensional reduction to be performed for integrating the bridge and query. Options are: \itemize{ \item{slsi: Perform supervised LSI as the dimensional reduction for the bridge-query integration} \item{spca: Perform supervised PCA as the dimensional reduction for the bridge-query integration} \item{NULL: no supervised dimensional reduction will be calculated. bridge.query.reduction is used for the bridge-query integration} }} \item{bridge.query.reduction}{Name of dimensions used for the bridge-query harmonization. 'bridge.query.reduction' and 'supervised.reduction' cannot be NULL together.} \item{bridge.query.features}{Features used for bridge query dimensional reduction (default is NULL which uses VariableFeatures from the bridge object)} \item{laplacian.reduction.name}{Name of the dimensional reduction for the graph laplacian eigenspace (default is 'lap')} \item{laplacian.reduction.key}{Dimensional reduction key (default is 'lap_')} \item{laplacian.reduction.dims}{Number of dimensions used for graph laplacian eigenspace (default is 50)} \item{verbose}{Print progress and messages (default is TRUE)} } \value{ Returns a \code{BridgeReferenceSet} that can be used as input to \code{\link{FindBridgeTransferAnchors}}. 
The parameters used are stored in the \code{BridgeReferenceSet} as well } \description{ Preprocess the multi-omic bridge and unimodal reference datasets into an extended reference. This function performs the following three steps: 1. Performs within-modality harmonization between bridge and reference 2. Performs dimensional reduction on the SNN graph of bridge datasets via Laplacian Eigendecomposition 3. Constructs a bridge dictionary representation for unimodal reference cells } Seurat/man/FindIntegrationAnchors.Rd0000644000176200001440000001350214525500037017163 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/integration.R \name{FindIntegrationAnchors} \alias{FindIntegrationAnchors} \title{Find integration anchors} \usage{ FindIntegrationAnchors( object.list = NULL, assay = NULL, reference = NULL, anchor.features = 2000, scale = TRUE, normalization.method = c("LogNormalize", "SCT"), sct.clip.range = NULL, reduction = c("cca", "rpca", "jpca", "rlsi"), l2.norm = TRUE, dims = 1:30, k.anchor = 5, k.filter = 200, k.score = 30, max.features = 200, nn.method = "annoy", n.trees = 50, eps = 0, verbose = TRUE ) } \arguments{ \item{object.list}{A list of \code{\link{Seurat}} objects between which to find anchors for downstream integration.} \item{assay}{A vector of assay names specifying which assay to use when constructing anchors. If NULL, the current default assay for each object is used.} \item{reference}{A vector specifying the object/s to be used as a reference during integration. If NULL (default), all pairwise anchors are found (no reference/s). If not NULL, the corresponding objects in \code{object.list} will be used as references. When using a set of specified references, anchors are first found between each query and each reference. The references are then integrated through pairwise integration. Each query is then mapped to the integrated reference.} \item{anchor.features}{Can be either: \itemize{ \item{A numeric value. This will call \code{\link{SelectIntegrationFeatures}} to select the provided number of features to be used in anchor finding} \item{A vector of features to be used as input to the anchor finding process} }} \item{scale}{Whether or not to scale the features provided. Only set to FALSE if you have previously scaled the features you want to use for each object in the object.list} \item{normalization.method}{Name of normalization method used: LogNormalize or SCT} \item{sct.clip.range}{Numeric of length two specifying the min and max values the Pearson residual will be clipped to} \item{reduction}{Dimensional reduction to perform when finding anchors. Can be one of: \itemize{ \item{cca: Canonical correlation analysis} \item{rpca: Reciprocal PCA} \item{jpca: Joint PCA} \item{rlsi: Reciprocal LSI} }} \item{l2.norm}{Perform L2 normalization on the CCA cell embeddings after dimensional reduction} \item{dims}{Which dimensions to use from the CCA to specify the neighbor search space} \item{k.anchor}{How many neighbors (k) to use when picking anchors} \item{k.filter}{How many neighbors (k) to use when filtering anchors} \item{k.score}{How many neighbors (k) to use when scoring anchors} \item{max.features}{The maximum number of features to use when specifying the neighborhood search space in the anchor filtering} \item{nn.method}{Method for nearest neighbor finding. 
Options include: rann, annoy} \item{n.trees}{More trees gives higher precision when using annoy approximate nearest neighbor search} \item{eps}{Error bound on the neighbor finding algorithm (from RANN/Annoy)} \item{verbose}{Print progress bars and output} } \value{ Returns an \code{\link{AnchorSet}} object that can be used as input to \code{\link{IntegrateData}}. } \description{ Find a set of anchors between a list of \code{\link{Seurat}} objects. These anchors can later be used to integrate the objects using the \code{\link{IntegrateData}} function. } \details{ The main steps of this procedure are outlined below. For a more detailed description of the methodology, please see Stuart, Butler, et al Cell 2019: \doi{10.1016/j.cell.2019.05.031}; \doi{10.1101/460147} First, determine anchor.features if not explicitly specified using \code{\link{SelectIntegrationFeatures}}. Then for all pairwise combinations of reference and query datasets: \itemize{ \item{Perform dimensional reduction on the dataset pair as specified via the \code{reduction} parameter. If \code{l2.norm} is set to \code{TRUE}, perform L2 normalization of the embedding vectors.} \item{Identify anchors - pairs of cells from each dataset that are contained within each other's neighborhoods (also known as mutual nearest neighbors).} \item{Filter low confidence anchors to ensure anchors in the low dimension space are in broad agreement with the high dimensional measurements. This is done by looking at the neighbors of each query cell in the reference dataset using \code{max.features} to define this space. If the reference cell isn't found within the first \code{k.filter} neighbors, remove the anchor.} \item{Assign each remaining anchor a score. For each anchor cell, determine the nearest \code{k.score} anchors within its own dataset and within its pair's dataset. Based on these neighborhoods, construct an overall neighbor graph and then compute the shared neighbor overlap between anchor and query cells (analogous to an SNN graph). We use the 0.01 and 0.90 quantiles on these scores to dampen outlier effects and rescale to range between 0-1.} } } \examples{ \dontrun{ # to install the SeuratData package see https://github.com/satijalab/seurat-data library(SeuratData) data("panc8") # panc8 is a merged Seurat object containing 8 separate pancreas datasets # split the object by dataset pancreas.list <- SplitObject(panc8, split.by = "tech") # perform standard preprocessing on each object for (i in 1:length(pancreas.list)) { pancreas.list[[i]] <- NormalizeData(pancreas.list[[i]], verbose = FALSE) pancreas.list[[i]] <- FindVariableFeatures( pancreas.list[[i]], selection.method = "vst", nfeatures = 2000, verbose = FALSE ) } # find anchors anchors <- FindIntegrationAnchors(object.list = pancreas.list) # integrate data integrated <- IntegrateData(anchorset = anchors) } } \references{ Stuart T, Butler A, et al. Comprehensive Integration of Single-Cell Data. Cell. 
2019;177:1888-1902 \doi{10.1016/j.cell.2019.05.031} } \concept{integration} Seurat/man/CellScatter.Rd0000644000176200001440000000245714525500037014775 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/visualization.R \name{CellScatter} \alias{CellScatter} \alias{CellPlot} \title{Cell-cell scatter plot} \usage{ CellScatter( object, cell1, cell2, features = NULL, highlight = NULL, cols = NULL, pt.size = 1, smooth = FALSE, raster = NULL, raster.dpi = c(512, 512) ) } \arguments{ \item{object}{Seurat object} \item{cell1}{Cell 1 name} \item{cell2}{Cell 2 name} \item{features}{Features to plot (default, all features)} \item{highlight}{Features to highlight} \item{cols}{Colors to use for identity class plotting.} \item{pt.size}{Size of the points on the plot} \item{smooth}{Smooth the graph (similar to smoothScatter)} \item{raster}{Convert points to raster format, default is \code{NULL} which will automatically use raster if the number of points plotted is greater than 100,000} \item{raster.dpi}{Pixel resolution for rasterized plots, passed to geom_scattermore(). Default is c(512, 512).} } \value{ A ggplot object } \description{ Creates a plot of scatter plot of features across two single cells. Pearson correlation between the two cells is displayed above the plot. } \examples{ data("pbmc_small") CellScatter(object = pbmc_small, cell1 = 'ATAGGAGAAACAGA', cell2 = 'CATCAGGATGCACA') } \concept{visualization} Seurat/man/LocalStruct.Rd0000644000176200001440000000242214525500037015017 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/integration.R \name{LocalStruct} \alias{LocalStruct} \title{Calculate the local structure preservation metric} \usage{ LocalStruct( object, grouping.var, idents = NULL, neighbors = 100, reduction = "pca", reduced.dims = 1:10, orig.dims = 1:10, verbose = TRUE ) } \arguments{ \item{object}{Seurat object} \item{grouping.var}{Grouping variable} \item{idents}{Optionally specify a set of idents to compute metric for} \item{neighbors}{Number of neighbors to compute in pca/corrected pca space} \item{reduction}{Dimensional reduction to use for corrected space} \item{reduced.dims}{Number of reduced dimensions to use} \item{orig.dims}{Number of PCs to use in original space} \item{verbose}{Display progress bar} } \value{ Returns the average preservation metric } \description{ Calculates a metric that describes how well the local structure of each group prior to integration is preserved after integration. This procedure works as follows: For each group, compute a PCA, compute the top num.neighbors in pca space, compute the top num.neighbors in corrected pca space, compute the size of the intersection of those two sets of neighbors. Return the average over all groups. 
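}
\examples{
\dontrun{
# A hypothetical sketch, not part of the original documentation: assumes
# 'pancreas.integrated' was produced by IntegrateData(), carries a "tech"
# metadata column marking the original datasets, and has had RunPCA() run
# on the integrated assay so that "pca" is the corrected space
ls.metric <- LocalStruct(
  object = pancreas.integrated,
  grouping.var = "tech",
  reduction = "pca",
  reduced.dims = 1:10,
  orig.dims = 1:10
)
}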
} \concept{integration} Seurat/man/GetResidual.Rd0000644000176200001440000000314014525500037014766 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/preprocessing.R \name{GetResidual} \alias{GetResidual} \title{Calculate pearson residuals of features not in the scale.data} \usage{ GetResidual( object, features, assay = NULL, umi.assay = "RNA", clip.range = NULL, replace.value = FALSE, na.rm = TRUE, verbose = TRUE ) } \arguments{ \item{object}{A seurat object} \item{features}{Name of features to add into the scale.data} \item{assay}{Name of the assay of the seurat object generated by SCTransform} \item{umi.assay}{Name of the assay of the seurat object containing UMI matrix and the default is RNA} \item{clip.range}{Numeric of length two specifying the min and max values the Pearson residual will be clipped to} \item{replace.value}{Recalculate residuals for all features, even if they are already present. Useful if you want to change the clip.range.} \item{na.rm}{For features where there is no feature model stored, return NA for residual value in scale.data when na.rm = FALSE. When na.rm is TRUE, only return residuals for features with a model stored for all cells.} \item{verbose}{Whether to print messages and progress bars} } \value{ Returns a Seurat object containing Pearson residuals of added features in its scale.data } \description{ This function calls sctransform::get_residuals. } \examples{ \dontrun{ data("pbmc_small") pbmc_small <- SCTransform(object = pbmc_small, variable.features.n = 20) pbmc_small <- GetResidual(object = pbmc_small, features = c('MS4A1', 'TCL1A')) } } \seealso{ \code{\link[sctransform]{get_residuals}} } \concept{preprocessing} Seurat/man/PrepSCTIntegration.Rd0000644000176200001440000000622614525500037016252 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/integration.R \name{PrepSCTIntegration} \alias{PrepSCTIntegration} \title{Prepare an object list normalized with sctransform for integration.} \usage{ PrepSCTIntegration( object.list, assay = NULL, anchor.features = 2000, sct.clip.range = NULL, verbose = TRUE ) } \arguments{ \item{object.list}{A list of \code{\link{Seurat}} objects to prepare for integration} \item{assay}{The name of the \code{\link{Assay}} to use for integration. This can be a single name if all the assays to be integrated have the same name, or a character vector containing the name of each \code{\link{Assay}} in each object to be integrated. The specified assays must have been normalized using \code{\link{SCTransform}}. If NULL (default), the current default assay for each object is used.} \item{anchor.features}{Can be either: \itemize{ \item{A numeric value. This will call \code{\link{SelectIntegrationFeatures}} to select the provided number of features to be used in anchor finding} \item{A vector of features to be used as input to the anchor finding process} }} \item{sct.clip.range}{Numeric of length two specifying the min and max values the Pearson residual will be clipped to} \item{verbose}{Display output/messages} } \value{ A list of \code{\link{Seurat}} objects with the appropriate \code{scale.data} slots containing only the required \code{anchor.features}. 
} \description{ This function takes in a list of objects that have been normalized with the \code{\link{SCTransform}} method and performs the following steps: \itemize{ \item{If anchor.features is a numeric value, calls \code{\link{SelectIntegrationFeatures}} to determine the features to use in the downstream integration procedure.} \item{Ensures that the sctransform residuals for the features specified to anchor.features are present in each object in the list. This is necessary because the default behavior of \code{\link{SCTransform}} is to only store the residuals for the features determined to be variable. Residuals are recomputed for missing features using the stored model parameters via the \code{\link{GetResidual}} function.} \item{Subsets the \code{scale.data} slot to only contain the residuals for anchor.features for efficiency in downstream processing. } } } \examples{ \dontrun{ # to install the SeuratData package see https://github.com/satijalab/seurat-data library(SeuratData) data("panc8") # panc8 is a merged Seurat object containing 8 separate pancreas datasets # split the object by dataset and take the first 2 to integrate pancreas.list <- SplitObject(panc8, split.by = "tech")[1:2] # perform SCTransform normalization pancreas.list <- lapply(X = pancreas.list, FUN = SCTransform) # select integration features and prep step features <- SelectIntegrationFeatures(pancreas.list) pancreas.list <- PrepSCTIntegration( pancreas.list, anchor.features = features ) # downstream integration steps anchors <- FindIntegrationAnchors( pancreas.list, normalization.method = "SCT", anchor.features = features ) pancreas.integrated <- IntegrateData(anchors, normalization.method = "SCT") } } \concept{integration} Seurat/man/roxygen/0000755000176200001440000000000014525500037013764 5ustar liggesusersSeurat/man/roxygen/templates/0000755000176200001440000000000014525500037015762 5ustar liggesusersSeurat/man/roxygen/templates/section-progressr.R0000644000176200001440000000104714525500037021577 0ustar liggesusers#' @section Progress Updates with \pkg{progressr}: #' This function uses #' \href{https://cran.r-project.org/package=progressr}{\pkg{progressr}} to #' render status updates and progress bars. To enable progress updates, wrap #' the function call in \code{\link[progressr]{with_progress}} or run #' \code{\link[progressr:handlers]{handlers(global = TRUE)}} before running #' this function. For more details about \pkg{progressr}, please read #' \href{https://progressr.futureverse.org/articles/progressr-intro.html}{\code{vignette("progressr-intro")}} Seurat/man/roxygen/templates/seealso-methods.R0000644000176200001440000000011014525500037021171 0ustar liggesusers#' @seealso \code{<%= cls %>} methods: \code{\link{<%= cls %>-methods}} Seurat/man/roxygen/templates/param-dotsm.R0000644000176200001440000000006114525500037020326 0ustar liggesusers#' @param ... Arguments passed to other methods Seurat/man/roxygen/templates/note-reqdpkg.R0000644000176200001440000000021114525500037020477 0ustar liggesusers#' @note This function requires the #' \href{https://cran.r-project.org/package=<%= pkg %>}{\pkg{<%= pkg %>}} package #' to be installed Seurat/man/roxygen/templates/section-future.R0000644000176200001440000000132714525500037021064 0ustar liggesusers#' @section Parallelization with \pkg{future}: #' This function uses #' \href{https://cran.r-project.org/package=future}{\pkg{future}} to enable #' parallelization. Parallelization strategies can be set using #' \code{\link[future]{plan}}. 
Common plans include \dQuote{\code{sequential}} #' for non-parallelized processing or \dQuote{\code{multisession}} for parallel #' evaluation using multiple \R sessions; for other plans, see the #' \dQuote{Implemented evaluation strategies} section of #' \code{\link[future:plan]{?future::plan}}. For a more thorough introduction #' to \pkg{future}, see #' \href{https://future.futureverse.org/articles/future-1-overview.html}{\code{vignette("future-1-overview")}} #' #' @concept future Seurat/man/roxygen/templates/param-dotsi.R0000644000176200001440000000002614525500037020323 0ustar liggesusers#' @param ... Ignored Seurat/man/BGTextColor.Rd0000644000176200001440000000223514525500037014716 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/visualization.R \name{BGTextColor} \alias{BGTextColor} \title{Determine text color based on background color} \source{ \url{https://stackoverflow.com/questions/3942878/how-to-decide-font-color-in-white-or-black-depending-on-background-color} } \usage{ BGTextColor( background, threshold = 186, w3c = FALSE, dark = "black", light = "white" ) } \arguments{ \item{background}{A vector of background colors; supports R color names and hexadecimal codes} \item{threshold}{Intensity threshold for light/dark cutoff; intensities greater than \code{threshold} yield \code{dark}, others yield \code{light}} \item{w3c}{Use \href{https://www.w3.org/TR/WCAG20/}{W3C} formula for calculating background text color; ignores \code{threshold}} \item{dark}{Color for dark text} \item{light}{Color for light text} } \value{ A named vector of either \code{dark} or \code{light}, depending on \code{background}; names of vector are \code{background} } \description{ Determine text color based on background color } \examples{ BGTextColor(background = c('black', 'white', '#E76BF3')) } \concept{visualization} Seurat/man/NormalizeData.Rd0000644000176200001440000000407014525500037015313 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/generics.R, R/preprocessing.R \name{NormalizeData} \alias{NormalizeData} \alias{NormalizeData.V3Matrix} \alias{NormalizeData.Assay} \alias{NormalizeData.Seurat} \title{Normalize Data} \usage{ NormalizeData(object, ...) \method{NormalizeData}{V3Matrix}( object, normalization.method = "LogNormalize", scale.factor = 10000, margin = 1, block.size = NULL, verbose = TRUE, ... ) \method{NormalizeData}{Assay}( object, normalization.method = "LogNormalize", scale.factor = 10000, margin = 1, verbose = TRUE, ... ) \method{NormalizeData}{Seurat}( object, assay = NULL, normalization.method = "LogNormalize", scale.factor = 10000, margin = 1, verbose = TRUE, ... ) } \arguments{ \item{object}{An object} \item{...}{Arguments passed to other methods} \item{normalization.method}{Method for normalization. \itemize{ \item \dQuote{\code{LogNormalize}}: Feature counts for each cell are divided by the total counts for that cell and multiplied by the \code{scale.factor}. This is then natural-log transformed using \code{log1p} \item \dQuote{\code{CLR}}: Applies a centered log ratio transformation \item \dQuote{\code{RC}}: Relative counts. Feature counts for each cell are divided by the total counts for that cell and multiplied by the \code{scale.factor}. No log-transformation is applied.
For counts per million (CPM) set \code{scale.factor = 1e6} }} \item{scale.factor}{Sets the scale factor for cell-level normalization} \item{margin}{If performing CLR normalization, normalize across features (1) or cells (2)} \item{block.size}{How many cells should be run in each chunk, will try to split evenly across threads} \item{verbose}{Display progress bar for the normalization procedure} \item{assay}{Name of assay to use} } \value{ Returns object after normalization } \description{ Normalize the count data present in a given assay. } \examples{ \dontrun{ data("pbmc_small") pbmc_small pbmc_small <- NormalizeData(object = pbmc_small) } } \concept{preprocessing} Seurat/man/FindConservedMarkers.Rd0000644000176200001440000000376014525500037016644 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/differential_expression.R \name{FindConservedMarkers} \alias{FindConservedMarkers} \title{Finds markers that are conserved between the groups} \usage{ FindConservedMarkers( object, ident.1, ident.2 = NULL, grouping.var, assay = "RNA", slot = "data", min.cells.group = 3, meta.method = metap::minimump, verbose = TRUE, ... ) } \arguments{ \item{object}{An object} \item{ident.1}{Identity class to define markers for} \item{ident.2}{A second identity class for comparison. If NULL (default) - use all other cells for comparison.} \item{grouping.var}{Grouping variable} \item{assay}{Name of assay to fetch data for (default is RNA)} \item{slot}{Slot to pull data from; note that if \code{test.use} is "negbinom", "poisson", or "DESeq2", \code{slot} will be set to "counts"} \item{min.cells.group}{Minimum number of cells in one of the groups} \item{meta.method}{Method for combining p-values. Should be a function from the metap package (NOTE: pass the function, not a string)} \item{verbose}{Print a progress bar once expression testing begins} \item{\dots}{parameters to pass to FindMarkers} } \value{ data.frame containing a ranked list of putative conserved markers, and associated statistics (p-values within each group and a combined p-value (such as Fisher's combined p-value or others from the metap package), percentage of cells expressing the marker, average differences). Name of group is appended to each associated output column (e.g. CTRL_p_val). If only one group is tested in the grouping.var, max and combined p-values are not returned.
} \description{ Finds markers that are conserved between the groups } \examples{ \dontrun{ data("pbmc_small") pbmc_small # Create a simulated grouping variable pbmc_small[['groups']] <- sample(x = c('g1', 'g2'), size = ncol(x = pbmc_small), replace = TRUE) FindConservedMarkers(pbmc_small, ident.1 = 0, ident.2 = 1, grouping.var = "groups") } } \concept{differential_expression} Seurat/man/AddAzimuthResults.Rd0000644000176200001440000000124214525500037016173 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/utilities.R \name{AddAzimuthResults} \alias{AddAzimuthResults} \title{Add Azimuth Results} \usage{ AddAzimuthResults(object = NULL, filename) } \arguments{ \item{object}{A \code{\link[SeuratObject]{Seurat}} object} \item{filename}{Path to Azimuth mapping scores file} } \value{ \code{object} with Azimuth results added } \description{ Add mapping and prediction scores, UMAP embeddings, and imputed assay (if available) from Azimuth to an existing or new \code{\link[SeuratObject]{Seurat}} object } \examples{ \dontrun{ object <- AddAzimuthResults(object, filename = "azimuth_results.Rds") } } Seurat/man/PercentAbove.Rd0000644000176200001440000000107614525500037015141 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/utilities.R \name{PercentAbove} \alias{PercentAbove} \title{Calculate the percentage of a vector above some threshold} \usage{ PercentAbove(x, threshold) } \arguments{ \item{x}{Vector of values} \item{threshold}{Threshold to use when calculating percentage} } \value{ Returns the percentage of \code{x} values above the given threshold } \description{ Calculate the percentage of a vector above some threshold } \examples{ set.seed(42) PercentAbove(sample(1:100, 10), 75) } \concept{utilities} Seurat/man/IntegrateLayers.Rd0000644000176200001440000000221214525500037015657 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/integration5.R \name{IntegrateLayers} \alias{IntegrateLayers} \title{Integrate Layers} \usage{ IntegrateLayers( object, method, orig.reduction = "pca", assay = NULL, features = NULL, layers = NULL, scale.layer = "scale.data", ... 
) } \arguments{ \item{object}{A \code{\link[SeuratObject]{Seurat}} object} \item{method}{Integration method function} \item{orig.reduction}{Name of dimensional reduction for correction} \item{assay}{Name of assay for integration} \item{features}{A vector of features to use for integration} \item{layers}{Names of normalized layers in \code{assay}} \item{scale.layer}{Name(s) of scaled layer(s) in \code{assay}} \item{...}{Arguments passed on to \code{method}} } \value{ \code{object} with integration data added to it } \description{ Integrate Layers } \section{Integration Method Functions}{ The following integration method functions are available: \Sexpr[stage=render,results=rd]{Seurat:::.rd_methods("integration")} } \seealso{ \link[Seurat:writing-integration]{Writing integration method functions} } \concept{integration} Seurat/man/AnchorSet-class.Rd0000644000176200001440000000276314525500037015561 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/objects.R \docType{class} \name{AnchorSet-class} \alias{AnchorSet-class} \alias{AnchorSet} \title{The AnchorSet Class} \description{ The AnchorSet class is an intermediate data storage class that stores the anchors and other related information needed for performing downstream analyses - namely data integration (\code{\link{IntegrateData}}) and data transfer (\code{\link{TransferData}}). } \section{Slots}{ \describe{ \item{\code{object.list}}{List of objects used to create anchors} \item{\code{reference.cells}}{List of cell names in the reference dataset - needed when performing data transfer.} \item{\code{reference.objects}}{Position of reference object/s in object.list} \item{\code{query.cells}}{List of cell names in the query dataset - needed when performing data transfer} \item{\code{anchors}}{The anchor matrix. This contains the cell indices of both anchor pair cells, the anchor score, and the index of the original dataset in the object.list for cell1 and cell2 of the anchor.} \item{\code{offsets}}{The offsets used to enable cell look up in downstream functions} \item{\code{weight.reduction}}{The weight dimensional reduction used to calculate weight matrix} \item{\code{anchor.features}}{The features used when performing anchor finding.} \item{\code{neighbors}}{List containing Neighbor objects for reuse later (e.g. mapping)} \item{\code{command}}{Store log of parameters that were used} }} \concept{objects} Seurat/man/UpdateSymbolList.Rd0000644000176200001440000000425514525500037016032 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/utilities.R \name{UpdateSymbolList} \alias{UpdateSymbolList} \alias{GeneSymbolThesarus} \title{Get updated synonyms for gene symbols} \source{ \url{https://www.genenames.org/} \url{https://www.genenames.org/help/rest/} } \usage{ GeneSymbolThesarus( symbols, timeout = 10, several.ok = FALSE, search.types = c("alias_symbol", "prev_symbol"), verbose = TRUE, ... ) UpdateSymbolList( symbols, timeout = 10, several.ok = FALSE, verbose = TRUE, ... 
) } \arguments{ \item{symbols}{A vector of gene symbols} \item{timeout}{Time to wait before canceling query in seconds} \item{several.ok}{Allow several current gene symbols for each provided symbol} \item{search.types}{Type of query to perform: \describe{ \item{\dQuote{\code{alias_symbol}}}{Find alternate symbols for the genes described by \code{symbols}} \item{\dQuote{\code{prev_symbol}}}{Find new symbols for the genes described by \code{symbols}} } This parameter accepts multiple options and short-hand options (e.g. \dQuote{\code{prev}} for \dQuote{\code{prev_symbol}})} \item{verbose}{Show a progress bar depicting search progress} \item{...}{Extra parameters passed to \code{\link[httr]{GET}}} } \value{ \code{GeneSymbolThesarus}: if \code{several.ok}, a named list where each entry is the current symbol found for each symbol provided and the names are the provided symbols. Otherwise, a named vector with the same information. \code{UpdateSymbolList}: \code{symbols} with updated symbols from HGNC's gene names database } \description{ Find current gene symbols based on old or alias symbols using the gene names database from the HUGO Gene Nomenclature Committee (HGNC) } \details{ For each symbol passed, we query the HGNC gene names database for current symbols that have the provided symbol as either an alias (\code{alias_symbol}) or old (\code{prev_symbol}) symbol. All other queries are \strong{not} supported. } \note{ This function requires internet access } \examples{ \dontrun{ GeneSymbolThesarus(symbols = c("FAM64A")) } \dontrun{ UpdateSymbolList(symbols = cc.genes$s.genes) } } \seealso{ \code{\link[httr]{GET}} } \concept{utilities} Seurat/man/MappingScore.Rd0000644000176200001440000000540414525500037015152 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/generics.R, R/integration.R \name{MappingScore} \alias{MappingScore} \alias{MappingScore.default} \alias{MappingScore.AnchorSet} \title{Metric for evaluating mapping success} \usage{ MappingScore(anchors, ...) \method{MappingScore}{default}( anchors, combined.object, query.neighbors, ref.embeddings, query.embeddings, kanchors = 50, ndim = 50, ksmooth = 100, ksnn = 20, snn.prune = 0, subtract.first.nn = TRUE, nn.method = "annoy", n.trees = 50, query.weights = NULL, verbose = TRUE, ... ) \method{MappingScore}{AnchorSet}( anchors, kanchors = 50, ndim = 50, ksmooth = 100, ksnn = 20, snn.prune = 0, subtract.first.nn = TRUE, nn.method = "annoy", n.trees = 50, query.weights = NULL, verbose = TRUE, ...
) } \arguments{ \item{anchors}{AnchorSet object or just anchor matrix from the AnchorSet object returned by FindTransferAnchors} \item{...}{Reserved for internal use} \item{combined.object}{Combined object (ref + query) from the AnchorSet object returned} \item{query.neighbors}{Neighbors object computed on query cells} \item{ref.embeddings}{Reference embeddings matrix} \item{query.embeddings}{Query embeddings matrix} \item{kanchors}{Number of anchors to use in projection steps when computing weights} \item{ndim}{Number of dimensions to use when working with low dimensional projections of the data} \item{ksmooth}{Number of cells to average over when computing transition probabilities} \item{ksnn}{Number of cells to average over when determining the kernel bandwidth from the SNN graph} \item{snn.prune}{Amount of pruning to apply to edges in SNN graph} \item{subtract.first.nn}{Option to the scoring function when computing distances to subtract the distance to the first nearest neighbor} \item{nn.method}{Nearest neighbor method to use (annoy or RANN)} \item{n.trees}{More trees give higher precision when using annoy approximate nearest neighbor search} \item{query.weights}{Query weights matrix for reuse} \item{verbose}{Display messages/progress} } \value{ Returns a vector of cell scores } \description{ This metric was designed to help identify query cells that aren't well represented in the reference dataset. The intuition for the score is that we are going to project the query cells into a reference-defined space and then project them back onto the query. By comparing the neighborhoods before and after projection, we identify cells whose local neighborhoods are the most affected by this transformation. This could be because there is a population of query cells that aren't present in the reference or the state of the cells in the query is significantly different from the equivalent cell type in the reference. } \concept{integration} Seurat/man/LogVMR.Rd0000644000176200001440000000075114525500037013671 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/utilities.R \name{LogVMR} \alias{LogVMR} \title{Calculate the variance to mean ratio of logged values} \usage{ LogVMR(x, ...)
} \arguments{ \item{x}{A vector of values} \item{...}{Other arguments (not used)} } \value{ Returns the VMR in log-space } \description{ Calculate the variance to mean ratio (VMR) in non-log space and return the answer in log-space } \examples{ LogVMR(x = c(1, 2, 3)) } \concept{utilities} Seurat/man/RidgePlot.Rd0000644000176200001440000000374114525500037014456 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/visualization.R \name{RidgePlot} \alias{RidgePlot} \title{Single cell ridge plot} \usage{ RidgePlot( object, features, cols = NULL, idents = NULL, sort = FALSE, assay = NULL, group.by = NULL, y.max = NULL, same.y.lims = FALSE, log = FALSE, ncol = NULL, slot = deprecated(), layer = "data", stack = FALSE, combine = TRUE, fill.by = "feature" ) } \arguments{ \item{object}{Seurat object} \item{features}{Features to plot (gene expression, metrics, PC scores, anything that can be retrieved by FetchData)} \item{cols}{Colors to use for plotting} \item{idents}{Which classes to include in the plot (default is all)} \item{sort}{Sort identity classes (on the x-axis) by the average expression of the attribute being plotted, can also pass 'increasing' or 'decreasing' to change sort direction} \item{assay}{Name of assay to use, defaults to the active assay} \item{group.by}{Group (color) cells in different ways (for example, orig.ident)} \item{y.max}{Maximum y axis value} \item{same.y.lims}{Set all the y-axis limits to the same values} \item{log}{Plot the feature axis on log scale} \item{ncol}{Number of columns if multiple plots are displayed} \item{slot}{Slot to pull expression data from (e.g. "counts" or "data")} \item{layer}{Layer to pull expression data from (e.g. "counts" or "data")} \item{stack}{Horizontally stack plots for each feature} \item{combine}{Combine plots into a single \code{\link[patchwork]{patchwork}ed} ggplot object. If \code{FALSE}, return a list of ggplot} \item{fill.by}{Color violins/ridges based on either 'feature' or 'ident'} } \value{ A \code{\link[patchwork]{patchwork}ed} ggplot object if \code{combine = TRUE}; otherwise, a list of ggplot objects } \description{ Draws a ridge plot of single cell data (gene expression, metrics, PC scores, etc.) } \examples{ data("pbmc_small") RidgePlot(object = pbmc_small, features = 'PC_1') } \concept{visualization} Seurat/man/FindBridgeTransferAnchors.Rd0000644000176200001440000000452614525500037017607 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/integration.R \name{FindBridgeTransferAnchors} \alias{FindBridgeTransferAnchors} \title{Find bridge anchors between query and extended bridge-reference} \usage{ FindBridgeTransferAnchors( extended.reference, query, query.assay = NULL, dims = 1:30, scale = FALSE, reduction = c("lsiproject", "pcaproject"), bridge.reduction = c("direct", "cca"), verbose = TRUE ) } \arguments{ \item{extended.reference}{BridgeReferenceSet object generated from \code{\link{PrepareBridgeReference}}} \item{query}{A query Seurat object} \item{query.assay}{Assay name for query-bridge integration} \item{dims}{Number of dimensions for query-bridge integration} \item{scale}{Whether to scale the query data for projection} \item{reduction}{Dimensional reduction to perform when finding anchors. Options are: \itemize{ \item{pcaproject: Project the PCA from the bridge onto the query. We recommend using PCA when bridge and query datasets are from scRNA-seq} \item{lsiproject: Project the LSI from the bridge onto the query.
We recommend using LSI when bridge and query datasets are from scATAC-seq or scCUT&TAG data. This requires that LSI or supervised LSI has been computed for the bridge dataset, and the same features (e.g., peaks or genome bins) are present in both the bridge and query. } }} \item{bridge.reduction}{Dimensional reduction to perform when finding anchors. Can be one of: \itemize{ \item{cca: Canonical correlation analysis} \item{direct: Use assay data as a dimensional reduction} }} \item{verbose}{Print messages and progress} } \value{ Returns an \code{AnchorSet} object that can be used as input to \code{\link{TransferData}}, \code{\link{IntegrateEmbeddings}} and \code{\link{MapQuery}}. } \description{ Find a set of anchors between unimodal query and the other unimodal reference using a pre-computed \code{\link{BridgeReferenceSet}}. This function performs three steps: 1. Harmonize the bridge and query cells in the bridge query reduction space 2. Construct the bridge dictionary representations for query cells 3. Find a set of anchors between query and reference in the bridge graph laplacian eigenspace These anchors can later be used to integrate embeddings or transfer data from the reference to query object using the \code{\link{MapQuery}} function. } Seurat/man/SpatialImage-class.Rd0000644000176200001440000000061414525500037016224 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/reexports.R \docType{class} \name{SpatialImage-class} \alias{SpatialImage-class} \title{The SpatialImage Class} \description{ For more details, please see the documentation in \code{\link[SeuratObject:SpatialImage]{SeuratObject}} } \seealso{ \code{\link[SeuratObject:SpatialImage]{SeuratObject::SpatialImage-class}} } Seurat/man/ElbowPlot.Rd0000644000176200001440000000144014525500037014470 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/visualization.R \name{ElbowPlot} \alias{ElbowPlot} \title{Quickly Pick Relevant Dimensions} \usage{ ElbowPlot(object, ndims = 20, reduction = "pca") } \arguments{ \item{object}{Seurat object} \item{ndims}{Number of dimensions to plot standard deviation for} \item{reduction}{Reduction technique to plot standard deviation for} } \value{ A ggplot object } \description{ Plots the standard deviations (or approximate singular values if running PCAFast) of the principal components for easy identification of an elbow in the graph. This elbow often corresponds well with the significant dimensions and is much faster to run than JackStraw } \examples{ data("pbmc_small") ElbowPlot(object = pbmc_small) } \concept{visualization} Seurat/man/RenameCells.Rd0000644000176200001440000000143114525500037014751 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/objects.R \name{RenameCells.SCTAssay} \alias{RenameCells.SCTAssay} \alias{RenameCells.SlideSeq} \alias{RenameCells.STARmap} \alias{RenameCells.VisiumV1} \title{Rename Cells in an Object} \usage{ \method{RenameCells}{SCTAssay}(object, new.names = NULL, ...) \method{RenameCells}{SlideSeq}(object, new.names = NULL, ...) \method{RenameCells}{STARmap}(object, new.names = NULL, ...) \method{RenameCells}{VisiumV1}(object, new.names = NULL, ...)
} \arguments{ \item{object}{An object} \item{new.names}{vector of new cell names} \item{...}{Arguments passed to other methods} } \description{ Rename Cells in an Object } \seealso{ \code{\link[SeuratObject:RenameCells]{SeuratObject::RenameCells}} } \concept{objects} Seurat/man/UpdateSCTAssays.Rd0000644000176200001440000000074114525500037015542 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/objects.R \name{UpdateSCTAssays} \alias{UpdateSCTAssays} \title{Update pre-V4 Assays generated with SCTransform in the Seurat to the new SCTAssay class} \usage{ UpdateSCTAssays(object) } \arguments{ \item{object}{A Seurat object} } \value{ A Seurat object with updated SCTAssays } \description{ Update pre-V4 Assays generated with SCTransform in the Seurat to the new SCTAssay class } \concept{objects} Seurat/man/SCTransform.Rd0000644000176200001440000001353414525500037014767 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/generics.R, R/preprocessing.R, % R/preprocessing5.R \name{SCTransform} \alias{SCTransform} \alias{SCTransform.default} \alias{SCTransform.Assay} \alias{SCTransform.Seurat} \alias{SCTransform.IterableMatrix} \title{Perform sctransform-based normalization} \usage{ SCTransform(object, ...) \method{SCTransform}{default}( object, cell.attr, reference.SCT.model = NULL, do.correct.umi = TRUE, ncells = 5000, residual.features = NULL, variable.features.n = 3000, variable.features.rv.th = 1.3, vars.to.regress = NULL, do.scale = FALSE, do.center = TRUE, clip.range = c(-sqrt(x = ncol(x = umi)/30), sqrt(x = ncol(x = umi)/30)), vst.flavor = "v2", conserve.memory = FALSE, return.only.var.genes = TRUE, seed.use = 1448145, verbose = TRUE, ... ) \method{SCTransform}{Assay}( object, cell.attr, reference.SCT.model = NULL, do.correct.umi = TRUE, ncells = 5000, residual.features = NULL, variable.features.n = 3000, variable.features.rv.th = 1.3, vars.to.regress = NULL, do.scale = FALSE, do.center = TRUE, clip.range = c(-sqrt(x = ncol(x = object)/30), sqrt(x = ncol(x = object)/30)), vst.flavor = "v2", conserve.memory = FALSE, return.only.var.genes = TRUE, seed.use = 1448145, verbose = TRUE, ... ) \method{SCTransform}{Seurat}( object, assay = "RNA", new.assay.name = "SCT", reference.SCT.model = NULL, do.correct.umi = TRUE, ncells = 5000, residual.features = NULL, variable.features.n = 3000, variable.features.rv.th = 1.3, vars.to.regress = NULL, do.scale = FALSE, do.center = TRUE, clip.range = c(-sqrt(x = ncol(x = object[[assay]])/30), sqrt(x = ncol(x = object[[assay]])/30)), vst.flavor = "v2", conserve.memory = FALSE, return.only.var.genes = TRUE, seed.use = 1448145, verbose = TRUE, ... ) \method{SCTransform}{IterableMatrix}( object, cell.attr, reference.SCT.model = NULL, do.correct.umi = TRUE, ncells = 5000, residual.features = NULL, variable.features.n = 3000, variable.features.rv.th = 1.3, vars.to.regress = NULL, do.scale = FALSE, do.center = TRUE, clip.range = c(-sqrt(x = ncol(x = object)/30), sqrt(x = ncol(x = object)/30)), vst.flavor = "v2", conserve.memory = FALSE, return.only.var.genes = TRUE, seed.use = 1448145, verbose = TRUE, ... ) } \arguments{ \item{object}{UMI counts matrix} \item{...}{Additional parameters passed to \code{sctransform::vst}} \item{cell.attr}{A metadata with cell attributes} \item{reference.SCT.model}{If not NULL, compute residuals for the object using the provided SCT model; supports only log_umi as the latent variable. 
If residual.features are not specified, compute for the top variable.features.n specified in the model which are also present in the object. If residual.features are specified, the variable features of the resulting SCT assay are set to the top variable.features.n in the model.} \item{do.correct.umi}{Place corrected UMI matrix in assay counts slot; default is TRUE} \item{ncells}{Number of subsampling cells used to build NB regression; default is 5000} \item{residual.features}{Genes to calculate residual features for; default is NULL (all genes). If specified, will be set to VariableFeatures of the returned object.} \item{variable.features.n}{Use this many features as variable features after ranking by residual variance; default is 3000. Only applied if residual.features is not set.} \item{variable.features.rv.th}{Instead of setting a fixed number of variable features, use this residual variance cutoff; this is only used when \code{variable.features.n} is set to NULL; default is 1.3. Only applied if residual.features is not set.} \item{vars.to.regress}{Variables to regress out in a second non-regularized linear regression. For example, percent.mito. Default is NULL} \item{do.scale}{Whether to scale residuals to have unit variance; default is FALSE} \item{do.center}{Whether to center residuals to have mean zero; default is TRUE} \item{clip.range}{Range to clip the residuals to; default is \code{c(-sqrt(n/30), sqrt(n/30))}, where n is the number of cells} \item{vst.flavor}{When set to 'v2' sets method = glmGamPoi_offset, n_cells=2000, and exclude_poisson = TRUE which causes the model to learn theta and intercept only besides excluding poisson genes from learning and regularization} \item{conserve.memory}{If set to TRUE the residual matrix for all genes is never created in full; useful for large data sets, but will take longer to run; this will also set return.only.var.genes to TRUE; default is FALSE} \item{return.only.var.genes}{If set to TRUE the scale.data matrices in output assay are subset to contain only the variable genes; default is TRUE} \item{seed.use}{Set a random seed. By default, sets the seed to 1448145. Setting NULL will not set a seed.} \item{verbose}{Whether to print messages and progress bars} \item{assay}{Name of assay to pull the count data from; default is 'RNA'} \item{new.assay.name}{Name for the new assay containing the normalized data; default is 'SCT'} } \value{ Returns a Seurat object with a new assay (named SCT by default) with counts being (corrected) counts, data being log1p(counts), scale.data being pearson residuals; sctransform::vst intermediate results are saved in misc slot of the new assay. } \description{ This function calls sctransform::vst. The sctransform package is available at https://github.com/satijalab/sctransform. Use this function as an alternative to the NormalizeData, FindVariableFeatures, ScaleData workflow. Results are saved in a new assay (named SCT by default) with counts being (corrected) counts, data being log1p(counts), scale.data being pearson residuals; sctransform::vst intermediate results are saved in misc slot of new assay. 
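A minimal usage sketch (assuming the bundled \code{pbmc_small} example object, as used elsewhere in these manual pages, and that the \pkg{sctransform} package is installed): \preformatted{
data("pbmc_small")
# run sctransform-based normalization; results are placed in a new "SCT" assay
pbmc_small <- SCTransform(object = pbmc_small, variable.features.n = 20)
# the SCT assay becomes the default assay
DefaultAssay(object = pbmc_small)
# corrected counts, log1p(counts), and Pearson residuals are stored in the
# counts, data, and scale.data slots of the SCT assay, respectively
GetAssayData(object = pbmc_small, assay = "SCT", slot = "scale.data")[1:5, 1:5]
}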
} \seealso{ \code{\link[sctransform]{correct_counts}} \code{\link[sctransform]{get_residuals}} } \concept{preprocessing} Seurat/man/FastRPCAIntegration.Rd0000644000176200001440000000461014525500037016330 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/integration.R \name{FastRPCAIntegration} \alias{FastRPCAIntegration} \title{Perform integration on the joint PCA cell embeddings.} \usage{ FastRPCAIntegration( object.list, reference = NULL, anchor.features = 2000, k.anchor = 20, dims = 1:30, scale = TRUE, normalization.method = c("LogNormalize", "SCT"), new.reduction.name = "integrated_dr", npcs = 50, findintegrationanchors.args = list(), verbose = TRUE ) } \arguments{ \item{object.list}{A list of \code{\link{Seurat}} objects between which to find anchors for downstream integration.} \item{reference}{A vector specifying the object/s to be used as a reference during integration. If NULL (default), all pairwise anchors are found (no reference/s). If not NULL, the corresponding objects in \code{object.list} will be used as references. When using a set of specified references, anchors are first found between each query and each reference. The references are then integrated through pairwise integration. Each query is then mapped to the integrated reference.} \item{anchor.features}{Can be either: \itemize{ \item{A numeric value. This will call \code{\link{SelectIntegrationFeatures}} to select the provided number of features to be used in anchor finding} \item{A vector of features to be used as input to the anchor finding process} }} \item{k.anchor}{How many neighbors (k) to use when picking anchors} \item{dims}{Which dimensions to use from the CCA to specify the neighbor search space} \item{scale}{Whether or not to scale the features provided. Only set to FALSE if you have previously scaled the features you want to use for each object in the object.list} \item{normalization.method}{Name of normalization method used: LogNormalize or SCT} \item{new.reduction.name}{Name of integrated dimensional reduction} \item{npcs}{Total number of PCs to compute and store (50 by default)} \item{findintegrationanchors.args}{A named list of additional arguments to \code{\link{FindIntegrationAnchors}}} \item{verbose}{Print messages and progress} } \value{ Returns a Seurat object with integrated dimensional reduction } \description{ This is a convenience wrapper function around the following three functions that are often run together when performing integration: \code{\link{FindIntegrationAnchors}}, \code{\link{RunPCA}}, and \code{\link{IntegrateEmbeddings}}. } Seurat/man/LogNormalize.Rd0000644000176200001440000000220414525500037015160 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/generics.R, R/preprocessing.R, % R/preprocessing5.R \name{LogNormalize} \alias{LogNormalize} \alias{LogNormalize.data.frame} \alias{LogNormalize.V3Matrix} \alias{LogNormalize.default} \title{Normalize Raw Data} \usage{ LogNormalize(data, scale.factor = 10000, margin = 2L, verbose = TRUE, ...) \method{LogNormalize}{data.frame}(data, scale.factor = 10000, margin = 2L, verbose = TRUE, ...) \method{LogNormalize}{V3Matrix}(data, scale.factor = 10000, margin = 2L, verbose = TRUE, ...) \method{LogNormalize}{default}(data, scale.factor = 10000, margin = 2L, verbose = TRUE, ...)
} \arguments{ \item{data}{Matrix with the raw count data} \item{scale.factor}{Scale factor for the data; default is \code{1e4}} \item{margin}{Margin to normalize over} \item{verbose}{Print progress} \item{...}{Arguments passed to other methods} } \value{ A matrix with the normalized and log-transformed data } \description{ Normalize Raw Data } \examples{ mat <- matrix(data = rbinom(n = 25, size = 5, prob = 0.2), nrow = 5) mat mat_norm <- LogNormalize(data = mat) mat_norm } \concept{preprocessing} Seurat/man/Read10X_Image.Rd0000644000176200001440000000140514525500037015026 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/preprocessing.R \name{Read10X_Image} \alias{Read10X_Image} \title{Load a 10X Genomics Visium Image} \usage{ Read10X_Image(image.dir, filter.matrix = TRUE, ...) } \arguments{ \item{image.dir}{Path to directory with 10X Genomics Visium image data; should include files \code{tissue_lowres_image.png}, \code{scalefactors_json.json} and \code{tissue_positions_list.csv}} \item{filter.matrix}{Filter spot/feature matrix to only include spots that have been determined to be over tissue.} \item{...}{Ignored for now} } \value{ A \code{\link{VisiumV1}} object } \description{ Load a 10X Genomics Visium Image } \seealso{ \code{\link{VisiumV1}} \code{\link{Load10X_Spatial}} } \concept{preprocessing} Seurat/man/Seurat-package.Rd0000644000176200001440000001104714525500037015417 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/zzz.R \docType{package} \name{Seurat-package} \alias{Seurat} \alias{Seurat-package} \title{Seurat: Tools for Single Cell Genomics} \description{ A toolkit for quality control, analysis, and exploration of single cell RNA sequencing data. 'Seurat' aims to enable users to identify and interpret sources of heterogeneity from single cell transcriptomic measurements, and to integrate diverse types of single cell data. See Satija R, Farrell J, Gennert D, et al (2015) \doi{10.1038/nbt.3192}, Macosko E, Basu A, Satija R, et al (2015) \doi{10.1016/j.cell.2015.05.002}, Stuart T, Butler A, et al (2019) \doi{10.1016/j.cell.2019.05.031}, and Hao, Hao, et al (2020) \doi{10.1101/2020.10.12.335331} for more details. } \section{Package options}{ Seurat uses the following [options()] to configure behaviour: \describe{ \item{\code{Seurat.memsafe}}{global option to call gc() after many operations. This can be helpful in cleaning up the memory status of the R session and prevent use of swap space. However, it does add to the computational overhead and setting to FALSE can speed things up if you're working in an environment where RAM availability is not a concern.} \item{\code{Seurat.warn.umap.uwot}}{Show warning about the default backend for \code{\link{RunUMAP}} changing from Python UMAP via reticulate to UWOT} \item{\code{Seurat.checkdots}}{For functions that have ... as a parameter, this controls the behavior when an item isn't used.
Can be one of warn, stop, or silent.} \item{\code{Seurat.limma.wilcox.msg}}{{Show message about more efficient Wilcoxon Rank Sum test available via the limma package}} \item{\code{Seurat.Rfast2.msg}}{{Show message about more efficient Moran's I function available via the Rfast2 package}} \item{\code{Seurat.warn.vlnplot.split}}{Show message about changes to default behavior of split/multi violin plots} } } \seealso{ Useful links: \itemize{ \item \url{https://satijalab.org/seurat} \item \url{https://github.com/satijalab/seurat} \item Report bugs at \url{https://github.com/satijalab/seurat/issues} } } \author{ \strong{Maintainer}: Rahul Satija \email{seurat@nygenome.org} (\href{https://orcid.org/0000-0001-9448-8833}{ORCID}) Other contributors: \itemize{ \item Andrew Butler \email{abutler@nygenome.org} (\href{https://orcid.org/0000-0003-3608-0463}{ORCID}) [contributor] \item Saket Choudhary \email{schoudhary@nygenome.org} (\href{https://orcid.org/0000-0001-5202-7633}{ORCID}) [contributor] \item Charlotte Darby \email{cdarby@nygenome.org} (\href{https://orcid.org/0000-0003-2195-5300}{ORCID}) [contributor] \item Jeff Farrell \email{jfarrell@g.harvard.edu} [contributor] \item Isabella Grabski \email{igrabski@nygenome.org} (\href{https://orcid.org/0000-0002-0616-5469}{ORCID}) [contributor] \item Christoph Hafemeister \email{chafemeister@nygenome.org} (\href{https://orcid.org/0000-0001-6365-8254}{ORCID}) [contributor] \item Yuhan Hao \email{yhao@nygenome.org} (\href{https://orcid.org/0000-0002-1810-0822}{ORCID}) [contributor] \item Austin Hartman \email{ahartman@nygenome.org} (\href{https://orcid.org/0000-0001-7278-1852}{ORCID}) [contributor] \item Paul Hoffman \email{hoff0792@umn.edu} (\href{https://orcid.org/0000-0002-7693-8957}{ORCID}) [contributor] \item Jaison Jain \email{jjain@nygenome.org} (\href{https://orcid.org/0000-0002-9478-5018}{ORCID}) [contributor] \item Longda Jiang \email{ljiang@nygenome.org} (\href{https://orcid.org/0000-0003-4964-6497}{ORCID}) [contributor] \item Madeline Kowalski \email{mkowalski@nygenome.org} (\href{https://orcid.org/0000-0002-5655-7620}{ORCID}) [contributor] \item Skylar Li \email{sli@nygenome.org} [contributor] \item Gesmira Molla \email{gmolla@nygenome.org} (\href{https://orcid.org/0000-0002-8628-5056}{ORCID}) [contributor] \item Efthymia Papalexi \email{epapalexi@nygenome.org} (\href{https://orcid.org/0000-0001-5898-694X}{ORCID}) [contributor] \item Patrick Roelli \email{proelli@nygenome.org} [contributor] \item Karthik Shekhar \email{kshekhar@berkeley.edu} [contributor] \item Avi Srivastava \email{asrivastava@nygenome.org} (\href{https://orcid.org/0000-0001-9798-2079}{ORCID}) [contributor] \item Tim Stuart \email{tstuart@nygenome.org} (\href{https://orcid.org/0000-0002-3044-0897}{ORCID}) [contributor] \item Kristof Torkenczy (\href{https://orcid.org/0000-0002-4869-7957}{ORCID}) [contributor] \item Shiwei Zheng \email{szheng@nygenome.org} (\href{https://orcid.org/0000-0001-6682-6743}{ORCID}) [contributor] \item Satija Lab and Collaborators [funder] } } Seurat/man/HVFInfo.SCTAssay.Rd0000644000176200001440000000136514525500037015455 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/objects.R \name{HVFInfo.SCTAssay} \alias{HVFInfo.SCTAssay} \title{Get Variable Feature Information} \usage{ \method{HVFInfo}{SCTAssay}(object, method, status = FALSE, ...) 
} \arguments{ \item{object}{An object} \item{method}{method to determine variable features} \item{status}{Add variable status to the resulting data frame} \item{...}{Arguments passed to other methods} } \description{ Get variable feature information from \code{\link{SCTAssay}} objects } \examples{ \dontrun{ # Get the HVF info directly from an SCTAssay object pbmc_small <- SCTransform(pbmc_small) HVFInfo(pbmc_small[["SCT"]], method = 'sct')[1:5, ] } } \seealso{ \code{\link[SeuratObject]{HVFInfo}} } Seurat/man/ReadXenium.Rd0000644000176200001440000000340314525500037014621 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/convenience.R, R/preprocessing.R \name{LoadXenium} \alias{LoadXenium} \alias{ReadXenium} \title{Read and Load 10x Genomics Xenium in-situ data} \usage{ LoadXenium(data.dir, fov = "fov", assay = "Xenium") ReadXenium( data.dir, outs = c("matrix", "microns"), type = "centroids", mols.qv.threshold = 20 ) } \arguments{ \item{data.dir}{Directory containing all Xenium output files with default filenames} \item{fov}{FOV name} \item{assay}{Assay name} \item{outs}{Types of molecular outputs to read; choose one or more of: \itemize{ \item \dQuote{matrix}: the counts matrix \item \dQuote{microns}: molecule coordinates }} \item{type}{Type of cell spatial coordinate matrices to read; choose one or more of: \itemize{ \item \dQuote{centroids}: cell centroids in pixel coordinate space \item \dQuote{segmentations}: cell segmentations in pixel coordinate space }} \item{mols.qv.threshold}{Remove transcript molecules with a QV less than this threshold. QV >= 20 is the standard threshold used to construct the cell x gene count matrix.} } \value{ \code{LoadXenium}: A \code{\link[SeuratObject]{Seurat}} object \code{ReadXenium}: A list with some combination of the following values: \itemize{ \item \dQuote{\code{matrix}}: a \link[Matrix:dgCMatrix-class]{sparse matrix} with expression data; cells are columns and features are rows \item \dQuote{\code{centroids}}: a data frame with cell centroid coordinates in three columns: \dQuote{x}, \dQuote{y}, and \dQuote{cell} \item \dQuote{\code{pixels}}: a data frame with molecule pixel coordinates in three columns: \dQuote{x}, \dQuote{y}, and \dQuote{gene} } } \description{ Read and Load 10x Genomics Xenium in-situ data } \concept{preprocessing} Seurat/man/RunLDA.Rd0000644000176200001440000000312514525500037013646 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/generics.R, R/mixscape.R \name{RunLDA} \alias{RunLDA} \alias{RunLDA.default} \alias{RunLDA.Assay} \alias{RunLDA.Seurat} \title{Run Linear Discriminant Analysis} \usage{ RunLDA(object, ...) \method{RunLDA}{default}( object, labels, assay = NULL, verbose = TRUE, ndims.print = 1:5, nfeatures.print = 30, reduction.key = "LDA_", seed = 42, ... ) \method{RunLDA}{Assay}( object, assay = NULL, labels, features = NULL, verbose = TRUE, ndims.print = 1:5, nfeatures.print = 30, reduction.key = "LDA_", seed = 42, ... ) \method{RunLDA}{Seurat}( object, assay = NULL, labels, features = NULL, reduction.name = "lda", reduction.key = "LDA_", seed = 42, verbose = TRUE, ndims.print = 1:5, nfeatures.print = 30, ... 
) } \arguments{ \item{object}{An object of class Seurat.} \item{...}{Arguments passed to other methods} \item{labels}{Meta data column with target gene class labels.} \item{assay}{Assay to use for performing Linear Discriminant Analysis (LDA).} \item{verbose}{Print the top genes associated with high/low loadings for the PCs} \item{ndims.print}{Number of LDA dimensions to print.} \item{nfeatures.print}{Number of features to print for each LDA component.} \item{reduction.key}{Reduction key name.} \item{seed}{Value for random seed} \item{features}{Features to compute LDA on} \item{reduction.name}{dimensional reduction name, lda by default} } \description{ Run Linear Discriminant Analysis Function to perform Linear Discriminant Analysis. } \concept{mixscape} Seurat/man/ScoreJackStraw.Rd0000644000176200001440000000305014525500037015443 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/generics.R, R/dimensional_reduction.R \name{ScoreJackStraw} \alias{ScoreJackStraw} \alias{ScoreJackStraw.JackStrawData} \alias{ScoreJackStraw.DimReduc} \alias{ScoreJackStraw.Seurat} \title{Compute Jackstraw scores significance.} \usage{ ScoreJackStraw(object, ...) \method{ScoreJackStraw}{JackStrawData}(object, dims = 1:5, score.thresh = 1e-05, ...) \method{ScoreJackStraw}{DimReduc}(object, dims = 1:5, score.thresh = 1e-05, ...) \method{ScoreJackStraw}{Seurat}( object, reduction = "pca", dims = 1:5, score.thresh = 1e-05, do.plot = FALSE, ... ) } \arguments{ \item{object}{An object} \item{...}{Arguments passed to other methods} \item{dims}{Which dimensions to examine} \item{score.thresh}{Threshold to use for the proportion test of PC significance (see Details)} \item{reduction}{Reduction associated with JackStraw to score} \item{do.plot}{Show plot. To return ggplot object, use \code{JackStrawPlot} after running ScoreJackStraw.} } \value{ Returns a Seurat object } \description{ Significant PCs should show a p-value distribution that is strongly skewed to the left compared to the null distribution. The p-value for each PC is based on a proportion test comparing the number of features with a p-value below a particular threshold (score.thresh), compared with the proportion of features expected under a uniform distribution of p-values. 
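A typical workflow sketch (illustrative only; it assumes the bundled \code{pbmc_small} example object, which already contains a PCA reduction): \preformatted{
data("pbmc_small")
# permute a subset of the data and re-run PCA to build a null distribution
pbmc_small <- JackStraw(object = pbmc_small, dims = 5, num.replicate = 20)
# score the significance of the first 5 PCs
pbmc_small <- ScoreJackStraw(object = pbmc_small, dims = 1:5)
# visualize the p-value distributions
JackStrawPlot(object = pbmc_small, dims = 1:5)
}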
} \seealso{ \code{\link{JackStrawPlot}} \code{\link{JackStrawPlot}} } \author{ Omri Wurtzel } \concept{dimensional_reduction} Seurat/man/ReadSlideSeq.Rd0000644000176200001440000000077414525500037015075 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/preprocessing.R \name{ReadSlideSeq} \alias{ReadSlideSeq} \title{Load Slide-seq spatial data} \usage{ ReadSlideSeq(coord.file, assay = "Spatial") } \arguments{ \item{coord.file}{Path to csv file containing bead coordinate positions} \item{assay}{Name of assay to associate image to} } \value{ A \code{\link{SlideSeq}} object } \description{ Load Slide-seq spatial data } \seealso{ \code{\link{SlideSeq}} } \concept{preprocessing} Seurat/man/SeuratCommand-class.Rd0000644000176200001440000000062214525500037016425 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/reexports.R \docType{class} \name{SeuratCommand-class} \alias{SeuratCommand-class} \title{The SeuratCommand Class} \description{ For more details, please see the documentation in \code{\link[SeuratObject:SeuratCommand]{SeuratObject}} } \seealso{ \code{\link[SeuratObject:SeuratCommand]{SeuratObject::SeuratCommand-class}} } Seurat/man/JointPCAIntegration.Rd0000644000176200001440000000605014525500037016374 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/integration5.R \name{JointPCAIntegration} \alias{JointPCAIntegration} \title{Seurat-Joint PCA Integration} \usage{ JointPCAIntegration( object = NULL, assay = NULL, layers = NULL, orig = NULL, new.reduction = "integrated.dr", reference = NULL, features = NULL, normalization.method = c("LogNormalize", "SCT"), dims = 1:30, k.anchor = 20, scale.layer = "scale.data", dims.to.integrate = NULL, k.weight = 100, weight.reduction = NULL, sd.weight = 1, sample.tree = NULL, preserve.order = FALSE, verbose = TRUE, ... ) } \arguments{ \item{object}{A \code{Seurat} object} \item{assay}{Name of \code{Assay} in the \code{Seurat} object} \item{layers}{Names of layers in \code{assay}} \item{orig}{A \link[SeuratObject:DimReduc]{dimensional reduction} to correct} \item{new.reduction}{Name of new integrated dimensional reduction} \item{reference}{A reference \code{Seurat} object} \item{features}{A vector of features to use for integration} \item{normalization.method}{Name of normalization method used: LogNormalize or SCT} \item{dims}{Dimensions of dimensional reduction to use for integration} \item{k.anchor}{How many neighbors (k) to use when picking anchors} \item{scale.layer}{Name of scaled layer in \code{Assay}} \item{dims.to.integrate}{Number of dimensions to return integrated values for} \item{k.weight}{Number of neighbors to consider when weighting anchors} \item{weight.reduction}{Dimension reduction to use when calculating anchor weights. This can be one of: \itemize{ \item{A string, specifying the name of a dimension reduction present in all objects to be integrated} \item{A vector of strings, specifying the name of a dimension reduction to use for each object to be integrated} \item{A vector of \code{\link{DimReduc}} objects, specifying the object to use for each object in the integration} \item{NULL, in which case the full corrected space is used for computing anchor weights.} }} \item{sd.weight}{Controls the bandwidth of the Gaussian kernel for weighting} \item{sample.tree}{Specify the order of integration. 
Order of integration should be encoded in a matrix, where each row represents one of the pairwise integration steps. Negative numbers specify a dataset, positive numbers specify the integration results from a given row (the format of the merge matrix included in the \code{\link{hclust}} function output). For example: \code{matrix(c(-2, 1, -3, -1), ncol = 2)} gives: \if{html}{\out{
}}\preformatted{     [,1] [,2]
[1,]   -2   -3
[2,]    1   -1
}\if{html}{\out{
}} Which would cause dataset 2 and 3 to be integrated first, then the resulting object integrated with dataset 1. If NULL, the sample tree will be computed automatically.} \item{preserve.order}{Do not reorder objects based on size for each pairwise integration.} \item{verbose}{Print progress} \item{...}{Arguments passed on to \code{FindIntegrationAnchors}} } \description{ Seurat-Joint PCA Integration } Seurat/man/TransferData.Rd0000644000176200001440000001510414525500037015137 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/integration.R \name{TransferData} \alias{TransferData} \title{Transfer data} \usage{ TransferData( anchorset, refdata, reference = NULL, query = NULL, query.assay = NULL, weight.reduction = "pcaproject", l2.norm = FALSE, dims = NULL, k.weight = 50, sd.weight = 1, eps = 0, n.trees = 50, verbose = TRUE, slot = "data", prediction.assay = FALSE, only.weights = FALSE, store.weights = TRUE ) } \arguments{ \item{anchorset}{An \code{\link{AnchorSet}} object generated by \code{\link{FindTransferAnchors}}} \item{refdata}{Data to transfer. This can be specified in one of two ways: \itemize{ \item{The reference data itself as either a vector where the names correspond to the reference cells, or a matrix, where the column names correspond to the reference cells.} \item{The name of the metadata field or assay from the reference object provided. This requires the reference parameter to be specified. If pulling assay data in this manner, it will pull the data from the data slot. To transfer data from other slots, please pull the data explicitly with \code{\link{GetAssayData}} and provide that matrix here.} }} \item{reference}{Reference object from which to pull data to transfer} \item{query}{Query object into which the data will be transferred.} \item{query.assay}{Name of the Assay to use from query} \item{weight.reduction}{Dimensional reduction to use for the weighting anchors. Options are: \itemize{ \item{pcaproject: Use the projected PCA used for anchor building} \item{lsiproject: Use the projected LSI used for anchor building} \item{pca: Use an internal PCA on the query only} \item{cca: Use the CCA used for anchor building} \item{custom DimReduc: User provided \code{\link{DimReduc}} object computed on the query cells} }} \item{l2.norm}{Perform L2 normalization on the cell embeddings after dimensional reduction} \item{dims}{Set of dimensions to use in the anchor weighting procedure. If NULL, the same dimensions that were used to find anchors will be used for weighting.} \item{k.weight}{Number of neighbors to consider when weighting anchors} \item{sd.weight}{Controls the bandwidth of the Gaussian kernel for weighting} \item{eps}{Error bound on the neighbor finding algorithm (from \code{\link{RANN}})} \item{n.trees}{More trees gives higher precision when using annoy approximate nearest neighbor search} \item{verbose}{Print progress bars and output} \item{slot}{Slot to store the imputed data. Must be either "data" (default) or "counts"} \item{prediction.assay}{Return an \code{Assay} object with the prediction scores for each class stored in the \code{data} slot.} \item{only.weights}{Only return weights matrix} \item{store.weights}{Optionally store the weights matrix used for predictions in the returned query object.} } \value{ If \code{query} is not provided, for the categorical data in \code{refdata}, returns a data.frame with label predictions. 
If \code{refdata} is a matrix, returns an Assay object where the imputed data has been stored in the provided slot. If \code{query} is provided, a modified query object is returned. For the categorical data in refdata, prediction scores are stored as Assays (prediction.score.NAME) and two additional metadata fields: predicted.NAME and predicted.NAME.score which contain the class prediction and the score for that predicted class. For continuous data, an Assay called NAME is returned. NAME here corresponds to the name of the element in the refdata list. } \description{ Transfer categorical or continuous data across single-cell datasets. For transferring categorical information, pass a vector from the reference dataset (e.g. \code{refdata = reference$celltype}). For transferring continuous information, pass a matrix from the reference dataset (e.g. \code{refdata = GetAssayData(reference[['RNA']])}). } \details{ The main steps of this procedure are outlined below. For a more detailed description of the methodology, please see Stuart, Butler, et al Cell 2019. \doi{10.1016/j.cell.2019.05.031}; \doi{10.1101/460147} For both transferring discrete labels and also feature imputation, we first compute the weights matrix. \itemize{ \item{Construct a weights matrix that defines the association between each query cell and each anchor. These weights are computed as 1 - the distance between the query cell and the anchor divided by the distance of the query cell to the \code{k.weight}th anchor multiplied by the anchor score computed in \code{\link{FindIntegrationAnchors}}. We then apply a Gaussian kernel width a bandwidth defined by \code{sd.weight} and normalize across all \code{k.weight} anchors.} } The main difference between label transfer (classification) and feature imputation is what gets multiplied by the weights matrix. For label transfer, we perform the following steps: \itemize{ \item{Create a binary classification matrix, the rows corresponding to each possible class and the columns corresponding to the anchors. If the reference cell in the anchor pair is a member of a certain class, that matrix entry is filled with a 1, otherwise 0.} \item{Multiply this classification matrix by the transpose of weights matrix to compute a prediction score for each class for each cell in the query dataset.} } For feature imputation, we perform the following step: \itemize{ \item{Multiply the expression matrix for the reference anchor cells by the weights matrix. This returns a predicted expression matrix for the specified features for each cell in the query dataset.} } } \examples{ \dontrun{ # to install the SeuratData package see https://github.com/satijalab/seurat-data library(SeuratData) data("pbmc3k") # for demonstration, split the object into reference and query pbmc.reference <- pbmc3k[, 1:1350] pbmc.query <- pbmc3k[, 1351:2700] # perform standard preprocessing on each object pbmc.reference <- NormalizeData(pbmc.reference) pbmc.reference <- FindVariableFeatures(pbmc.reference) pbmc.reference <- ScaleData(pbmc.reference) pbmc.query <- NormalizeData(pbmc.query) pbmc.query <- FindVariableFeatures(pbmc.query) pbmc.query <- ScaleData(pbmc.query) # find anchors anchors <- FindTransferAnchors(reference = pbmc.reference, query = pbmc.query) # transfer labels predictions <- TransferData(anchorset = anchors, refdata = pbmc.reference$seurat_annotations) pbmc.query <- AddMetaData(object = pbmc.query, metadata = predictions) } } \references{ Stuart T, Butler A, et al. Comprehensive Integration of Single-Cell Data. 
Cell. 2019;177:1888-1902 \doi{10.1016/j.cell.2019.05.031} } \concept{integration} Seurat/man/FetchResidualSCTModel.Rd0000644000176200001440000000370214525500037016637 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/preprocessing5.R \name{FetchResidualSCTModel} \alias{FetchResidualSCTModel} \title{Calculate Pearson residuals of features not in the scale.data} \usage{ FetchResidualSCTModel( object, assay = "SCT", umi.assay = "RNA", layer = "counts", chunk_size = 2000, layer.cells = NULL, SCTModel = NULL, reference.SCT.model = NULL, new_features = NULL, clip.range = NULL, replace.value = FALSE, verbose = FALSE ) } \arguments{ \item{object}{A Seurat object} \item{assay}{Name of the assay of the Seurat object generated by SCTransform. Default is "SCT"} \item{umi.assay}{Name of the assay of the Seurat object to fetch UMIs from. Default is "RNA"} \item{layer}{Name of the layer under `umi.assay` to fetch UMIs from. Default is "counts"} \item{chunk_size}{Number of cells to load in memory for calculating residuals} \item{layer.cells}{Vector of cells to calculate the residual for. Default is NULL which uses all cells in the layer} \item{SCTModel}{Which SCTModel to use from the object for calculating the residual. Will be ignored if reference.SCT.model is set} \item{reference.SCT.model}{Whether a reference SCT model should be used for calculating the residuals. When not NULL, the `SCTModel` parameter is ignored.} \item{new_features}{A vector of features to calculate the residuals for} \item{clip.range}{Numeric of length two specifying the min and max values the Pearson residual will be clipped to. Useful if you want to change the clip.range.} \item{replace.value}{Whether to replace the value of residuals if it already exists} \item{verbose}{Whether to print messages and progress bars} } \value{ Returns a matrix containing centered Pearson residuals of added features } \description{ Calculate Pearson residuals of features not in the scale.data. This function is the secondary function under FetchResiduals. } Seurat/man/ProjectData.Rd0000644000176200001440000000363314525500037014765 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/sketching.R \name{ProjectData} \alias{ProjectData} \title{Project full data to the sketch assay} \usage{ ProjectData( object, assay = "RNA", sketched.assay = "sketch", sketched.reduction, full.reduction, dims, normalization.method = c("LogNormalize", "SCT"), refdata = NULL, k.weight = 50, umap.model = NULL, recompute.neighbors = FALSE, recompute.weights = FALSE, verbose = TRUE ) } \arguments{ \item{object}{A Seurat object.} \item{assay}{Assay name for the full data. Default is 'RNA'.} \item{sketched.assay}{Sketched assay name to project onto. Default is 'sketch'.} \item{sketched.reduction}{Dimensional reduction results of the sketched assay to project onto.} \item{full.reduction}{Dimensional reduction name for the projected full dataset.} \item{dims}{Dimensions to include in the projection.} \item{normalization.method}{Normalization method to use. Can be 'LogNormalize' or 'SCT'. Default is 'LogNormalize'.} \item{refdata}{An optional list for label transfer from sketch to full data. Default is NULL. Similar to refdata in `MapQuery`} \item{k.weight}{Number of neighbors to consider when weighting labels for transfer. Default is 50.} \item{umap.model}{An optional pre-computed UMAP model.
Default is NULL.} \item{recompute.neighbors}{Whether to recompute the neighbors for label transfer. Default is FALSE.} \item{recompute.weights}{Whether to recompute the weights for label transfer. Default is FALSE.} \item{verbose}{Print progress and diagnostic messages.} } \value{ A Seurat object with the full data projected onto the sketched dimensional reduction results. The projected data are stored in the specified full reduction. } \description{ This function allows projection of high-dimensional single-cell RNA expression data from a full dataset onto the lower-dimensional embedding of the sketch of the dataset. } Seurat/man/ReadMtx.Rd0000644000176200001440000000361114525500037014125 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/preprocessing.R \name{ReadMtx} \alias{ReadMtx} \title{Load in data from remote or local mtx files} \usage{ ReadMtx( mtx, cells, features, cell.column = 1, feature.column = 2, cell.sep = "\\t", feature.sep = "\\t", skip.cell = 0, skip.feature = 0, mtx.transpose = FALSE, unique.features = TRUE, strip.suffix = FALSE ) } \arguments{ \item{mtx}{Name or remote URL of the mtx file} \item{cells}{Name or remote URL of the cells/barcodes file} \item{features}{Name or remote URL of the features/genes file} \item{cell.column}{Specify which column of the cells file to use for cell names; default is 1} \item{feature.column}{Specify which column of the features file to use for feature/gene names; default is 2} \item{cell.sep}{Specify the delimiter in the cell name file} \item{feature.sep}{Specify the delimiter in the feature name file} \item{skip.cell}{Number of lines to skip in the cells file before beginning to read cell names} \item{skip.feature}{Number of lines to skip in the features file before beginning to read gene names} \item{mtx.transpose}{Transpose the matrix after reading in} \item{unique.features}{Make feature names unique (default TRUE)} \item{strip.suffix}{Remove trailing "-1" if present in all cell barcodes.} } \value{ A sparse matrix containing the expression data. } \description{ Enables easy loading of sparse data matrices } \examples{ \dontrun{ # For local files: expression_matrix <- ReadMtx( mtx = "count_matrix.mtx.gz", features = "features.tsv.gz", cells = "barcodes.tsv.gz" ) seurat_object <- CreateSeuratObject(counts = expression_matrix) # For remote files: expression_matrix <- ReadMtx(mtx = "http://localhost/matrix.mtx", cells = "http://localhost/barcodes.tsv", features = "http://localhost/genes.tsv") seurat_object <- CreateSeuratObject(counts = expression_matrix) } } \concept{preprocessing} Seurat/man/AddModuleScore.Rd0000644000176200001440000000473214525500037015420 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/utilities.R \name{AddModuleScore} \alias{AddModuleScore} \title{Calculate module scores for feature expression programs in single cells} \usage{ AddModuleScore( object, features, pool = NULL, nbin = 24, ctrl = 100, k = FALSE, assay = NULL, name = "Cluster", seed = 1, search = FALSE, slot = "data", ...
) } \arguments{ \item{object}{Seurat object} \item{features}{A list of vectors of features for expression programs; each entry should be a vector of feature names} \item{pool}{List of features to check expression levels against, defaults to \code{rownames(x = object)}} \item{nbin}{Number of bins of aggregate expression levels for all analyzed features} \item{ctrl}{Number of control features selected from the same bin per analyzed feature} \item{k}{Use feature clusters returned from DoKMeans} \item{assay}{Name of assay to use} \item{name}{Name for the expression programs; will append a number to the end for each entry in \code{features} (eg. if \code{features} has three programs, the results will be stored as \code{name1}, \code{name2}, \code{name3}, respectively)} \item{seed}{Set a random seed. If NULL, seed is not set.} \item{search}{Search for symbol synonyms for features in \code{features} that don't match features in \code{object}? Searches the HGNC's gene names database; see \code{\link{UpdateSymbolList}} for more details} \item{slot}{Slot to calculate score values off of. Defaults to data slot (i.e log-normalized counts)} \item{...}{Extra parameters passed to \code{\link{UpdateSymbolList}}} } \value{ Returns a Seurat object with module scores added to object meta data; each module is stored as \code{name#} for each module program present in \code{features} } \description{ Calculate the average expression levels of each program (cluster) on single cell level, subtracted by the aggregated expression of control feature sets. All analyzed features are binned based on averaged expression, and the control features are randomly selected from each bin. } \examples{ \dontrun{ data("pbmc_small") cd_features <- list(c( 'CD79B', 'CD79A', 'CD19', 'CD180', 'CD200', 'CD3D', 'CD2', 'CD3E', 'CD7', 'CD8A', 'CD14', 'CD1C', 'CD68', 'CD9', 'CD247' )) pbmc_small <- AddModuleScore( object = pbmc_small, features = cd_features, ctrl = 5, name = 'CD_Features' ) head(x = pbmc_small[]) } } \references{ Tirosh et al, Science (2016) } \concept{utilities} Seurat/man/Graph-class.Rd0000644000176200001440000000054214525500037014725 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/reexports.R \docType{class} \name{Graph-class} \alias{Graph-class} \title{The Graph Class} \description{ For more details, please see the documentation in \code{\link[SeuratObject:Graph]{SeuratObject}} } \seealso{ \code{\link[SeuratObject:Graph]{SeuratObject::Graph-class}} } Seurat/man/MixingMetric.Rd0000644000176200001440000000227514525500037015165 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/integration.R \name{MixingMetric} \alias{MixingMetric} \title{Calculates a mixing metric} \usage{ MixingMetric( object, grouping.var, reduction = "pca", dims = 1:2, k = 5, max.k = 300, eps = 0, verbose = TRUE ) } \arguments{ \item{object}{Seurat object} \item{grouping.var}{Grouping variable for dataset} \item{reduction}{Which dimensionally reduced space to use} \item{dims}{Dimensions to use} \item{k}{Neighbor number to examine per group} \item{max.k}{Maximum size of local neighborhood to compute} \item{eps}{Error bound on the neighbor finding algorithm (from RANN)} \item{verbose}{Displays progress bar} } \value{ Returns a vector of values of the mixing metric for each cell } \description{ Here we compute a measure of how well mixed a composite dataset is. 
To compute, we first examine the local neighborhood for each cell (looking at max.k neighbors) and determine, for each group (which could be the dataset of origin after integration), the k-th nearest neighbor from that group and the rank that neighbor holds in the overall neighborhood. We then take the median across all groups as the mixing metric per cell. } \concept{integration} Seurat/man/FeatureScatter.Rd0000644000176200001440000000515214525500037015504 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/visualization.R \name{FeatureScatter} \alias{FeatureScatter} \alias{GenePlot} \title{Scatter plot of single cell data} \usage{ FeatureScatter( object, feature1, feature2, cells = NULL, shuffle = FALSE, seed = 1, group.by = NULL, split.by = NULL, cols = NULL, pt.size = 1, shape.by = NULL, span = NULL, smooth = FALSE, combine = TRUE, slot = "data", plot.cor = TRUE, ncol = NULL, raster = NULL, raster.dpi = c(512, 512), jitter = FALSE ) } \arguments{ \item{object}{Seurat object} \item{feature1}{First feature to plot. Typically feature expression but can also be metrics, PC scores, etc.; anything that can be retrieved with FetchData} \item{feature2}{Second feature to plot.} \item{cells}{Cells to include on the scatter plot.} \item{shuffle}{Whether to randomly shuffle the order of points. This can be useful for crowded plots if points of interest are being buried. (default is FALSE)} \item{seed}{Sets the seed if randomly shuffling the order of points.} \item{group.by}{Name of one or more metadata columns to group (color) cells by (for example, orig.ident); pass 'ident' to group by identity class} \item{split.by}{A factor in object metadata to split the feature plot by; pass 'ident' to split by cell identity} \item{cols}{Colors to use for identity class plotting.} \item{pt.size}{Size of the points on the plot} \item{shape.by}{Ignored for now} \item{span}{Spline span in loess function call; if \code{NULL}, no spline added} \item{smooth}{Smooth the graph (similar to smoothScatter)} \item{combine}{Combine plots into a single \code{\link[patchwork]{patchwork}ed} plot} \item{slot}{Slot to pull data from; should be one of 'counts', 'data', or 'scale.data'} \item{plot.cor}{Display correlation in plot title} \item{ncol}{Number of columns if plotting multiple plots} \item{raster}{Convert points to raster format; default is \code{NULL}, which will automatically use raster if the number of points plotted is greater than 100,000} \item{raster.dpi}{Pixel resolution for rasterized plots, passed to geom_scattermore(). Default is c(512, 512).} \item{jitter}{Jitter for easier visualization of crowded points (default is FALSE)} } \value{ A ggplot object } \description{ Creates a scatter plot of two features (typically feature expression) across a set of single cells. Cells are colored by their identity class. Pearson correlation between the two features is displayed above the plot.
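As a quick illustration, a minimal usage sketch (assuming the bundled \code{pbmc_small} dataset and its \dQuote{groups} metadata column) that colors cells by a metadata column rather than by identity class:
\preformatted{
data("pbmc_small")
# color points by the 'groups' metadata column instead of the active identity
FeatureScatter(
  object = pbmc_small,
  feature1 = 'CD9',
  feature2 = 'CD3E',
  group.by = 'groups'
)
}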
} \examples{ data("pbmc_small") FeatureScatter(object = pbmc_small, feature1 = 'CD9', feature2 = 'CD3E') } \concept{visualization} Seurat/man/CalcPerturbSig.Rd0000644000176200001440000000361114525500037015432 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/mixscape.R \name{CalcPerturbSig} \alias{CalcPerturbSig} \title{Calculate a perturbation Signature} \usage{ CalcPerturbSig( object, assay = NULL, features = NULL, slot = "data", gd.class = "guide_ID", nt.cell.class = "NT", split.by = NULL, num.neighbors = NULL, reduction = "pca", ndims = 15, new.assay.name = "PRTB", verbose = TRUE ) } \arguments{ \item{object}{An object of class Seurat.} \item{assay}{Name of Assay PRTB signature is being calculated on.} \item{features}{Features to compute PRTB signature for. Defaults to the variable features set in the assay specified.} \item{slot}{Data slot to use for PRTB signature calculation.} \item{gd.class}{Metadata column containing target gene classification.} \item{nt.cell.class}{Non-targeting gRNA cell classification identity.} \item{split.by}{Provide metadata column if multiple biological replicates exist to calculate PRTB signature for every replicate separately.} \item{num.neighbors}{Number of nearest neighbors to consider.} \item{reduction}{Reduction method used to calculate nearest neighbors.} \item{ndims}{Number of dimensions to use from dimensionality reduction method.} \item{new.assay.name}{Name for the new assay.} \item{verbose}{Display progress + messages} } \value{ Returns a Seurat object with a new assay added containing the perturbation signature for all cells in the data slot. } \description{ Function to calculate perturbation signature for pooled CRISPR screen datasets. For each target cell (expressing one target gRNA), we identified 20 cells from the control pool (non-targeting cells) with the most similar mRNA expression profiles. The perturbation signature is calculated by subtracting the averaged mRNA expression profile of the non-targeting neighbors from the mRNA expression profile of the target cell. } \concept{mixscape} Seurat/man/as.SingleCellExperiment.Rd0000644000176200001440000000114214525500037017242 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/generics.R, R/objects.R \name{as.SingleCellExperiment} \alias{as.SingleCellExperiment} \alias{as.SingleCellExperiment.Seurat} \title{Convert objects to SingleCellExperiment objects} \usage{ as.SingleCellExperiment(x, ...) \method{as.SingleCellExperiment}{Seurat}(x, assay = NULL, ...) } \arguments{ \item{x}{An object to convert to class \code{SingleCellExperiment}} \item{...}{Arguments passed to other methods} \item{assay}{Assays to convert} } \description{ Convert objects to SingleCellExperiment objects } \concept{objects} Seurat/man/SelectSCTIntegrationFeatures.Rd0000644000176200001440000000116014525500037020252 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/integration.R \name{SelectSCTIntegrationFeatures} \alias{SelectSCTIntegrationFeatures} \title{Select SCT integration features} \usage{ SelectSCTIntegrationFeatures( object, nfeatures = 3000, assay = NULL, verbose = TRUE, ... 
) } \arguments{ \item{object}{Seurat object} \item{nfeatures}{Number of features to return for integration} \item{assay}{Name of assay to use for integration feature selection} \item{verbose}{Print messages} \item{...}{Arguments passed on to \code{method}} } \description{ Select SCT integration features } Seurat/man/RunTSNE.Rd0000644000176200001440000000552514525500037014025 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/generics.R, R/dimensional_reduction.R \name{RunTSNE} \alias{RunTSNE} \alias{RunTSNE.matrix} \alias{RunTSNE.DimReduc} \alias{RunTSNE.dist} \alias{RunTSNE.Seurat} \title{Run t-distributed Stochastic Neighbor Embedding} \usage{ RunTSNE(object, ...) \method{RunTSNE}{matrix}( object, assay = NULL, seed.use = 1, tsne.method = "Rtsne", dim.embed = 2, reduction.key = "tSNE_", ... ) \method{RunTSNE}{DimReduc}( object, cells = NULL, dims = 1:5, seed.use = 1, tsne.method = "Rtsne", dim.embed = 2, reduction.key = "tSNE_", ... ) \method{RunTSNE}{dist}( object, assay = NULL, seed.use = 1, tsne.method = "Rtsne", dim.embed = 2, reduction.key = "tSNE_", ... ) \method{RunTSNE}{Seurat}( object, reduction = "pca", cells = NULL, dims = 1:5, features = NULL, seed.use = 1, tsne.method = "Rtsne", dim.embed = 2, distance.matrix = NULL, reduction.name = "tsne", reduction.key = "tSNE_", ... ) } \arguments{ \item{object}{Seurat object} \item{...}{Arguments passed to other methods and to the t-SNE call (most commonly used is perplexity)} \item{assay}{Name of assay that t-SNE is being run on} \item{seed.use}{Random seed for the t-SNE. If NULL, does not set the seed} \item{tsne.method}{Select the method to use to compute the tSNE. Available methods are: \itemize{ \item \dQuote{\code{Rtsne}}: Use the Rtsne package Barnes-Hut implementation of tSNE (default) \item \dQuote{\code{FIt-SNE}}: Use the FFT-accelerated Interpolation-based t-SNE. Based on Kluger Lab code found here: \url{https://github.com/KlugerLab/FIt-SNE} }} \item{dim.embed}{The dimensional space of the resulting tSNE embedding (default is 2). For example, set to 3 for a 3d tSNE} \item{reduction.key}{Dimensional reduction key; specifies the string before the number for the dimension names. \dQuote{\code{tSNE_}} by default} \item{cells}{Which cells to analyze (default, all cells)} \item{dims}{Which dimensions to use as input features} \item{reduction}{Which dimensional reduction (e.g. PCA, ICA) to use for the tSNE. Default is PCA} \item{features}{If set, run the tSNE on this subset of features (instead of running on a set of reduced dimensions). Not set (NULL) by default; \code{dims} must be NULL to run on features} \item{distance.matrix}{If set, runs tSNE on the given distance matrix instead of data matrix (experimental)} \item{reduction.name}{Dimensional reduction name; specifies where the reduction is stored in the object. \dQuote{tsne} by default} } \description{ Run t-SNE dimensionality reduction on selected features. Has the option of running in a reduced dimensional space (i.e. spectral tSNE, recommended), or running based on a set of genes. For details about stored TSNE calculation parameters, see \code{PrintTSNEParams}. } \concept{dimensional_reduction} Seurat/man/CountSketch.Rd0000644000176200001440000000137514525500037015020 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/sketching.R \name{CountSketch} \alias{CountSketch} \title{Generate CountSketch random matrix} \usage{ CountSketch(nsketch, ncells, seed = NA_integer_, ...)
} \arguments{ \item{nsketch}{Number of sketched cells (rows of the random sketch matrix)} \item{ncells}{Number of cells in the original data} \item{seed}{A single value, interpreted as an integer, or \code{NULL} (see \sQuote{Details}).} \item{...}{Ignored} } \value{ ... } \description{ Generate CountSketch random matrix } \references{ Clarkson, KL. & Woodruff, DP. Low-rank approximation and regression in input sparsity time. Journal of the ACM (JACM). 2017 Jan 30;63(6):1-45. \url{https://dl.acm.org/doi/abs/10.1145/3019134} } \keyword{internal} Seurat/man/PrepLDA.Rd0000644000176200001440000000230114525500037014003 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/mixscape.R \name{PrepLDA} \alias{PrepLDA} \title{Function to prepare data for Linear Discriminant Analysis.} \usage{ PrepLDA( object, de.assay = "RNA", pc.assay = "PRTB", labels = "gene", nt.label = "NT", npcs = 10, verbose = TRUE, logfc.threshold = 0.25 ) } \arguments{ \item{object}{An object of class Seurat.} \item{de.assay}{Assay to use for selection of DE genes.} \item{pc.assay}{Assay to use for running principal component analysis.} \item{labels}{Meta data column with target gene class labels.} \item{nt.label}{Name of non-targeting cell class.} \item{npcs}{Number of principal components to use.} \item{verbose}{Print progress bar.} \item{logfc.threshold}{Limit testing to genes which show, on average, at least X-fold difference (log-scale) between the two groups of cells. Default is 0.25. Increasing logfc.threshold speeds up the function, but can miss weaker signals.} } \value{ Returns a list of the first 10 PCs from each projection. } \description{ This function performs unsupervised PCA on each mixscape class separately and projects each subspace onto all cells in the data. } \concept{mixscape} Seurat/man/MetaFeature.Rd0000644000176200001440000000176014525500037014766 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/utilities.R \name{MetaFeature} \alias{MetaFeature} \title{Aggregate expression of multiple features into a single feature} \usage{ MetaFeature( object, features, meta.name = "metafeature", cells = NULL, assay = NULL, slot = "data" ) } \arguments{ \item{object}{A Seurat object} \item{features}{List of features to aggregate} \item{meta.name}{Name of column in metadata to store metafeature} \item{cells}{List of cells to use (default all cells)} \item{assay}{Which assay to use} \item{slot}{Which slot to take data from (default data)} } \value{ Returns a \code{Seurat} object with the metafeature stored in object metadata } \description{ Calculates the relative contribution of each feature to each cell for a given set of features.
} \examples{ data("pbmc_small") pbmc_small <- MetaFeature( object = pbmc_small, features = c("LTB", "EAF2"), meta.name = 'var.aggregate' ) head(pbmc_small[[]]) } \concept{utilities} Seurat/man/VariableFeaturePlot.Rd0000644000176200001440000000247414525500037016467 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/visualization.R \name{VariableFeaturePlot} \alias{VariableFeaturePlot} \alias{VariableGenePlot} \alias{MeanVarPlot} \title{View variable features} \usage{ VariableFeaturePlot( object, cols = c("black", "red"), pt.size = 1, log = NULL, selection.method = NULL, assay = NULL, raster = NULL, raster.dpi = c(512, 512) ) } \arguments{ \item{object}{Seurat object} \item{cols}{Colors to specify non-variable/variable status} \item{pt.size}{Size of the points on the plot} \item{log}{Plot the x-axis in log scale} \item{selection.method}{\ifelse{html}{\href{https://lifecycle.r-lib.org/articles/stages.html#deprecated}{\figure{lifecycle-deprecated.svg}{options: alt='[Deprecated]'}}}{\strong{[Deprecated]}}} \item{assay}{Assay to pull variable features from} \item{raster}{Convert points to raster format, default is \code{NULL} which will automatically use raster if the number of points plotted is greater than 100,000} \item{raster.dpi}{Pixel resolution for rasterized plots, passed to geom_scattermore(). Default is c(512, 512).} } \value{ A ggplot object } \description{ View variable features } \examples{ data("pbmc_small") VariableFeaturePlot(object = pbmc_small) } \seealso{ \code{\link{FindVariableFeatures}} } \concept{visualization} Seurat/man/CombinePlots.Rd0000644000176200001440000000212314525500037015154 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/visualization.R \name{CombinePlots} \alias{CombinePlots} \title{Combine ggplot2-based plots into a single plot} \usage{ CombinePlots(plots, ncol = NULL, legend = NULL, ...) } \arguments{ \item{plots}{A list of gg objects} \item{ncol}{Number of columns} \item{legend}{Combine legends into a single legend choose from 'right' or 'bottom'; pass 'none' to remove legends, or \code{NULL} to leave legends as they are} \item{...}{Extra parameters passed to plot_grid} } \value{ A combined plot } \description{ Combine ggplot2-based plots into a single plot } \examples{ data("pbmc_small") pbmc_small[['group']] <- sample( x = c('g1', 'g2'), size = ncol(x = pbmc_small), replace = TRUE ) plot1 <- FeaturePlot( object = pbmc_small, features = 'MS4A1', split.by = 'group' ) plot2 <- FeaturePlot( object = pbmc_small, features = 'FCN1', split.by = 'group' ) CombinePlots( plots = list(plot1, plot2), legend = 'none', nrow = length(x = unique(x = pbmc_small[['group', drop = TRUE]])) ) } \concept{visualization} Seurat/man/LabelPoints.Rd0000644000176200001440000000245014525500037014775 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/visualization.R \name{LabelPoints} \alias{LabelPoints} \alias{Labeler} \title{Add text labels to a ggplot2 plot} \usage{ LabelPoints( plot, points, labels = NULL, repel = FALSE, xnudge = 0.3, ynudge = 0.05, ... 
) } \arguments{ \item{plot}{A ggplot2 plot with a GeomPoint layer} \item{points}{A vector of points to label; if \code{NULL}, will use all points in the plot} \item{labels}{A vector of labels for the points; if \code{NULL}, will use rownames of the data provided to the plot at the points selected} \item{repel}{Use \code{geom_text_repel} to create a nicely-repelled labels; this is slow when a lot of points are being plotted. If using \code{repel}, set \code{xnudge} and \code{ynudge} to 0} \item{xnudge, ynudge}{Amount to nudge X and Y coordinates of labels by} \item{...}{Extra parameters passed to \code{geom_text}} } \value{ A ggplot object } \description{ Add text labels to a ggplot2 plot } \examples{ data("pbmc_small") ff <- TopFeatures(object = pbmc_small[['pca']]) cc <- TopCells(object = pbmc_small[['pca']]) plot <- FeatureScatter(object = pbmc_small, feature1 = ff[1], feature2 = ff[2]) LabelPoints(plot = plot, points = cc) } \seealso{ \code{\link[ggplot2]{geom_text}} } \concept{visualization} Seurat/man/ReadVizgen.Rd0000644000176200001440000001205514525500037014621 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/preprocessing.R, R/convenience.R \name{ReadVizgen} \alias{ReadVizgen} \alias{LoadVizgen} \title{Read and Load MERFISH Input from Vizgen} \usage{ ReadVizgen( data.dir, transcripts = NULL, spatial = NULL, molecules = NULL, type = "segmentations", mol.type = "microns", metadata = NULL, filter = NA_character_, z = 3L ) LoadVizgen(data.dir, fov, assay = "Vizgen", z = 3L) } \arguments{ \item{data.dir}{Path to the directory with Vizgen MERFISH files; requires at least one of the following files present: \itemize{ \item \dQuote{\code{cell_by_gene.csv}}: used for reading count matrix \item \dQuote{\code{cell_metadata.csv}}: used for reading cell spatial coordinate matrices \item \dQuote{\code{detected_transcripts.csv}}: used for reading molecule spatial coordinate matrices }} \item{transcripts}{Optional file path for counts matrix; pass \code{NA} to suppress reading counts matrix} \item{spatial}{Optional file path for spatial metadata; pass \code{NA} to suppress reading spatial coordinates. If \code{spatial} is provided and \code{type} is \dQuote{segmentations}, uses \code{dirname(spatial)} instead of \code{data.dir} to find HDF5 files} \item{molecules}{Optional file path for molecule coordinates file; pass \code{NA} to suppress reading spatial molecule information} \item{type}{Type of cell spatial coordinate matrices to read; choose one or more of: \itemize{ \item \dQuote{segmentations}: cell segmentation vertices; requires \href{https://cran.r-project.org/package=hdf5r}{\pkg{hdf5r}} to be installed and requires a directory \dQuote{\code{cell_boundaries}} within \code{data.dir}. 
Within \dQuote{\code{cell_boundaries}}, there must be one or more HDF5 file named \dQuote{\code{feature_data_##.hdf5}} \item \dQuote{centroids}: cell centroids in micron coordinate space \item \dQuote{boxes}: cell box outlines in micron coordinate space }} \item{mol.type}{Type of molecule spatial coordinate matrices to read; choose one or more of: \itemize{ \item \dQuote{pixels}: molecule coordinates in pixel space \item \dQuote{microns}: molecule coordinates in micron space }} \item{metadata}{Type of available metadata to read; choose zero or more of: \itemize{ \item \dQuote{volume}: estimated cell volume \item \dQuote{fov}: cell's fov }} \item{filter}{A character to filter molecules by, pass \code{NA} to skip molecule filtering} \item{z}{Z-index to load; must be between 0 and 6, inclusive} \item{fov}{Name to store FOV as} \item{assay}{Name to store expression matrix as} } \value{ \code{ReadVizgen}: A list with some combination of the following values: \itemize{ \item \dQuote{\code{transcripts}}: a \link[Matrix:dgCMatrix-class]{sparse matrix} with expression data; cells are columns and features are rows \item \dQuote{\code{segmentations}}: a data frame with cell polygon outlines in three columns: \dQuote{x}, \dQuote{y}, and \dQuote{cell} \item \dQuote{\code{centroids}}: a data frame with cell centroid coordinates in three columns: \dQuote{x}, \dQuote{y}, and \dQuote{cell} \item \dQuote{\code{boxes}}: a data frame with cell box outlines in three columns: \dQuote{x}, \dQuote{y}, and \dQuote{cell} \item \dQuote{\code{microns}}: a data frame with molecule micron coordinates in three columns: \dQuote{x}, \dQuote{y}, and \dQuote{gene} \item \dQuote{\code{pixels}}: a data frame with molecule pixel coordinates in three columns: \dQuote{x}, \dQuote{y}, and \dQuote{gene} \item \dQuote{\code{metadata}}: a data frame with the cell-level metadata requested by \code{metadata} } \code{LoadVizgen}: A \code{\link[SeuratObject]{Seurat}} object } \description{ Read and load in MERFISH data from Vizgen-formatted files } \note{ This function requires the \href{https://cran.r-project.org/package=data.table}{\pkg{data.table}} package to be installed } \section{Progress Updates with \pkg{progressr}}{ This function uses \href{https://cran.r-project.org/package=progressr}{\pkg{progressr}} to render status updates and progress bars. To enable progress updates, wrap the function call in \code{\link[progressr]{with_progress}} or run \code{\link[progressr:handlers]{handlers(global = TRUE)}} before running this function. For more details about \pkg{progressr}, please read \href{https://progressr.futureverse.org/articles/progressr-intro.html}{\code{vignette("progressr-intro")}} } \section{Parallelization with \pkg{future}}{ This function uses \href{https://cran.r-project.org/package=future}{\pkg{future}} to enable parallelization. Parallelization strategies can be set using \code{\link[future]{plan}}. Common plans include \dQuote{\code{sequential}} for non-parallelized processing or \dQuote{\code{multisession}} for parallel evaluation using multiple \R sessions; for other plans, see the \dQuote{Implemented evaluation strategies} section of \code{\link[future:plan]{?future::plan}}. 
For a more thorough introduction to \pkg{future}, see \href{https://future.futureverse.org/articles/future-1-overview.html}{\code{vignette("future-1-overview")}} } \concept{future} \concept{preprocessing} Seurat/man/BarcodeInflectionsPlot.Rd0000644000176200001440000000203314525500037017152 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/visualization.R \name{BarcodeInflectionsPlot} \alias{BarcodeInflectionsPlot} \title{Plot the Barcode Distribution and Calculated Inflection Points} \usage{ BarcodeInflectionsPlot(object) } \arguments{ \item{object}{Seurat object} } \value{ Returns a `ggplot2` object showing the by-group inflection points and provided (or default) rank threshold values in grey. } \description{ This function plots the calculated inflection points derived from the barcode-rank distribution. } \details{ See [CalculateBarcodeInflections()] to calculate inflection points and [SubsetByBarcodeInflections()] to subsequently subset the Seurat object. } \examples{ data("pbmc_small") pbmc_small <- CalculateBarcodeInflections(pbmc_small, group.column = 'groups') BarcodeInflectionsPlot(pbmc_small) } \seealso{ \code{\link{CalculateBarcodeInflections}} \code{\link{SubsetByBarcodeInflections}} } \author{ Robert A. Amezquita, \email{robert.amezquita@fredhutch.org} } \concept{visualization} Seurat/man/ProjectIntegration.Rd0000644000176200001440000000432214525500037016373 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/integration.R \name{ProjectIntegration} \alias{ProjectIntegration} \title{Integrate embeddings from the integrated sketched.assay} \usage{ ProjectIntegration( object, sketched.assay = "sketch", assay = "RNA", reduction = "integrated_dr", features = NULL, layers = "data", reduction.name = NULL, reduction.key = NULL, method = c("sketch", "data"), ratio = 0.8, sketched.layers = NULL, seed = 123, verbose = TRUE ) } \arguments{ \item{object}{A Seurat object with all cells for one dataset} \item{sketched.assay}{Assay name for sketched-cell expression (default is 'sketch')} \item{assay}{Assay name for original expression (default is 'RNA')} \item{reduction}{Dimensional reduction name for batch-corrected embeddings in the sketched object (default is 'integrated_dr')} \item{features}{Features used for atomic sketch integration} \item{layers}{Names of layers for correction.} \item{reduction.name}{Name to save new reduction as; defaults to \code{paste0(reduction, '.orig')}} \item{reduction.key}{Key for new dimensional reduction; defaults to creating one from \code{reduction.name}} \item{method}{Methods to construct sketch-cell representation for all cells (default is 'sketch'). Can be one of: \itemize{ \item \dQuote{\code{sketch}}: Use random sketched data slot \item \dQuote{\code{data}}: Use data slot }} \item{ratio}{Sketch ratio of data slot when \code{dictionary.method} is set to \dQuote{\code{sketch}}; defaults to 0.8} \item{sketched.layers}{Names of sketched layers, defaults to all layers of \dQuote{\code{object[[assay]]}}} \item{seed}{A positive integer. The seed for the random number generator, defaults to 123.} \item{verbose}{Print progress and message} } \value{ Returns a Seurat object with an integrated dimensional reduction } \description{ The main steps of this procedure are outlined below. 
For a more detailed description of the methodology, please see Hao, et al., bioRxiv 2022: \doi{10.1101/2022.02.24.481684} } \details{ First learn an atom dictionary representation to reconstruct each cell. Then, using this dictionary representation, reconstruct the embeddings of each cell from the integrated atoms. } Seurat/man/TransferAnchorSet-class.Rd0000644000176200001440000000061214525500037017255 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/objects.R \docType{class} \name{TransferAnchorSet-class} \alias{TransferAnchorSet-class} \alias{TransferAnchorSet} \title{The TransferAnchorSet Class} \description{ Inherits from the AnchorSet class. Implemented mainly for method dispatch purposes. See \code{\link{AnchorSet}} for slot details. } \concept{objects} Seurat/man/SlideSeq-class.Rd0000644000176200001440000000145714525500037015403 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/objects.R \docType{class} \name{SlideSeq-class} \alias{SlideSeq-class} \alias{SlideSeq} \title{The SlideSeq class} \description{ The SlideSeq class represents spatial information from the Slide-seq platform } \section{Slots}{ \describe{ \item{\code{coordinates}}{...} \item{\code{assay}}{Name of assay to associate image data with; will give this image priority for visualization when the assay is set as the active/default assay in a \code{Seurat} object} \item{\code{key}}{A one-length character vector with the object's key; keys must be one or more alphanumeric characters followed by an underscore \dQuote{\code{_}} (regex pattern \dQuote{\code{^[a-zA-Z][a-zA-Z0-9]*_$}})} } } \concept{spatial} Seurat/man/JackStraw.Rd0000644000176200001440000000330214525500037014447 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/dimensional_reduction.R \name{JackStraw} \alias{JackStraw} \title{Determine statistical significance of PCA scores.} \usage{ JackStraw( object, reduction = "pca", assay = NULL, dims = 20, num.replicate = 100, prop.freq = 0.01, verbose = TRUE, maxit = 1000 ) } \arguments{ \item{object}{Seurat object} \item{reduction}{DimReduc to use. ONLY PCA CURRENTLY SUPPORTED.} \item{assay}{Assay used to calculate reduction.} \item{dims}{Number of PCs to compute significance for} \item{num.replicate}{Number of replicate samplings to perform} \item{prop.freq}{Proportion of the data to randomly permute for each replicate} \item{verbose}{Print progress bar showing the number of replicates that have been processed.} \item{maxit}{Maximum number of iterations to be performed by the irlba function of RunPCA} } \value{ Returns a Seurat object where JS(object = object[['pca']], slot = 'empirical') represents p-values for each gene in the PCA analysis. If ProjectPCA is subsequently run, JS(object = object[['pca']], slot = 'full') then represents p-values for all genes. } \description{ Randomly permutes a subset of data, and calculates projected PCA scores for these 'random' genes. Then compares the PCA scores for the 'random' genes with the observed PCA scores to determine statistical significance. End result is a p-value for each gene's association with each principal component.
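In a typical workflow, the per-gene empirical p-values are then summarized per principal component with \code{\link{ScoreJackStraw}}. A minimal sketch (assuming the \code{pbmc_small} example shown below; \code{dims = 1:5} is illustrative):
\preformatted{
data("pbmc_small")
# permutation test, then aggregate gene-level p-values per PC
pbmc_small <- suppressWarnings(JackStraw(pbmc_small, num.replicate = 100))
pbmc_small <- ScoreJackStraw(pbmc_small, dims = 1:5)
}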
} \examples{ \dontrun{ data("pbmc_small") pbmc_small = suppressWarnings(JackStraw(pbmc_small)) head(JS(object = pbmc_small[['pca']], slot = 'empirical')) } } \references{ Inspired by Chung et al, Bioinformatics (2014) } \concept{dimensional_reduction} Seurat/man/SingleImagePlot.Rd0000644000176200001440000000565114525500037015612 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/visualization.R \name{SingleImagePlot} \alias{SingleImagePlot} \title{Single Spatial Plot} \usage{ SingleImagePlot( data, col.by = NA, col.factor = TRUE, cols = NULL, shuffle.cols = FALSE, size = 0.1, molecules = NULL, mols.size = 0.1, mols.cols = NULL, mols.alpha = 1, alpha = molecules \%iff\% 0.3 \%||\% 0.6, border.color = "white", border.size = NULL, na.value = "grey50", dark.background = TRUE, ... ) } \arguments{ \item{data}{A data frame with at least the following columns: \itemize{ \item \dQuote{\code{x}}: Spatial-resolved \emph{x} coordinates, will be plotted on the \emph{y}-axis \item \dQuote{\code{y}}: Spatially-resolved \emph{y} coordinates, will be plotted on the \emph{x}-axis \item \dQuote{\code{cell}}: Cell name \item \dQuote{\code{boundary}}: Segmentation boundary label; when plotting multiple segmentation layers, the order of boundary transparency is set by factor levels for this column } Can pass \code{NA} to \code{data} suppress segmentation visualization} \item{col.by}{Name of column in \code{data} to color cell segmentations by; pass \code{NA} to suppress coloring} \item{col.factor}{Are the colors a factor or discrete?} \item{cols}{Colors for cell segmentations; can be one of the following: \itemize{ \item \code{NULL} for default ggplot2 colors \item A numeric value or name of a \link[RColorBrewer:RColorBrewer]{color brewer palette} \item Name of a palette for \code{\link{DiscretePalette}} \item A vector of colors equal to the length of unique levels of \code{data$col.by} }} \item{shuffle.cols}{Randomly shuffle colors when a palette or vector of colors is provided to \code{cols}} \item{size}{Point size for cells when plotting centroids} \item{molecules}{A data frame with spatially-resolved molecule coordinates; should have the following columns: \itemize{ \item \dQuote{\code{x}}: Spatial-resolved \emph{x} coordinates, will be plotted on the \emph{y}-axis \item \dQuote{\code{y}}: Spatially-resolved \emph{y} coordinates, will be plotted on the \emph{x}-axis \item \dQuote{\code{molecule}}: Molecule name }} \item{mols.size}{Point size for molecules} \item{mols.cols}{A vector of color for molecules. 
The "Set1" palette from RColorBrewer is used by default.} \item{mols.alpha}{Alpha value for molecules, should be between 0 and 1} \item{alpha}{Alpha value, should be between 0 and 1; when plotting multiple boundaries, \code{alpha} is equivalent to max alpha} \item{border.color}{Color of cell segmentation border; pass \code{NA} to suppress borders for segmentation-based plots} \item{border.size}{Thickness of cell segmentation borders; pass \code{NA} to suppress borders for centroid-based plots} \item{na.value}{Color value for \code{NA} segmentations when using custom scale} \item{...}{Ignored} } \value{ A ggplot object } \description{ Single Spatial Plot } \keyword{internal} Seurat/man/SelectIntegrationFeatures5.Rd0000644000176200001440000000175114525500037017773 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/integration.R \name{SelectIntegrationFeatures5} \alias{SelectIntegrationFeatures5} \title{Select integration features} \usage{ SelectIntegrationFeatures5( object, nfeatures = 2000, assay = NULL, method = NULL, layers = NULL, verbose = TRUE, ... ) } \arguments{ \item{object}{Seurat object} \item{nfeatures}{Number of features to return for integration} \item{assay}{Name of assay to use for integration feature selection} \item{method}{Which method to pull. For \code{HVFInfo} and \code{VariableFeatures}, choose one from one of the following: \itemize{ \item \dQuote{vst} \item \dQuote{sctransform} or \dQuote{sct} \item \dQuote{mean.var.plot}, \dQuote{dispersion}, \dQuote{mvp}, or \dQuote{disp} }} \item{layers}{Name of layers to use for integration feature selection} \item{verbose}{Print messages} \item{...}{Arguments passed on to \code{method}} } \description{ Select integration features } Seurat/man/LoadAnnoyIndex.Rd0000644000176200001440000000064314525500037015437 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/utilities.R \name{LoadAnnoyIndex} \alias{LoadAnnoyIndex} \title{Load the Annoy index file} \usage{ LoadAnnoyIndex(object, file) } \arguments{ \item{object}{Neighbor object} \item{file}{Path to file with annoy index} } \value{ Returns the Neighbor object with the index stored } \description{ Load the Annoy index file } \concept{utilities} Seurat/man/SplitObject.Rd0000644000176200001440000000215714525500037015007 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/objects.R \name{SplitObject} \alias{SplitObject} \title{Splits object into a list of subsetted objects.} \usage{ SplitObject(object, split.by = "ident") } \arguments{ \item{object}{Seurat object} \item{split.by}{Attribute for splitting. Default is "ident". Currently only supported for class-level (i.e. non-quantitative) attributes.} } \value{ A named list of Seurat objects, each containing a subset of cells from the original object. } \description{ Splits object based on a single attribute into a list of subsetted objects, one for each level of the attribute. For example, useful for taking an object that contains cells from many patients, and subdividing it into patient-specific objects. 
} \examples{ data("pbmc_small") # Assign the test object a three level attribute groups <- sample(c("group1", "group2", "group3"), size = 80, replace = TRUE) names(groups) <- colnames(pbmc_small) pbmc_small <- AddMetaData(object = pbmc_small, metadata = groups, col.name = "group") obj.list <- SplitObject(pbmc_small, split.by = "group") } \concept{objects} Seurat/man/LabelClusters.Rd0000644000176200001440000000305014525500037015322 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/visualization.R \name{LabelClusters} \alias{LabelClusters} \title{Label clusters on a ggplot2-based scatter plot} \usage{ LabelClusters( plot, id, clusters = NULL, labels = NULL, split.by = NULL, repel = TRUE, box = FALSE, geom = "GeomPoint", position = "median", ... ) } \arguments{ \item{plot}{A ggplot2-based scatter plot} \item{id}{Name of variable used for coloring scatter plot} \item{clusters}{Vector of cluster ids to label} \item{labels}{Custom labels for the clusters} \item{split.by}{Split labels by some grouping label, useful when using \code{\link[ggplot2]{facet_wrap}} or \code{\link[ggplot2]{facet_grid}}} \item{repel}{Use \code{geom_text_repel} to create nicely-repelled labels} \item{box}{Use geom_label/geom_label_repel (includes a box around the text labels)} \item{geom}{Name of geom to get X/Y aesthetic names for} \item{position}{How to place the label if repel = FALSE. If "median", place the label at the median position. If "nearest" place the label at the position of the nearest data point to the median.} \item{...}{Extra parameters to \code{\link[ggrepel]{geom_text_repel}}, such as \code{size}} } \value{ A ggplot2-based scatter plot with cluster labels } \description{ Label clusters on a ggplot2-based scatter plot } \examples{ data("pbmc_small") plot <- DimPlot(object = pbmc_small) LabelClusters(plot = plot, id = 'ident') } \seealso{ \code{\link[ggrepel]{geom_text_repel}} \code{\link[ggplot2]{geom_text}} } \concept{visualization} Seurat/man/FindClusters.Rd0000644000176200001440000000632514525500037015173 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/generics.R, R/clustering.R \name{FindClusters} \alias{FindClusters} \alias{FindClusters.default} \alias{FindClusters.Seurat} \title{Cluster Determination} \usage{ FindClusters(object, ...) \method{FindClusters}{default}( object, modularity.fxn = 1, initial.membership = NULL, node.sizes = NULL, resolution = 0.8, method = "matrix", algorithm = 1, n.start = 10, n.iter = 10, random.seed = 0, group.singletons = TRUE, temp.file.location = NULL, edge.file.name = NULL, verbose = TRUE, ... ) \method{FindClusters}{Seurat}( object, graph.name = NULL, cluster.name = NULL, modularity.fxn = 1, initial.membership = NULL, node.sizes = NULL, resolution = 0.8, method = "matrix", algorithm = 1, n.start = 10, n.iter = 10, random.seed = 0, group.singletons = TRUE, temp.file.location = NULL, edge.file.name = NULL, verbose = TRUE, ... ) } \arguments{ \item{object}{An object} \item{...}{Arguments passed to other methods} \item{modularity.fxn}{Modularity function (1 = standard; 2 = alternative).} \item{initial.membership, node.sizes}{Parameters to pass to the Python leidenalg function.} \item{resolution}{Value of the resolution parameter, use a value above (below) 1.0 if you want to obtain a larger (smaller) number of communities.} \item{method}{Method for running leiden (defaults to matrix which is fast for small datasets). 
Enable method = "igraph" to avoid casting large data to a dense matrix.} \item{algorithm}{Algorithm for modularity optimization (1 = original Louvain algorithm; 2 = Louvain algorithm with multilevel refinement; 3 = SLM algorithm; 4 = Leiden algorithm). Leiden requires the leidenalg Python package.} \item{n.start}{Number of random starts.} \item{n.iter}{Maximal number of iterations per random start.} \item{random.seed}{Seed of the random number generator.} \item{group.singletons}{Group singletons into nearest cluster. If FALSE, assign all singletons to a "singleton" group} \item{temp.file.location}{Directory where intermediate files will be written. Specify the ABSOLUTE path.} \item{edge.file.name}{Edge file to use as input for modularity optimizer jar.} \item{verbose}{Print output} \item{graph.name}{Name of graph to use for the clustering algorithm} \item{cluster.name}{Name of output clusters} } \value{ Returns a Seurat object where the idents have been updated with new cluster info; latest clustering results will be stored in object metadata under 'seurat_clusters'. Note that 'seurat_clusters' will be overwritten every time FindClusters is run } \description{ Identify clusters of cells by a shared nearest neighbor (SNN) modularity optimization based clustering algorithm. First calculate k-nearest neighbors and construct the SNN graph. Then optimize the modularity function to determine clusters. For a full description of the algorithms, see Waltman and van Eck (2013) \emph{The European Physical Journal B}. Thanks to Nigel Delaney (evolvedmicrobe@github) for the rewrite of the Java modularity optimizer code in Rcpp! } \details{ To run the Leiden algorithm, you must first install the leidenalg python package (e.g. via pip install leidenalg); see Traag et al (2018). } \concept{clustering} Seurat/man/PseudobulkExpression.Rd0000644000176200001440000000064114525500037016756 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/generics.R \name{PseudobulkExpression} \alias{PseudobulkExpression} \title{Pseudobulk Expression} \usage{ PseudobulkExpression(object, ...) } \arguments{ \item{object}{An assay} \item{...}{Arguments passed to other methods} } \value{ Returns the pseudobulk expression data } \description{ Compute pseudobulk expression values by aggregating the data present in a given assay across groups of cells. } Seurat/man/PrepSCTFindMarkers.Rd0000644000176200001440000000546214525500037016175 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/differential_expression.R \name{PrepSCTFindMarkers} \alias{PrepSCTFindMarkers} \title{Prepare object to run differential expression on SCT assay with multiple models} \usage{ PrepSCTFindMarkers(object, assay = "SCT", verbose = TRUE) } \arguments{ \item{object}{Seurat object with SCT assays} \item{assay}{Assay name where the SCT objects are stored; default is 'SCT'} \item{verbose}{Print messages and progress} } \value{ Returns a Seurat object with recorrected counts and data in the SCT assay. } \description{ Given a merged object with multiple SCT models, this function uses the minimum of the median UMI counts (calculated using the raw UMI counts) across the individual objects as the sequencing depth covariate to reverse each individual SCT regression model. The counts slot of the SCT assay is replaced with recorrected counts and the data slot is replaced with log1p of recorrected counts.
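The documented relationship between the two slots can be checked directly; a minimal sketch (assuming a merged object \code{pbmc_merged} prepared as in the example below, and using the re-exported \code{GetAssayData} accessor):
\preformatted{
# after PrepSCTFindMarkers, 'data' should equal log1p of the recorrected 'counts'
counts.corrected <- GetAssayData(pbmc_merged, assay = "SCT", slot = "counts")
all.equal(log1p(counts.corrected), GetAssayData(pbmc_merged, assay = "SCT", slot = "data"))
}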
} \section{Progress Updates with \pkg{progressr}}{ This function uses \href{https://cran.r-project.org/package=progressr}{\pkg{progressr}} to render status updates and progress bars. To enable progress updates, wrap the function call in \code{\link[progressr]{with_progress}} or run \code{\link[progressr:handlers]{handlers(global = TRUE)}} before running this function. For more details about \pkg{progressr}, please read \href{https://progressr.futureverse.org/articles/progressr-intro.html}{\code{vignette("progressr-intro")}} } \section{Parallelization with \pkg{future}}{ This function uses \href{https://cran.r-project.org/package=future}{\pkg{future}} to enable parallelization. Parallelization strategies can be set using \code{\link[future]{plan}}. Common plans include \dQuote{\code{sequential}} for non-parallelized processing or \dQuote{\code{multisession}} for parallel evaluation using multiple \R sessions; for other plans, see the \dQuote{Implemented evaluation strategies} section of \code{\link[future:plan]{?future::plan}}. For a more thorough introduction to \pkg{future}, see \href{https://future.futureverse.org/articles/future-1-overview.html}{\code{vignette("future-1-overview")}} } \examples{ data("pbmc_small") pbmc_small1 <- SCTransform(object = pbmc_small, variable.features.n = 20, vst.flavor="v1") pbmc_small2 <- SCTransform(object = pbmc_small, variable.features.n = 20, vst.flavor="v1") pbmc_merged <- merge(x = pbmc_small1, y = pbmc_small2) pbmc_merged <- PrepSCTFindMarkers(object = pbmc_merged) markers <- FindMarkers( object = pbmc_merged, ident.1 = "0", ident.2 = "1", assay = "SCT" ) pbmc_subset <- subset(pbmc_merged, idents = c("0", "1")) markers_subset <- FindMarkers( object = pbmc_subset, ident.1 = "0", ident.2 = "1", assay = "SCT", recorrect_umi = FALSE ) } \concept{differential_expression} \concept{future} Seurat/man/reexports.Rd0000644000176200001440000001016014525500056014612 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/reexports.R \docType{import} \name{reexports} \alias{reexports} \alias{components} \alias{\%||\%} \alias{\%iff\%} \alias{AddMetaData} \alias{as.Graph} \alias{as.Neighbor} \alias{as.Seurat} \alias{as.sparse} \alias{Assays} \alias{Cells} \alias{CellsByIdentities} \alias{Command} \alias{CreateAssayObject} \alias{CreateDimReducObject} \alias{CreateSeuratObject} \alias{DefaultAssay} \alias{DefaultAssay<-} \alias{Distances} \alias{Embeddings} \alias{FetchData} \alias{GetAssayData} \alias{GetImage} \alias{GetTissueCoordinates} \alias{HVFInfo} \alias{Idents} \alias{Idents<-} \alias{Images} \alias{Index} \alias{Index<-} \alias{Indices} \alias{IsGlobal} \alias{JS} \alias{JS<-} \alias{Key} \alias{Key<-} \alias{Loadings} \alias{Loadings<-} \alias{LogSeuratCommand} \alias{Misc} \alias{Misc<-} \alias{Neighbors} \alias{Project} \alias{Project<-} \alias{Radius} \alias{Reductions} \alias{RenameCells} \alias{RenameIdents} \alias{ReorderIdent} \alias{RowMergeSparseMatrices} \alias{SetAssayData} \alias{SetIdent} \alias{SpatiallyVariableFeatures} \alias{StashIdent} \alias{Stdev} \alias{SVFInfo} \alias{Tool} \alias{Tool<-} \alias{UpdateSeuratObject} \alias{VariableFeatures} \alias{VariableFeatures<-} \alias{WhichCells} \title{Objects exported from other packages} \keyword{internal} \description{ These objects are imported from other packages. Follow the links below to see their documentation. 
\describe{ \item{generics}{\code{\link[generics]{components}}} \item{SeuratObject}{\code{\link[SeuratObject:set-if-null]{\%||\%}}, \code{\link[SeuratObject:set-if-null]{\%iff\%}}, \code{\link[SeuratObject]{AddMetaData}}, \code{\link[SeuratObject]{as.Graph}}, \code{\link[SeuratObject]{as.Neighbor}}, \code{\link[SeuratObject]{as.Seurat}}, \code{\link[SeuratObject]{as.sparse}}, \code{\link[SeuratObject:ObjectAccess]{Assays}}, \code{\link[SeuratObject]{Cells}}, \code{\link[SeuratObject]{CellsByIdentities}}, \code{\link[SeuratObject]{Command}}, \code{\link[SeuratObject]{CreateAssayObject}}, \code{\link[SeuratObject]{CreateDimReducObject}}, \code{\link[SeuratObject]{CreateSeuratObject}}, \code{\link[SeuratObject]{DefaultAssay}}, \code{\link[SeuratObject:DefaultAssay]{DefaultAssay<-}}, \code{\link[SeuratObject]{Distances}}, \code{\link[SeuratObject]{Embeddings}}, \code{\link[SeuratObject]{FetchData}}, \code{\link[SeuratObject:AssayData]{GetAssayData}}, \code{\link[SeuratObject]{GetImage}}, \code{\link[SeuratObject]{GetTissueCoordinates}}, \code{\link[SeuratObject:VariableFeatures]{HVFInfo}}, \code{\link[SeuratObject]{Idents}}, \code{\link[SeuratObject:Idents]{Idents<-}}, \code{\link[SeuratObject]{Images}}, \code{\link[SeuratObject:NNIndex]{Index}}, \code{\link[SeuratObject:NNIndex]{Index<-}}, \code{\link[SeuratObject]{Indices}}, \code{\link[SeuratObject]{IsGlobal}}, \code{\link[SeuratObject]{JS}}, \code{\link[SeuratObject:JS]{JS<-}}, \code{\link[SeuratObject]{Key}}, \code{\link[SeuratObject:Key]{Key<-}}, \code{\link[SeuratObject]{Loadings}}, \code{\link[SeuratObject:Loadings]{Loadings<-}}, \code{\link[SeuratObject]{LogSeuratCommand}}, \code{\link[SeuratObject]{Misc}}, \code{\link[SeuratObject:Misc]{Misc<-}}, \code{\link[SeuratObject:ObjectAccess]{Neighbors}}, \code{\link[SeuratObject]{Project}}, \code{\link[SeuratObject:Project]{Project<-}}, \code{\link[SeuratObject]{Radius}}, \code{\link[SeuratObject:ObjectAccess]{Reductions}}, \code{\link[SeuratObject]{RenameCells}}, \code{\link[SeuratObject:Idents]{RenameIdents}}, \code{\link[SeuratObject:Idents]{ReorderIdent}}, \code{\link[SeuratObject]{RowMergeSparseMatrices}}, \code{\link[SeuratObject:AssayData]{SetAssayData}}, \code{\link[SeuratObject:Idents]{SetIdent}}, \code{\link[SeuratObject:VariableFeatures]{SpatiallyVariableFeatures}}, \code{\link[SeuratObject:Idents]{StashIdent}}, \code{\link[SeuratObject]{Stdev}}, \code{\link[SeuratObject:VariableFeatures]{SVFInfo}}, \code{\link[SeuratObject]{Tool}}, \code{\link[SeuratObject:Tool]{Tool<-}}, \code{\link[SeuratObject]{UpdateSeuratObject}}, \code{\link[SeuratObject]{VariableFeatures}}, \code{\link[SeuratObject:VariableFeatures]{VariableFeatures<-}}, \code{\link[SeuratObject]{WhichCells}}} }} Seurat/man/RunICA.Rd0000644000176200001440000000426314525500037013646 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/generics.R, R/dimensional_reduction.R \name{RunICA} \alias{RunICA} \alias{RunICA.default} \alias{RunICA.Assay} \alias{RunICA.Seurat} \title{Run Independent Component Analysis on gene expression} \usage{ RunICA(object, ...) \method{RunICA}{default}( object, assay = NULL, nics = 50, rev.ica = FALSE, ica.function = "icafast", verbose = TRUE, ndims.print = 1:5, nfeatures.print = 30, reduction.name = "ica", reduction.key = "ica_", seed.use = 42, ... 
) \method{RunICA}{Assay}( object, assay = NULL, features = NULL, nics = 50, rev.ica = FALSE, ica.function = "icafast", verbose = TRUE, ndims.print = 1:5, nfeatures.print = 30, reduction.name = "ica", reduction.key = "ica_", seed.use = 42, ... ) \method{RunICA}{Seurat}( object, assay = NULL, features = NULL, nics = 50, rev.ica = FALSE, ica.function = "icafast", verbose = TRUE, ndims.print = 1:5, nfeatures.print = 30, reduction.name = "ica", reduction.key = "IC_", seed.use = 42, ... ) } \arguments{ \item{object}{Seurat object} \item{\dots}{Additional arguments to be passed to fastica} \item{assay}{Name of Assay ICA is being run on} \item{nics}{Number of ICs to compute} \item{rev.ica}{By default, computes the dimensional reduction on the cell x feature matrix. Setting to true will compute it on the transpose (feature x cell matrix).} \item{ica.function}{ICA function from ica package to run (options: icafast, icaimax, icajade)} \item{verbose}{Print the top genes associated with high/low loadings for the ICs} \item{ndims.print}{ICs to print genes for} \item{nfeatures.print}{Number of genes to print for each IC} \item{reduction.name}{dimensional reduction name} \item{reduction.key}{dimensional reduction key, specifies the string before the number for the dimension names.} \item{seed.use}{Set a random seed. Setting NULL will not set a seed.} \item{features}{Features to compute ICA on} } \description{ Run fastica algorithm from the ica package for ICA dimensionality reduction. For details about stored ICA calculation parameters, see \code{PrintICAParams}. } \concept{dimensional_reduction} Seurat/man/DietSeurat.Rd0000644000176200001440000000255714525500037014642 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/objects.R \name{DietSeurat} \alias{DietSeurat} \title{Slim down a Seurat object} \usage{ DietSeurat( object, layers = NULL, features = NULL, assays = NULL, dimreducs = NULL, graphs = NULL, misc = TRUE, counts = deprecated(), data = deprecated(), scale.data = deprecated(), ... ) } \arguments{ \item{object}{A \code{\link[SeuratObject]{Seurat}} object} \item{layers}{A vector or named list of layers to keep} \item{features}{Only keep a subset of features, defaults to all features} \item{assays}{Only keep a subset of assays specified here} \item{dimreducs}{Only keep a subset of DimReducs specified here (if \code{NULL}, remove all DimReducs)} \item{graphs}{Only keep a subset of Graphs specified here (if \code{NULL}, remove all Graphs)} \item{misc}{Preserve the \code{misc} slot; default is \code{TRUE}} \item{counts}{Preserve the count matrices for the assays specified} \item{data}{Preserve the data matrices for the assays specified} \item{scale.data}{Preserve the scale data matrices for the assays specified} \item{...}{Ignored} } \value{ \code{object} with only the sub-object specified retained } \description{ Keep only certain aspects of the Seurat object. 
Can be useful in functions that utilize merge as it reduces the amount of data in the merge } \concept{objects} Seurat/man/PlotPerturbScore.Rd0000644000176200001440000000272714525500037016046 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/mixscape.R \name{PlotPerturbScore} \alias{PlotPerturbScore} \title{Function to plot perturbation score distributions.} \usage{ PlotPerturbScore( object, target.gene.class = "gene", target.gene.ident = NULL, mixscape.class = "mixscape_class", col = "orange2", split.by = NULL, before.mixscape = FALSE, prtb.type = "KO" ) } \arguments{ \item{object}{An object of class Seurat.} \item{target.gene.class}{meta data column specifying all target gene names in the experiment.} \item{target.gene.ident}{Target gene name to visualize perturbation scores for.} \item{mixscape.class}{meta data column specifying mixscape classifications.} \item{col}{Specify color of target gene class or knockout cell class. For control non-targeting and non-perturbed cells, colors are set to different shades of grey.} \item{split.by}{For datasets with more than one cell type. Set equal TRUE to visualize perturbation scores for each cell type separately.} \item{before.mixscape}{Option to split densities based on mixscape classification (default) or original target gene classification. Default is set to NULL and plots cells by original class ID.} \item{prtb.type}{specify type of CRISPR perturbation expected for labeling mixscape classifications. Default is KO.} } \value{ A ggplot object. } \description{ Density plots to visualize perturbation scores calculated from RunMixscape function. } \concept{mixscape} Seurat/man/FastRowScale.Rd0000644000176200001440000000130014525500037015107 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/utilities.R \name{FastRowScale} \alias{FastRowScale} \title{Scale and/or center matrix rowwise} \usage{ FastRowScale(mat, center = TRUE, scale = TRUE, scale_max = 10) } \arguments{ \item{mat}{A matrix} \item{center}{a logical value indicating whether to center the rows} \item{scale}{a logical value indicating whether to scale the rows} \item{scale_max}{clip all values greater than scale_max to scale_max. Don't clip if Inf.} } \value{ Returns the center/scaled matrix } \description{ Performs row scaling and/or centering. Equivalent to using t(scale(t(mat))) in R except in the case of NA values. } \concept{utilities} Seurat/man/ModalityWeights-class.Rd0000644000176200001440000000176414525500037017010 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/objects.R \docType{class} \name{ModalityWeights-class} \alias{ModalityWeights-class} \alias{ModalityWeights} \title{The ModalityWeights Class} \description{ The ModalityWeights class is an intermediate data storage class that stores the modality weight and other related information needed for performing downstream analyses - namely data integration (\code{FindModalityWeights}) and data transfer (\code{\link{FindMultiModalNeighbors}}). 
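A \code{ModalityWeights} object is normally created for you rather than constructed by hand. As a hedged sketch (not taken from the package examples), the weighted-nearest-neighbor workflow below would populate modality weights, assuming a Seurat object \code{obj} that already has reductions named \dQuote{pca} and \dQuote{apca}: \preformatted{# Sketch only: `obj`, its reduction names, and the dimension ranges are assumptions.
obj <- FindMultiModalNeighbors(
  object = obj,
  reduction.list = list("pca", "apca"),
  dims.list = list(1:30, 1:18)
)
# The per-modality weights computed in this step back the resulting WNN graph.
}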
} \section{Slots}{ \describe{ \item{\code{modality.weight.list}}{A list of modality weights value from all modalities} \item{\code{modality.assay}}{Names of assays for the list of dimensional reductions} \item{\code{params}}{A list of parameters used in the FindModalityWeights} \item{\code{score.matrix}}{a list of score matrices representing cross and within-modality prediction score, and kernel value} \item{\code{command}}{Store log of parameters that were used} }} \concept{objects} Seurat/man/MixscapeHeatmap.Rd0000644000176200001440000001211314525500037015627 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/mixscape.R \name{MixscapeHeatmap} \alias{MixscapeHeatmap} \title{Differential expression heatmap for mixscape} \usage{ MixscapeHeatmap( object, ident.1 = NULL, ident.2 = NULL, balanced = TRUE, logfc.threshold = 0.25, assay = "RNA", max.genes = 100, test.use = "wilcox", max.cells.group = NULL, order.by.prob = TRUE, group.by = NULL, mixscape.class = "mixscape_class", prtb.type = "KO", fc.name = "avg_log2FC", pval.cutoff = 0.05, ... ) } \arguments{ \item{object}{An object} \item{ident.1}{Identity class to define markers for; pass an object of class \code{phylo} or 'clustertree' to find markers for a node in a cluster tree; passing 'clustertree' requires \code{\link{BuildClusterTree}} to have been run} \item{ident.2}{A second identity class for comparison; if \code{NULL}, use all other cells for comparison; if an object of class \code{phylo} or 'clustertree' is passed to \code{ident.1}, must pass a node to find markers for} \item{balanced}{Plot an equal number of genes with both groups of cells.} \item{logfc.threshold}{Limit testing to genes which show, on average, at least X-fold difference (log-scale) between the two groups of cells. Default is 0.1 Increasing logfc.threshold speeds up the function, but can miss weaker signals.} \item{assay}{Assay to use in differential expression testing} \item{max.genes}{Total number of DE genes to plot.} \item{test.use}{Denotes which test to use. Available options are: \itemize{ \item{"wilcox"} : Identifies differentially expressed genes between two groups of cells using a Wilcoxon Rank Sum test (default); will use a fast implementation by Presto if installed \item{"wilcox_limma"} : Identifies differentially expressed genes between two groups of cells using the limma implementation of the Wilcoxon Rank Sum test; set this option to reproduce results from Seurat v4 \item{"bimod"} : Likelihood-ratio test for single cell gene expression, (McDavid et al., Bioinformatics, 2013) \item{"roc"} : Identifies 'markers' of gene expression using ROC analysis. For each gene, evaluates (using AUC) a classifier built on that gene alone, to classify between two groups of cells. An AUC value of 1 means that expression values for this gene alone can perfectly classify the two groupings (i.e. Each of the cells in cells.1 exhibit a higher level than each of the cells in cells.2). An AUC value of 0 also means there is perfect classification, but in the other direction. A value of 0.5 implies that the gene has no predictive power to classify the two groups. Returns a 'predictive power' (abs(AUC-0.5) * 2) ranked matrix of putative differentially expressed genes. \item{"t"} : Identify differentially expressed genes between two groups of cells using the Student's t-test. \item{"negbinom"} : Identifies differentially expressed genes between two groups of cells using a negative binomial generalized linear model. 
Use only for UMI-based datasets \item{"poisson"} : Identifies differentially expressed genes between two groups of cells using a poisson generalized linear model. Use only for UMI-based datasets \item{"LR"} : Uses a logistic regression framework to determine differentially expressed genes. Constructs a logistic regression model predicting group membership based on each feature individually and compares this to a null model with a likelihood ratio test. \item{"MAST"} : Identifies differentially expressed genes between two groups of cells using a hurdle model tailored to scRNA-seq data. Utilizes the MAST package to run the DE testing. \item{"DESeq2"} : Identifies differentially expressed genes between two groups of cells based on a model using DESeq2 which uses a negative binomial distribution (Love et al, Genome Biology, 2014).This test does not support pre-filtering of genes based on average difference (or percent detection rate) between cell groups. However, genes may be pre-filtered based on their minimum detection rate (min.pct) across both cell groups. To use this method, please install DESeq2, using the instructions at https://bioconductor.org/packages/release/bioc/html/DESeq2.html }} \item{max.cells.group}{Number of cells per identity to plot.} \item{order.by.prob}{Order cells on heatmap based on their mixscape knockout probability from highest to lowest score.} \item{group.by}{(Deprecated) Option to split densities based on mixscape classification. Please use mixscape.class instead} \item{mixscape.class}{metadata column with mixscape classifications.} \item{prtb.type}{specify type of CRISPR perturbation expected for labeling mixscape classifications. Default is KO.} \item{fc.name}{Name of the fold change, average difference, or custom function column in the output data.frame. Default is avg_log2FC} \item{pval.cutoff}{P-value cut-off for selection of significantly DE genes.} \item{...}{Arguments passed to other methods and to specific DE methods} } \value{ A ggplot object. } \description{ Draws a heatmap of single cell feature expression with cells ordered by their mixscape ko probabilities. } \concept{mixscape} Seurat/man/DotPlot.Rd0000644000176200001440000000530214525500037014145 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/visualization.R \name{DotPlot} \alias{DotPlot} \alias{SplitDotPlotGG} \title{Dot plot visualization} \usage{ DotPlot( object, features, assay = NULL, cols = c("lightgrey", "blue"), col.min = -2.5, col.max = 2.5, dot.min = 0, dot.scale = 6, idents = NULL, group.by = NULL, split.by = NULL, cluster.idents = FALSE, scale = TRUE, scale.by = "radius", scale.min = NA, scale.max = NA ) } \arguments{ \item{object}{Seurat object} \item{features}{Input vector of features, or named list of feature vectors if feature-grouped panels are desired (replicates the functionality of the old SplitDotPlotGG)} \item{assay}{Name of assay to use, defaults to the active assay} \item{cols}{Colors to plot: the name of a palette from \code{RColorBrewer::brewer.pal.info}, a pair of colors defining a gradient, or 3+ colors defining multiple gradients (if split.by is set)} \item{col.min}{Minimum scaled average expression threshold (everything smaller will be set to this)} \item{col.max}{Maximum scaled average expression threshold (everything larger will be set to this)} \item{dot.min}{The fraction of cells at which to draw the smallest dot (default is 0). 
All cell groups with less than this expressing the given gene will have no dot drawn.} \item{dot.scale}{Scale the size of the points, similar to cex} \item{idents}{Identity classes to include in plot (default is all)} \item{group.by}{Factor to group the cells by} \item{split.by}{A factor in object metadata to split the plot by, pass 'ident' to split by cell identity' see \code{\link{FetchData}} for more details} \item{cluster.idents}{Whether to order identities by hierarchical clusters based on given features, default is FALSE} \item{scale}{Determine whether the data is scaled, TRUE for default} \item{scale.by}{Scale the size of the points by 'size' or by 'radius'} \item{scale.min}{Set lower limit for scaling, use NA for default} \item{scale.max}{Set upper limit for scaling, use NA for default} } \value{ A ggplot object } \description{ Intuitive way of visualizing how feature expression changes across different identity classes (clusters). The size of the dot encodes the percentage of cells within a class, while the color encodes the AverageExpression level across all cells within a class (blue is high). } \examples{ data("pbmc_small") cd_genes <- c("CD247", "CD3E", "CD9") DotPlot(object = pbmc_small, features = cd_genes) pbmc_small[['groups']] <- sample(x = c('g1', 'g2'), size = ncol(x = pbmc_small), replace = TRUE) DotPlot(object = pbmc_small, features = cd_genes, split.by = 'groups') } \seealso{ \code{RColorBrewer::brewer.pal.info} } \concept{visualization} Seurat/man/PlotClusterTree.Rd0000644000176200001440000000161414525500037015662 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/visualization.R \name{PlotClusterTree} \alias{PlotClusterTree} \title{Plot clusters as a tree} \usage{ PlotClusterTree(object, direction = "downwards", ...) } \arguments{ \item{object}{Seurat object} \item{direction}{A character string specifying the direction of the tree (default is downwards) Possible options: "rightwards", "leftwards", "upwards", and "downwards".} \item{\dots}{Additional arguments to \code{\link[ape:plot.phylo]{ape::plot.phylo}}} } \value{ Plots dendogram (must be precomputed using BuildClusterTree), returns no value } \description{ Plots previously computed tree (from BuildClusterTree) } \examples{ \dontrun{ if (requireNamespace("ape", quietly = TRUE)) { data("pbmc_small") pbmc_small <- BuildClusterTree(object = pbmc_small) PlotClusterTree(object = pbmc_small) } } } \concept{visualization} Seurat/man/RunSPCA.Rd0000644000176200001440000000513614525500037014000 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/generics.R, R/dimensional_reduction.R \name{RunSPCA} \alias{RunSPCA} \alias{RunSPCA.default} \alias{RunSPCA.Assay} \alias{RunSPCA.Assay5} \alias{RunSPCA.Seurat} \title{Run Supervised Principal Component Analysis} \usage{ RunSPCA(object, ...) \method{RunSPCA}{default}( object, assay = NULL, npcs = 50, reduction.key = "SPC_", graph = NULL, verbose = FALSE, seed.use = 42, ... ) \method{RunSPCA}{Assay}( object, assay = NULL, features = NULL, npcs = 50, reduction.key = "SPC_", graph = NULL, verbose = TRUE, seed.use = 42, ... ) \method{RunSPCA}{Assay5}( object, assay = NULL, features = NULL, npcs = 50, reduction.key = "SPC_", graph = NULL, verbose = TRUE, seed.use = 42, layer = "scale.data", ... ) \method{RunSPCA}{Seurat}( object, assay = NULL, features = NULL, npcs = 50, reduction.name = "spca", reduction.key = "SPC_", graph = NULL, verbose = TRUE, seed.use = 42, ... 
) } \arguments{ \item{object}{An object} \item{...}{Arguments passed to other methods and IRLBA} \item{assay}{Name of Assay SPCA is being run on} \item{npcs}{Total Number of SPCs to compute and store (50 by default)} \item{reduction.key}{dimensional reduction key, specifies the string before the number for the dimension names. SPC by default} \item{graph}{Graph used supervised by SPCA} \item{verbose}{Print the top genes associated with high/low loadings for the SPCs} \item{seed.use}{Set a random seed. By default, sets the seed to 42. Setting NULL will not set a seed.} \item{features}{Features to compute SPCA on. If features=NULL, SPCA will be run using the variable features for the Assay.} \item{layer}{Layer to run SPCA on} \item{reduction.name}{dimensional reduction name, spca by default} } \value{ Returns Seurat object with the SPCA calculation stored in the reductions slot } \description{ Run a supervised PCA (SPCA) dimensionality reduction supervised by a cell-cell kernel. SPCA is used to capture a linear transformation which maximizes its dependency to the given cell-cell kernel. We use SNN graph as the kernel to supervise the linear matrix factorization. } \references{ Barshan E, Ghodsi A, Azimifar Z, Jahromi MZ. Supervised principal component analysis: Visualization, classification and regression on subspaces and submanifolds. Pattern Recognition. 2011 Jul 1;44(7):1357-71. \url{https://www.sciencedirect.com/science/article/pii/S0031320310005819?casa_token=AZMFg5OtPnAAAAAA:_Udu7GJ7G2ed1-XSmr-3IGSISUwcHfMpNtCj-qacXH5SBC4nwzVid36GXI3r8XG8dK5WOQui}; } \concept{dimensional_reduction} Seurat/man/FindMarkers.Rd0000644000176200001440000002761314525500037014776 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/generics.R, R/differential_expression.R \name{FindMarkers} \alias{FindMarkers} \alias{FindMarkersNode} \alias{FindMarkers.default} \alias{FindMarkers.Assay} \alias{FindMarkers.SCTAssay} \alias{FindMarkers.DimReduc} \alias{FindMarkers.Seurat} \title{Gene expression markers of identity classes} \usage{ FindMarkers(object, ...) \method{FindMarkers}{default}( object, slot = "data", counts = numeric(), cells.1 = NULL, cells.2 = NULL, features = NULL, logfc.threshold = 0.1, test.use = "wilcox", min.pct = 0.01, min.diff.pct = -Inf, verbose = TRUE, only.pos = FALSE, max.cells.per.ident = Inf, random.seed = 1, latent.vars = NULL, min.cells.feature = 3, min.cells.group = 3, pseudocount.use = 1, fc.results = NULL, densify = FALSE, ... ) \method{FindMarkers}{Assay}( object, slot = "data", cells.1 = NULL, cells.2 = NULL, features = NULL, logfc.threshold = 0.1, test.use = "wilcox", min.pct = 0.01, min.diff.pct = -Inf, verbose = TRUE, only.pos = FALSE, max.cells.per.ident = Inf, random.seed = 1, latent.vars = NULL, min.cells.feature = 3, min.cells.group = 3, pseudocount.use = 1, mean.fxn = NULL, fc.name = NULL, base = 2, densify = FALSE, norm.method = NULL, ... ) \method{FindMarkers}{SCTAssay}( object, slot = "data", cells.1 = NULL, cells.2 = NULL, features = NULL, logfc.threshold = 0.1, test.use = "wilcox", min.pct = 0.01, min.diff.pct = -Inf, verbose = TRUE, only.pos = FALSE, max.cells.per.ident = Inf, random.seed = 1, latent.vars = NULL, min.cells.feature = 3, min.cells.group = 3, pseudocount.use = 1, mean.fxn = NULL, fc.name = NULL, base = 2, densify = FALSE, recorrect_umi = TRUE, ... 
) \method{FindMarkers}{DimReduc}( object, cells.1 = NULL, cells.2 = NULL, features = NULL, logfc.threshold = 0.1, test.use = "wilcox", min.pct = 0.01, min.diff.pct = -Inf, verbose = TRUE, only.pos = FALSE, max.cells.per.ident = Inf, random.seed = 1, latent.vars = NULL, min.cells.feature = 3, min.cells.group = 3, pseudocount.use = 1, mean.fxn = rowMeans, fc.name = NULL, densify = FALSE, ... ) \method{FindMarkers}{Seurat}( object, ident.1 = NULL, ident.2 = NULL, group.by = NULL, subset.ident = NULL, assay = NULL, slot = "data", reduction = NULL, features = NULL, logfc.threshold = 0.1, pseudocount.use = 1, test.use = "wilcox", min.pct = 0.01, min.diff.pct = -Inf, verbose = TRUE, only.pos = FALSE, max.cells.per.ident = Inf, random.seed = 1, latent.vars = NULL, min.cells.feature = 3, min.cells.group = 3, mean.fxn = NULL, fc.name = NULL, base = 2, densify = FALSE, ... ) } \arguments{ \item{object}{An object} \item{...}{Arguments passed to other methods and to specific DE methods} \item{slot}{Slot to pull data from; note that if \code{test.use} is "negbinom", "poisson", or "DESeq2", \code{slot} will be set to "counts"} \item{counts}{Count matrix if using scale.data for DE tests. This is used for computing pct.1 and pct.2 and for filtering features based on fraction expressing} \item{cells.1}{Vector of cell names belonging to group 1} \item{cells.2}{Vector of cell names belonging to group 2} \item{features}{Genes to test. Default is to use all genes} \item{logfc.threshold}{Limit testing to genes which show, on average, at least X-fold difference (log-scale) between the two groups of cells. Default is 0.1 Increasing logfc.threshold speeds up the function, but can miss weaker signals.} \item{test.use}{Denotes which test to use. Available options are: \itemize{ \item{"wilcox"} : Identifies differentially expressed genes between two groups of cells using a Wilcoxon Rank Sum test (default); will use a fast implementation by Presto if installed \item{"wilcox_limma"} : Identifies differentially expressed genes between two groups of cells using the limma implementation of the Wilcoxon Rank Sum test; set this option to reproduce results from Seurat v4 \item{"bimod"} : Likelihood-ratio test for single cell gene expression, (McDavid et al., Bioinformatics, 2013) \item{"roc"} : Identifies 'markers' of gene expression using ROC analysis. For each gene, evaluates (using AUC) a classifier built on that gene alone, to classify between two groups of cells. An AUC value of 1 means that expression values for this gene alone can perfectly classify the two groupings (i.e. Each of the cells in cells.1 exhibit a higher level than each of the cells in cells.2). An AUC value of 0 also means there is perfect classification, but in the other direction. A value of 0.5 implies that the gene has no predictive power to classify the two groups. Returns a 'predictive power' (abs(AUC-0.5) * 2) ranked matrix of putative differentially expressed genes. \item{"t"} : Identify differentially expressed genes between two groups of cells using the Student's t-test. \item{"negbinom"} : Identifies differentially expressed genes between two groups of cells using a negative binomial generalized linear model. Use only for UMI-based datasets \item{"poisson"} : Identifies differentially expressed genes between two groups of cells using a poisson generalized linear model. Use only for UMI-based datasets \item{"LR"} : Uses a logistic regression framework to determine differentially expressed genes. 
Constructs a logistic regression model predicting group membership based on each feature individually and compares this to a null model with a likelihood ratio test. \item{"MAST"} : Identifies differentially expressed genes between two groups of cells using a hurdle model tailored to scRNA-seq data. Utilizes the MAST package to run the DE testing. \item{"DESeq2"} : Identifies differentially expressed genes between two groups of cells based on a model using DESeq2 which uses a negative binomial distribution (Love et al, Genome Biology, 2014).This test does not support pre-filtering of genes based on average difference (or percent detection rate) between cell groups. However, genes may be pre-filtered based on their minimum detection rate (min.pct) across both cell groups. To use this method, please install DESeq2, using the instructions at https://bioconductor.org/packages/release/bioc/html/DESeq2.html }} \item{min.pct}{only test genes that are detected in a minimum fraction of min.pct cells in either of the two populations. Meant to speed up the function by not testing genes that are very infrequently expressed. Default is 0.01} \item{min.diff.pct}{only test genes that show a minimum difference in the fraction of detection between the two groups. Set to -Inf by default} \item{verbose}{Print a progress bar once expression testing begins} \item{only.pos}{Only return positive markers (FALSE by default)} \item{max.cells.per.ident}{Down sample each identity class to a max number. Default is no downsampling. Not activated by default (set to Inf)} \item{random.seed}{Random seed for downsampling} \item{latent.vars}{Variables to test, used only when \code{test.use} is one of 'LR', 'negbinom', 'poisson', or 'MAST'} \item{min.cells.feature}{Minimum number of cells expressing the feature in at least one of the two groups, currently only used for poisson and negative binomial tests} \item{min.cells.group}{Minimum number of cells in one of the groups} \item{pseudocount.use}{Pseudocount to add to averaged expression values when calculating logFC. 1 by default.} \item{fc.results}{data.frame from FoldChange} \item{densify}{Convert the sparse matrix to a dense form before running the DE test. This can provide speedups but might require higher memory; default is FALSE} \item{mean.fxn}{Function to use for fold change or average difference calculation. If NULL, the appropriate function will be chose according to the slot used} \item{fc.name}{Name of the fold change, average difference, or custom function column in the output data.frame. 
If NULL, the fold change column will be named according to the logarithm base (eg, "avg_log2FC"), or if using the scale.data slot "avg_diff".} \item{base}{The base with respect to which logarithms are computed.} \item{norm.method}{Normalization method for fold change calculation when \code{slot} is \dQuote{\code{data}}} \item{recorrect_umi}{Recalculate corrected UMI counts using minimum of the median UMIs when performing DE using multiple SCT objects; default is TRUE} \item{ident.1}{Identity class to define markers for; pass an object of class \code{phylo} or 'clustertree' to find markers for a node in a cluster tree; passing 'clustertree' requires \code{\link{BuildClusterTree}} to have been run} \item{ident.2}{A second identity class for comparison; if \code{NULL}, use all other cells for comparison; if an object of class \code{phylo} or 'clustertree' is passed to \code{ident.1}, must pass a node to find markers for} \item{group.by}{Regroup cells into a different identity class prior to performing differential expression (see example)} \item{subset.ident}{Subset a particular identity class prior to regrouping. Only relevant if group.by is set (see example)} \item{assay}{Assay to use in differential expression testing} \item{reduction}{Reduction to use in differential expression testing - will test for DE on cell embeddings} } \value{ data.frame with a ranked list of putative markers as rows, and associated statistics as columns (p-values, ROC score, etc., depending on the test used (\code{test.use})). The following columns are always present: \itemize{ \item \code{avg_logFC}: log fold-chage of the average expression between the two groups. Positive values indicate that the gene is more highly expressed in the first group \item \code{pct.1}: The percentage of cells where the gene is detected in the first group \item \code{pct.2}: The percentage of cells where the gene is detected in the second group \item \code{p_val_adj}: Adjusted p-value, based on bonferroni correction using all genes in the dataset } } \description{ Finds markers (differentially expressed genes) for identity classes } \details{ p-value adjustment is performed using bonferroni correction based on the total number of genes in the dataset. Other correction methods are not recommended, as Seurat pre-filters genes using the arguments above, reducing the number of tests performed. Lastly, as Aaron Lun has pointed out, p-values should be interpreted cautiously, as the genes used for clustering are the same genes tested for differential expression. } \examples{ \dontrun{ data("pbmc_small") # Find markers for cluster 2 markers <- FindMarkers(object = pbmc_small, ident.1 = 2) head(x = markers) # Take all cells in cluster 2, and find markers that separate cells in the 'g1' group (metadata # variable 'group') markers <- FindMarkers(pbmc_small, ident.1 = "g1", group.by = 'groups', subset.ident = "2") head(x = markers) # Pass 'clustertree' or an object of class phylo to ident.1 and # a node to ident.2 as a replacement for FindMarkersNode if (requireNamespace("ape", quietly = TRUE)) { pbmc_small <- BuildClusterTree(object = pbmc_small) markers <- FindMarkers(object = pbmc_small, ident.1 = 'clustertree', ident.2 = 5) head(x = markers) } } } \references{ McDavid A, Finak G, Chattopadyay PK, et al. Data exploration, quality control and testing in single-cell qPCR-based gene expression experiments. Bioinformatics. 2013;29(4):461-467. doi:10.1093/bioinformatics/bts714 Trapnell C, et al. 
The dynamics and regulators of cell fate decisions are revealed by pseudotemporal ordering of single cells. Nature Biotechnology volume 32, pages 381-386 (2014) Andrew McDavid, Greg Finak and Masanao Yajima (2017). MAST: Model-based Analysis of Single Cell Transcriptomics. R package version 1.2.1. https://github.com/RGLab/MAST/ Love MI, Huber W and Anders S (2014). "Moderated estimation of fold change and dispersion for RNA-seq data with DESeq2." Genome Biology. https://bioconductor.org/packages/release/bioc/html/DESeq2.html } \seealso{ \code{FoldChange} } \concept{differential_expression} Seurat/man/SingleExIPlot.Rd0000644000176200001440000000263614525500037015255 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/visualization.R \name{SingleExIPlot} \alias{SingleExIPlot} \title{Plot a single expression by identity on a plot} \usage{ SingleExIPlot( data, idents, split = NULL, type = "violin", sort = FALSE, y.max = NULL, adjust = 1, pt.size = 0, alpha = 1, cols = NULL, seed.use = 42, log = FALSE, add.noise = TRUE, raster = NULL ) } \arguments{ \item{data}{Data to plot} \item{idents}{Idents to use} \item{split}{Use a split violin plot} \item{type}{Make either a \dQuote{ridge} or \dQuote{violin} plot} \item{sort}{Sort identity classes (on the x-axis) by the average expression of the attribute being plotted} \item{y.max}{Maximum Y value to plot} \item{adjust}{Adjust parameter for geom_violin} \item{pt.size}{Size of points for violin plots} \item{alpha}{Alpha value for violin plots} \item{cols}{Colors to use for plotting} \item{seed.use}{Random seed to use. If NULL, don't set a seed} \item{log}{Plot Y axis on log10 scale} \item{add.noise}{Whether to add a small amount of noise for plotting} \item{raster}{Convert points to raster format. Requires 'ggrastr' to be installed. Default is \code{NULL}, which automatically rasterizes if ggrastr is installed and the number of points exceeds 100,000.} } \value{ A ggplot-based Expression-by-Identity plot } \description{ Plot a single expression by identity on a plot } \keyword{internal} Seurat/man/CellSelector.Rd0000644000176200001440000000234014525500037015139 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/visualization.R \name{CellSelector} \alias{CellSelector} \alias{FeatureLocator} \title{Cell Selector} \usage{ CellSelector(plot, object = NULL, ident = "SelectedCells", ...) FeatureLocator(plot, ...)
} \arguments{ \item{plot}{A ggplot2 plot} \item{object}{An optional Seurat object; if passed, will return an object with the identities of selected cells set to \code{ident}} \item{ident}{An optional new identity class to assign the selected cells} \item{...}{Ignored} } \value{ If \code{object} is \code{NULL}, the names of the points selected; otherwise, a Seurat object with the selected cells' identity classes set to \code{ident} } \description{ Select points on a scatterplot and get information about them } \examples{ \dontrun{ data("pbmc_small") plot <- DimPlot(object = pbmc_small) # Follow instructions in the terminal to select points cells.located <- CellSelector(plot = plot) cells.located # Automatically set the identity class of selected cells and return a new Seurat object pbmc_small <- CellSelector(plot = plot, object = pbmc_small, ident = 'SelectedCells') } } \seealso{ \code{\link{DimPlot}} \code{\link{FeaturePlot}} } \concept{visualization} Seurat/man/UnSketchEmbeddings.Rd0000644000176200001440000000111314525500037016262 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/integration.R \name{UnSketchEmbeddings} \alias{UnSketchEmbeddings} \title{Transfer embeddings from sketched cells to the full data} \usage{ UnSketchEmbeddings( atom.data, atom.cells = NULL, orig.data, embeddings, sketch.matrix = NULL ) } \arguments{ \item{atom.data}{Atom data} \item{atom.cells}{Atom cells} \item{orig.data}{Original data} \item{embeddings}{Embeddings of atom cells} \item{sketch.matrix}{Sketch matrix} } \description{ Transfer embeddings from sketched cells to the full data } Seurat/man/PercentageFeatureSet.Rd0000644000176200001440000000322514525500037016627 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/utilities.R \name{PercentageFeatureSet} \alias{PercentageFeatureSet} \title{Calculate the percentage of all counts that belong to a given set of features} \usage{ PercentageFeatureSet( object, pattern = NULL, features = NULL, col.name = NULL, assay = NULL ) } \arguments{ \item{object}{A Seurat object} \item{pattern}{A regex pattern to match features against} \item{features}{A defined feature set. If features are provided, the pattern matching is ignored} \item{col.name}{Name of the meta.data column to assign the results to. If this is not NULL, returns a Seurat object with the proportion of the feature set stored in metadata.} \item{assay}{Assay to use} } \value{ Returns a vector with the proportion of the feature set or, if col.name is set, returns a Seurat object with the proportion of the feature set stored in metadata. } \description{ This function enables you to easily calculate the percentage of all the counts belonging to a subset of the possible features for each cell. This is useful when trying to compute the percentage of transcripts that map to mitochondrial genes for example. The calculation here is simply the column sum of the matrix present in the counts slot for features belonging to the set divided by the column sum for all features times 100. } \examples{ data("pbmc_small") # Calculate the proportion of transcripts mapping to mitochondrial genes # NOTE: The pattern provided works for human gene names.
You may need to adjust depending on your # system of interest pbmc_small[["percent.mt"]] <- PercentageFeatureSet(object = pbmc_small, pattern = "^MT-") } \concept{utilities} Seurat/man/DimReduc-class.Rd0000644000176200001440000000070614525500037015362 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/reexports.R \docType{class} \name{DimReduc-class} \alias{DimReduc-class} \title{The DimReduc Class} \description{ The \code{DimReduc} object stores a dimensionality reduction taken out in Seurat; for more details, please see the documentation in \code{\link[SeuratObject:DimReduc]{SeuratObject}} } \seealso{ \code{\link[SeuratObject:DimReduc]{SeuratObject::DimReduc-class}} } Seurat/man/PolyFeaturePlot.Rd0000644000176200001440000000263714525500037015666 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/visualization.R \name{PolyFeaturePlot} \alias{PolyFeaturePlot} \title{Polygon FeaturePlot} \usage{ PolyFeaturePlot( object, features, cells = NULL, poly.data = "spatial", ncol = ceiling(x = length(x = features)/2), min.cutoff = 0, max.cutoff = NA, common.scale = TRUE, flip.coords = FALSE ) } \arguments{ \item{object}{Seurat object} \item{features}{Vector of features to plot. Features can come from: \itemize{ \item An \code{Assay} feature (e.g. a gene name - "MS4A1") \item A column name from meta.data (e.g. mitochondrial percentage - "percent.mito") \item A column name from a \code{DimReduc} object corresponding to the cell embedding values (e.g. the PC 1 scores - "PC_1") }} \item{cells}{Vector of cells to plot (default is all cells)} \item{poly.data}{Name of the polygon dataframe in the misc slot} \item{ncol}{Number of columns to split the plot into} \item{min.cutoff, max.cutoff}{Vector of minimum and maximum cutoff values for each feature, may specify quantile in the form of 'q##' where '##' is the quantile (eg, 'q1', 'q10')} \item{common.scale}{...} \item{flip.coords}{Flip x and y coordinates} } \value{ Returns a ggplot object } \description{ Plot cells as polygons, rather than single points. Color cells by any value accessible by \code{\link{FetchData}}. 
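As a hedged illustration (not one of the package's shipped examples), a call might look like the sketch below, where \code{obj} is assumed to be a Seurat object whose \code{misc} slot holds a polygon data frame named \dQuote{spatial}: \preformatted{# Sketch only: `obj` and its polygon data are assumptions, not bundled data.
PolyFeaturePlot(
  object = obj,
  features = c("MS4A1", "CD3E"),
  poly.data = "spatial",
  ncol = 1
)
}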
} \concept{spatial} \concept{visualization} Seurat/man/BridgeReferenceSet-class.Rd0000644000176200001440000000133614525500037017355 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/objects.R \docType{class} \name{BridgeReferenceSet-class} \alias{BridgeReferenceSet-class} \alias{BridgeReferenceSet} \title{The BridgeReferenceSet Class} \description{ The BridgeReferenceSet is an output from PrepareBridgeReference } \section{Slots}{ \describe{ \item{\code{bridge}}{The multi-omic object} \item{\code{reference}}{The reference object containing only the bridge representation assay} \item{\code{params}}{A list of parameters used in PrepareBridgeReference} \item{\code{command}}{Store log of parameters that were used} }} \concept{objects} Seurat/man/FilterSlideSeq.Rd0000644000176200001440000000313514525500037015441 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/objects.R \name{FilterSlideSeq} \alias{FilterSlideSeq} \title{Filter stray beads from Slide-seq puck} \usage{ FilterSlideSeq( object, image = "image", center = NULL, radius = NULL, do.plot = TRUE ) } \arguments{ \item{object}{Seurat object with slide-seq data} \item{image}{Name of the image where the coordinates are stored} \item{center}{Vector specifying the x and y coordinates for the center of the inclusion circle} \item{radius}{Radius of the circle of inclusion} \item{do.plot}{Display a \code{\link{SpatialDimPlot}} with the cells being removed labeled.} } \value{ Returns a Seurat object with only the subset of cells that pass the circular filter } \description{ This function is useful for removing stray beads that fall outside the main Slide-seq puck area. Essentially, it's a circular filter where you set a center and radius defining a circle of beads to keep. If the center is not set, it will be estimated from the bead coordinates (removing the 1st and 99th quantile to avoid skewing the center by the stray beads). By default, this function will display a \code{\link{SpatialDimPlot}} showing which cells were removed for easy adjustment of the center and/or radius. } \examples{ \dontrun{ # This example uses the ssHippo dataset which you can download # using the SeuratData package. library(SeuratData) data('ssHippo') # perform filtering of beads ssHippo.filtered <- FilterSlideSeq(ssHippo, radius = 2300) # This radius looks too small, so increase it and repeat until satisfied } } \concept{objects} \concept{spatial} Seurat/man/ISpatialDimPlot.Rd0000644000176200001440000000140314525500037015557 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/visualization.R \name{ISpatialDimPlot} \alias{ISpatialDimPlot} \title{Visualize clusters spatially and interactively} \usage{ ISpatialDimPlot(object, image = NULL, group.by = NULL, alpha = c(0.3, 1)) } \arguments{ \item{object}{A Seurat object} \item{image}{Name of the image to use in the plot} \item{group.by}{Name of meta.data column to group the data by} \item{alpha}{Controls opacity of spots. Provide as a vector specifying the min and max for SpatialFeaturePlot.
For SpatialDimPlot, provide a single alpha value for each plot.} } \value{ Returns final plot as a ggplot object } \description{ Visualize clusters spatially and interactively } \concept{spatial} \concept{visualization} Seurat/man/FindSpatiallyVariableFeatures.Rd0000644000176200001440000000544514525500037020500 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/generics.R, R/preprocessing.R, % R/preprocessing5.R \name{FindSpatiallyVariableFeatures} \alias{FindSpatiallyVariableFeatures} \alias{FindSpatiallyVariableFeatures.default} \alias{FindSpatiallyVariableFeatures.Assay} \alias{FindSpatiallyVariableFeatures.Seurat} \alias{FindSpatiallyVariableFeatures.StdAssay} \title{Find spatially variable features} \usage{ FindSpatiallyVariableFeatures(object, ...) \method{FindSpatiallyVariableFeatures}{default}( object, spatial.location, selection.method = c("markvariogram", "moransi"), r.metric = 5, x.cuts = NULL, y.cuts = NULL, verbose = TRUE, ... ) \method{FindSpatiallyVariableFeatures}{Assay}( object, slot = "scale.data", spatial.location, selection.method = c("markvariogram", "moransi"), features = NULL, r.metric = 5, x.cuts = NULL, y.cuts = NULL, nfeatures = nfeatures, verbose = TRUE, ... ) \method{FindSpatiallyVariableFeatures}{Seurat}( object, assay = NULL, slot = "scale.data", features = NULL, image = NULL, selection.method = c("markvariogram", "moransi"), r.metric = 5, x.cuts = NULL, y.cuts = NULL, nfeatures = 2000, verbose = TRUE, ... ) \method{FindSpatiallyVariableFeatures}{StdAssay}( object, layer = "scale.data", spatial.location, selection.method = c("markvariogram", "moransi"), features = NULL, r.metric = 5, x.cuts = NULL, y.cuts = NULL, nfeatures = nfeatures, verbose = TRUE, ... ) } \arguments{ \item{object}{A Seurat object, assay, or expression matrix} \item{...}{Arguments passed to other methods} \item{spatial.location}{Coordinates for each cell/spot/bead} \item{selection.method}{Method for selecting spatially variable features. \itemize{ \item \code{markvariogram}: See \code{\link{RunMarkVario}} for details \item \code{moransi}: See \code{\link{RunMoransI}} for details. }} \item{r.metric}{r value at which to report the "trans" value of the mark variogram} \item{x.cuts}{Number of divisions to make in the x direction, helps define the grid over which binning is performed} \item{y.cuts}{Number of divisions to make in the y direction, helps define the grid over which binning is performed} \item{verbose}{Print messages and progress} \item{slot}{Slot in the Assay to pull data from} \item{features}{If provided, only compute on given features. Otherwise, compute for all features.} \item{nfeatures}{Number of features to mark as the top spatially variable.} \item{assay}{Assay to pull the features (marks) from} \item{image}{Name of image to pull the coordinates from} \item{layer}{Layer in the Assay5 to pull data from} } \description{ Identify features whose variability in expression can be explained to some degree by spatial location. } \concept{preprocessing} \concept{spatial} Seurat/man/IntegrationAnchorSet-class.Rd0000644000176200001440000000062614525500037017761 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/objects.R \docType{class} \name{IntegrationAnchorSet-class} \alias{IntegrationAnchorSet-class} \alias{IntegrationAnchorSet} \title{The IntegrationAnchorSet Class} \description{ Inherits from the Anchorset class. Implemented mainly for method dispatch purposes. 
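Objects of this class are typically produced by \code{\link{FindIntegrationAnchors}} and consumed by \code{\link{IntegrateData}}; a hedged sketch of that workflow (assuming \code{obj.list} is a list of normalized Seurat objects with shared variable features) is: \preformatted{# Sketch only: `obj.list` is an assumed list of pre-processed Seurat objects.
anchors <- FindIntegrationAnchors(object.list = obj.list, dims = 1:30)
combined <- IntegrateData(anchorset = anchors, dims = 1:30)
}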
See \code{\link{AnchorSet}} for slot details. } \concept{objects} Seurat/man/CreateCategoryMatrix.Rd0000644000176200001440000000103314525500037016643 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/utilities.R \name{CreateCategoryMatrix} \alias{CreateCategoryMatrix} \title{Create one hot matrix for a given label} \usage{ CreateCategoryMatrix( labels, method = c("aggregate", "average"), cells.name = NULL ) } \arguments{ \item{labels}{A vector of labels} \item{method}{Method to aggregate cells with the same label. Either 'aggregate' or 'average'} \item{cells.name}{A vector of cell names} } \description{ Create one hot matrix for a given label } Seurat/man/IntegrateEmbeddings.Rd0000644000176200001440000001026714525500037016472 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/generics.R, R/integration.R \name{IntegrateEmbeddings} \alias{IntegrateEmbeddings} \alias{IntegrateEmbeddings.IntegrationAnchorSet} \alias{IntegrateEmbeddings.TransferAnchorSet} \title{Integrate low dimensional embeddings} \usage{ IntegrateEmbeddings(anchorset, ...) \method{IntegrateEmbeddings}{IntegrationAnchorSet}( anchorset, new.reduction.name = "integrated_dr", reductions = NULL, dims.to.integrate = NULL, k.weight = 100, weight.reduction = NULL, sd.weight = 1, sample.tree = NULL, preserve.order = FALSE, verbose = TRUE, ... ) \method{IntegrateEmbeddings}{TransferAnchorSet}( anchorset, reference, query, query.assay = NULL, new.reduction.name = "integrated_dr", reductions = "pcaproject", dims.to.integrate = NULL, k.weight = 100, weight.reduction = NULL, reuse.weights.matrix = TRUE, sd.weight = 1, preserve.order = FALSE, verbose = TRUE, ... ) } \arguments{ \item{anchorset}{An AnchorSet object} \item{...}{Reserved for internal use} \item{new.reduction.name}{Name for new integrated dimensional reduction.} \item{reductions}{Name of reductions to be integrated. For a TransferAnchorSet, this should be the name of a reduction present in the anchorset object (for example, "pcaproject"). For an IntegrationAnchorSet, this should be a \code{\link{DimReduc}} object containing all cells present in the anchorset object.} \item{dims.to.integrate}{Number of dimensions to return integrated values for} \item{k.weight}{Number of neighbors to consider when weighting anchors} \item{weight.reduction}{Dimension reduction to use when calculating anchor weights. This can be one of: \itemize{ \item{A string, specifying the name of a dimension reduction present in all objects to be integrated} \item{A vector of strings, specifying the name of a dimension reduction to use for each object to be integrated} \item{A vector of \code{\link{DimReduc}} objects, specifying the object to use for each object in the integration} \item{NULL, in which case the full corrected space is used for computing anchor weights.} }} \item{sd.weight}{Controls the bandwidth of the Gaussian kernel for weighting} \item{sample.tree}{Specify the order of integration. Order of integration should be encoded in a matrix, where each row represents one of the pairwise integration steps. Negative numbers specify a dataset, positive numbers specify the integration results from a given row (the format of the merge matrix included in the \code{\link{hclust}} function output). For example: \code{matrix(c(-2, 1, -3, -1), ncol = 2)} gives: \if{html}{\out{
}}\preformatted{     [,1] [,2]
[1,]   -2   -3
[2,]    1   -1
}\if{html}{\out{
}} Which would cause dataset 2 and 3 to be integrated first, then the resulting object integrated with dataset 1. If NULL, the sample tree will be computed automatically.} \item{preserve.order}{Do not reorder objects based on size for each pairwise integration.} \item{verbose}{Print progress bars and output} \item{reference}{Reference object used in anchorset construction} \item{query}{Query object used in anchorset construction} \item{query.assay}{Name of the Assay to use from query} \item{reuse.weights.matrix}{Can be used in conjunction with the store.weights parameter in TransferData to reuse a precomputed weights matrix.} } \value{ When called on a TransferAnchorSet (from FindTransferAnchors), this will return the query object with the integrated embeddings stored in a new reduction. When called on an IntegrationAnchorSet (from IntegrateData), this will return a merged object with the integrated reduction stored. } \description{ Perform dataset integration using a pre-computed Anchorset of specified low dimensional representations. } \details{ The main steps of this procedure are identical to \code{\link{IntegrateData}} with one key distinction. When computing the weights matrix, the distance calculations are performed in the full space of integrated embeddings when integrating more than two datasets, as opposed to a reduced PCA space which is the default behavior in \code{\link{IntegrateData}}. } \concept{integration} Seurat/man/SCTResults.Rd0000644000176200001440000000263614525500037014602 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/generics.R, R/objects.R \name{SCTResults} \alias{SCTResults} \alias{SCTResults<-} \alias{SCTResults.SCTModel} \alias{SCTResults<-.SCTModel} \alias{SCTResults.SCTAssay} \alias{SCTResults<-.SCTAssay} \alias{SCTResults.Seurat} \title{Get SCT results from an Assay} \usage{ SCTResults(object, ...) SCTResults(object, ...) <- value \method{SCTResults}{SCTModel}(object, slot, ...) \method{SCTResults}{SCTModel}(object, slot, ...) <- value \method{SCTResults}{SCTAssay}(object, slot, model = NULL, ...) \method{SCTResults}{SCTAssay}(object, slot, model = NULL, ...) <- value \method{SCTResults}{Seurat}(object, assay = "SCT", slot, model = NULL, ...) } \arguments{ \item{object}{An object} \item{...}{Arguments passed to other methods (not used)} \item{value}{new data to set} \item{slot}{Which slot to pull the SCT results from} \item{model}{Name of SCModel to pull result from. Available names can be retrieved with \code{levels}.} \item{assay}{Assay in the Seurat object to pull from} } \value{ Returns the value present in the requested slot for the requested group. If group is not specified, returns a list of slot results for each group unless there is only one group present (in which case it just returns the slot directly). } \description{ Pull the \code{\link{SCTResults}} information from an \code{\link{SCTAssay}} object. } \concept{objects} Seurat/man/RegroupIdents.Rd0000644000176200001440000000113314525500037015350 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/utilities.R \name{RegroupIdents} \alias{RegroupIdents} \title{Regroup idents based on meta.data info} \usage{ RegroupIdents(object, metadata) } \arguments{ \item{object}{Seurat object} \item{metadata}{Name of metadata column} } \value{ A Seurat object with the active idents regrouped } \description{ For cells in each ident, set a new identity based on the most common value of a specified metadata column. 
} \examples{ data("pbmc_small") pbmc_small <- RegroupIdents(pbmc_small, metadata = "groups") } \concept{utilities} Seurat/man/VizDimLoadings.Rd0000644000176200001440000000241514525500037015445 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/visualization.R \name{VizDimLoadings} \alias{VizDimLoadings} \title{Visualize Dimensional Reduction genes} \usage{ VizDimLoadings( object, dims = 1:5, nfeatures = 30, col = "blue", reduction = "pca", projected = FALSE, balanced = FALSE, ncol = NULL, combine = TRUE ) } \arguments{ \item{object}{Seurat object} \item{dims}{Number of dimensions to display} \item{nfeatures}{Number of genes to display} \item{col}{Color of points to use} \item{reduction}{Reduction technique to visualize results for} \item{projected}{Use reduction values for full dataset (i.e. projected dimensional reduction values)} \item{balanced}{Return an equal number of genes with + and - scores. If FALSE (default), returns the top genes ranked by the scores absolute values} \item{ncol}{Number of columns to display} \item{combine}{Combine plots into a single \code{patchwork} ggplot object. If \code{FALSE}, return a list of ggplot objects} } \value{ A \code{patchwork} ggplot object if \code{combine = TRUE}; otherwise, a list of ggplot objects } \description{ Visualize top genes associated with reduction components } \examples{ data("pbmc_small") VizDimLoadings(object = pbmc_small) } \concept{visualization} Seurat/man/GaussianSketch.Rd0000644000176200001440000000102614525500037015473 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/sketching.R \name{GaussianSketch} \alias{GaussianSketch} \title{Gaussian sketching} \usage{ GaussianSketch(nsketch, ncells, seed = NA_integer_, ...) } \arguments{ \item{nsketch}{Number of sketching random cells} \item{ncells}{Number of cells in the original data} \item{seed}{a single value, interpreted as an integer, or \code{NULL} (see \sQuote{Details}).} \item{...}{Ignored} } \value{ ... } \description{ Gaussian sketching } \keyword{internal} Seurat/man/AugmentPlot.Rd0000644000176200001440000000151614525500037015022 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/visualization.R \name{AugmentPlot} \alias{AugmentPlot} \title{Augments ggplot2-based plot with a PNG image.} \usage{ AugmentPlot(plot, width = 10, height = 10, dpi = 100) } \arguments{ \item{plot}{A ggplot object} \item{width, height}{Width and height of PNG version of plot} \item{dpi}{Plot resolution} } \value{ A ggplot object } \description{ Creates "vector-friendly" plots. Does this by saving a copy of the plot as a PNG file, then adding the PNG image with \code{\link[ggplot2]{annotation_raster}} to a blank plot of the same dimensions as \code{plot}. Please note: original legends and axes will be lost during augmentation. 
} \examples{ \dontrun{ data("pbmc_small") plot <- DimPlot(object = pbmc_small) AugmentPlot(plot = plot) } } \concept{visualization} Seurat/man/FetchResiduals.Rd0000644000176200001440000000330514525500037015466 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/preprocessing5.R \name{FetchResiduals} \alias{FetchResiduals} \title{Calculate pearson residuals of features not in the scale.data} \usage{ FetchResiduals( object, features, assay = NULL, umi.assay = "RNA", layer = "counts", clip.range = NULL, reference.SCT.model = NULL, replace.value = FALSE, na.rm = TRUE, verbose = TRUE ) } \arguments{ \item{object}{A seurat object} \item{features}{Name of features to add into the scale.data} \item{assay}{Name of the assay of the seurat object generated by SCTransform} \item{umi.assay}{Name of the assay of the seurat object containing UMI matrix and the default is RNA} \item{layer}{Name (prefix) of the layer to pull counts from} \item{clip.range}{Numeric of length two specifying the min and max values the Pearson residual will be clipped to} \item{reference.SCT.model}{reference.SCT.model If a reference SCT model should be used for calculating the residuals. When set to not NULL, ignores the `SCTModel` paramater.} \item{replace.value}{Recalculate residuals for all features, even if they are already present. Useful if you want to change the clip.range.} \item{na.rm}{For features where there is no feature model stored, return NA for residual value in scale.data when na.rm = FALSE. When na.rm is TRUE, only return residuals for features with a model stored for all cells.} \item{verbose}{Whether to print messages and progress bars} } \value{ Returns a Seurat object containing Pearson residuals of added features in its scale.data } \description{ This function calls sctransform::get_residuals. } \seealso{ \code{\link[sctransform]{get_residuals}} } \concept{preprocessing} Seurat/man/DEenrichRPlot.Rd0000644000176200001440000001054714525500037015231 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/mixscape.R \name{DEenrichRPlot} \alias{DEenrichRPlot} \title{DE and EnrichR pathway visualization barplot} \usage{ DEenrichRPlot( object, ident.1 = NULL, ident.2 = NULL, balanced = TRUE, logfc.threshold = 0.25, assay = NULL, max.genes, test.use = "wilcox", p.val.cutoff = 0.05, cols = NULL, enrich.database = NULL, num.pathway = 10, return.gene.list = FALSE, ... ) } \arguments{ \item{object}{Name of object class Seurat.} \item{ident.1}{Cell class identity 1.} \item{ident.2}{Cell class identity 2.} \item{balanced}{Option to display pathway enrichments for both negative and positive DE genes.If false, only positive DE gene will be displayed.} \item{logfc.threshold}{Limit testing to genes which show, on average, at least X-fold difference (log-scale) between the two groups of cells. Default is 0.1 Increasing logfc.threshold speeds up the function, but can miss weaker signals.} \item{assay}{Assay to use in differential expression testing} \item{max.genes}{Maximum number of genes to use as input to enrichR.} \item{test.use}{Denotes which test to use. 
Available options are: \itemize{ \item{"wilcox"} : Identifies differentially expressed genes between two groups of cells using a Wilcoxon Rank Sum test (default); will use a fast implementation by Presto if installed \item{"wilcox_limma"} : Identifies differentially expressed genes between two groups of cells using the limma implementation of the Wilcoxon Rank Sum test; set this option to reproduce results from Seurat v4 \item{"bimod"} : Likelihood-ratio test for single cell gene expression, (McDavid et al., Bioinformatics, 2013) \item{"roc"} : Identifies 'markers' of gene expression using ROC analysis. For each gene, evaluates (using AUC) a classifier built on that gene alone, to classify between two groups of cells. An AUC value of 1 means that expression values for this gene alone can perfectly classify the two groupings (i.e. Each of the cells in cells.1 exhibit a higher level than each of the cells in cells.2). An AUC value of 0 also means there is perfect classification, but in the other direction. A value of 0.5 implies that the gene has no predictive power to classify the two groups. Returns a 'predictive power' (abs(AUC-0.5) * 2) ranked matrix of putative differentially expressed genes. \item{"t"} : Identify differentially expressed genes between two groups of cells using the Student's t-test. \item{"negbinom"} : Identifies differentially expressed genes between two groups of cells using a negative binomial generalized linear model. Use only for UMI-based datasets \item{"poisson"} : Identifies differentially expressed genes between two groups of cells using a poisson generalized linear model. Use only for UMI-based datasets \item{"LR"} : Uses a logistic regression framework to determine differentially expressed genes. Constructs a logistic regression model predicting group membership based on each feature individually and compares this to a null model with a likelihood ratio test. \item{"MAST"} : Identifies differentially expressed genes between two groups of cells using a hurdle model tailored to scRNA-seq data. Utilizes the MAST package to run the DE testing. \item{"DESeq2"} : Identifies differentially expressed genes between two groups of cells based on a model using DESeq2 which uses a negative binomial distribution (Love et al, Genome Biology, 2014).This test does not support pre-filtering of genes based on average difference (or percent detection rate) between cell groups. However, genes may be pre-filtered based on their minimum detection rate (min.pct) across both cell groups. To use this method, please install DESeq2, using the instructions at https://bioconductor.org/packages/release/bioc/html/DESeq2.html }} \item{p.val.cutoff}{Cutoff to select DE genes.} \item{cols}{A list of colors to use for barplots.} \item{enrich.database}{Database to use from enrichR.} \item{num.pathway}{Number of pathways to display in barplot.} \item{return.gene.list}{Return list of DE genes} \item{...}{Arguments passed to other methods and to specific DE methods} } \value{ Returns one (only enriched) or two (both enriched and depleted) barplots with the top enriched/depleted GO terms from EnrichR. 
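For orientation, a call that produces such a barplot might look like the hedged sketch below (requires the \pkg{enrichR} package and an internet connection; the object, identity names, and database shown are assumptions, not package defaults): \preformatted{# Sketch only: `seurat.obj`, the idents, and the enrichR database are assumed.
DEenrichRPlot(
  object = seurat.obj,
  ident.1 = "KO",
  ident.2 = "NT",
  enrich.database = "GO_Biological_Process_2021",
  max.genes = 100,
  num.pathway = 10
)
}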
} \description{ DE and EnrichR pathway visualization barplot } \concept{mixscape} Seurat/man/SketchData.Rd0000644000176200001440000000307714525500037014602 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/sketching.R \name{SketchData} \alias{SketchData} \title{Sketch Data} \usage{ SketchData( object, assay = NULL, ncells = 5000L, sketched.assay = "sketch", method = c("LeverageScore", "Uniform"), var.name = "leverage.score", over.write = FALSE, seed = 123L, cast = "dgCMatrix", verbose = TRUE, ... ) } \arguments{ \item{object}{A Seurat object.} \item{assay}{Assay name. Default is NULL, in which case the default assay of the object is used.} \item{ncells}{A positive integer indicating the number of cells to sample for the sketching. Default is 5000.} \item{sketched.assay}{Sketched assay name. A sketch assay is created or overwrite with the sketch data. Default is 'sketch'.} \item{method}{Sketching method to use. Can be 'LeverageScore' or 'Uniform'. Default is 'LeverageScore'.} \item{var.name}{A metadata column name to store the leverage scores. Default is 'leverage.score'.} \item{over.write}{whether to overwrite existing column in the metadata. Default is FALSE.} \item{seed}{A positive integer for the seed of the random number generator. Default is 123.} \item{cast}{The type to cast the resulting assay to. Default is 'dgCMatrix'.} \item{verbose}{Print progress and diagnostic messages} \item{...}{Arguments passed to other methods} } \value{ A Seurat object with the sketched data added as a new assay. } \description{ This function uses sketching methods to downsample high-dimensional single-cell RNA expression data, which can help with scalability for large datasets. } Seurat/man/ReadAkoya.Rd0000644000176200001440000000541714525500037014427 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/preprocessing.R, R/convenience.R \name{ReadAkoya} \alias{ReadAkoya} \alias{LoadAkoya} \title{Read and Load Akoya CODEX data} \usage{ ReadAkoya( filename, type = c("inform", "processor", "qupath"), filter = "DAPI|Blank|Empty", inform.quant = c("mean", "total", "min", "max", "std") ) LoadAkoya( filename, type = c("inform", "processor", "qupath"), fov, assay = "Akoya", ... ) } \arguments{ \item{filename}{Path to matrix generated by upstream processing.} \item{type}{Specify which type matrix is being provided. 
\itemize{ \item \dQuote{\code{processor}}: matrix generated by CODEX Processor \item \dQuote{\code{inform}}: matrix generated by inForm \item \dQuote{\code{qupath}}: matrix generated by QuPath }} \item{filter}{A pattern to filter features by; pass \code{NA} to skip feature filtering} \item{inform.quant}{When \code{type} is \dQuote{\code{inform}}, the quantification level to read in} \item{fov}{Name to store FOV as} \item{assay}{Name to store expression matrix as} \item{...}{ Arguments passed on to \code{\link[=ReadAkoya]{ReadAkoya}} \describe{ \item{\code{}}{} }} } \value{ \code{ReadAkoya}: A list with some combination of the following values \itemize{ \item \dQuote{\code{matrix}}: a \link[Matrix:dgCMatrix-class]{sparse matrix} with expression data; cells are columns and features are rows \item \dQuote{\code{centroids}}: a data frame with cell centroid coordinates in three columns: \dQuote{x}, \dQuote{y}, and \dQuote{cell} \item \dQuote{\code{metadata}}: a data frame with cell-level meta data; includes all columns in \code{filename} that aren't in \dQuote{\code{matrix}} or \dQuote{\code{centroids}} } When \code{type} is \dQuote{\code{inform}}, additional expression matrices are returned and named using their segmentation type (eg. \dQuote{nucleus}, \dQuote{membrane}). The \dQuote{Entire Cell} segmentation type is returned in the \dQuote{\code{matrix}} entry of the list \code{LoadAkoya}: A \code{\link[SeuratObject]{Seurat}} object } \description{ Read and Load Akoya CODEX data } \note{ This function requires the \href{https://cran.r-project.org/package=data.table}{\pkg{data.table}} package to be installed } \section{Progress Updates with \pkg{progressr}}{ This function uses \href{https://cran.r-project.org/package=progressr}{\pkg{progressr}} to render status updates and progress bars. To enable progress updates, wrap the function call in \code{\link[progressr]{with_progress}} or run \code{\link[progressr:handlers]{handlers(global = TRUE)}} before running this function. For more details about \pkg{progressr}, please read \href{https://progressr.futureverse.org/articles/progressr-intro.html}{\code{vignette("progressr-intro")}} } \concept{preprocessing} Seurat/man/ScaleData.Rd0000644000176200001440000000744214525500037014410 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/generics.R, R/preprocessing.R \name{ScaleData} \alias{ScaleData} \alias{ScaleData.default} \alias{ScaleData.IterableMatrix} \alias{ScaleData.Assay} \alias{ScaleData.Seurat} \title{Scale and center the data.} \usage{ ScaleData(object, ...) \method{ScaleData}{default}( object, features = NULL, vars.to.regress = NULL, latent.data = NULL, split.by = NULL, model.use = "linear", use.umi = FALSE, do.scale = TRUE, do.center = TRUE, scale.max = 10, block.size = 1000, min.cells.to.block = 3000, verbose = TRUE, ... ) \method{ScaleData}{IterableMatrix}( object, features = NULL, do.scale = TRUE, do.center = TRUE, scale.max = 10, ... ) \method{ScaleData}{Assay}( object, features = NULL, vars.to.regress = NULL, latent.data = NULL, split.by = NULL, model.use = "linear", use.umi = FALSE, do.scale = TRUE, do.center = TRUE, scale.max = 10, block.size = 1000, min.cells.to.block = 3000, verbose = TRUE, ... ) \method{ScaleData}{Seurat}( object, features = NULL, assay = NULL, vars.to.regress = NULL, split.by = NULL, model.use = "linear", use.umi = FALSE, do.scale = TRUE, do.center = TRUE, scale.max = 10, block.size = 1000, min.cells.to.block = 3000, verbose = TRUE, ... 
) } \arguments{ \item{object}{An object} \item{...}{Arguments passed to other methods} \item{features}{Vector of features names to scale/center. Default is variable features.} \item{vars.to.regress}{Variables to regress out (previously latent.vars in RegressOut). For example, nUMI, or percent.mito.} \item{latent.data}{Extra data to regress out, should be cells x latent data} \item{split.by}{Name of variable in object metadata or a vector or factor defining grouping of cells. See argument \code{f} in \code{\link[base]{split}} for more details} \item{model.use}{Use a linear model or generalized linear model (poisson, negative binomial) for the regression. Options are 'linear' (default), 'poisson', and 'negbinom'} \item{use.umi}{Regress on UMI count data. Default is FALSE for linear modeling, but automatically set to TRUE if model.use is 'negbinom' or 'poisson'} \item{do.scale}{Whether to scale the data.} \item{do.center}{Whether to center the data.} \item{scale.max}{Max value to return for scaled data. The default is 10. Setting this can help reduce the effects of features that are only expressed in a very small number of cells. If regressing out latent variables and using a non-linear model, the default is 50.} \item{block.size}{Default size for number of features to scale at in a single computation. Increasing block.size may speed up calculations but at an additional memory cost.} \item{min.cells.to.block}{If object contains fewer than this number of cells, don't block for scaling calculations.} \item{verbose}{Displays a progress bar for scaling procedure} \item{assay}{Name of Assay to scale} } \description{ Scales and centers features in the dataset. If variables are provided in vars.to.regress, they are individually regressed against each feature, and the resulting residuals are then scaled and centered. } \details{ ScaleData now incorporates the functionality of the function formerly known as RegressOut (which regressed out given the effects of provided variables and then scaled the residuals). To make use of the regression functionality, simply pass the variables you want to remove to the vars.to.regress parameter. Setting center to TRUE will center the expression for each feature by subtracting the average expression for that feature. Setting scale to TRUE will scale the expression level for each feature by dividing the centered feature expression levels by their standard deviations if center is TRUE and by their root mean square otherwise. } \concept{preprocessing} Seurat/man/Neighbor-class.Rd0000644000176200001440000000056414525500037015425 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/reexports.R \docType{class} \name{Neighbor-class} \alias{Neighbor-class} \title{The Neighbor Class} \description{ For more details, please see the documentation in \code{\link[SeuratObject:Neighbor]{SeuratObject}} } \seealso{ \code{\link[SeuratObject:Neighbor]{SeuratObject::Neighbor-class}} } Seurat/man/ReadSTARsolo.Rd0000644000176200001440000000061614525500037015025 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/convenience.R \name{ReadSTARsolo} \alias{ReadSTARsolo} \title{Read output from STARsolo} \usage{ ReadSTARsolo(data.dir, ...) 
} \arguments{ \item{data.dir}{Directory containing the data files} \item{...}{Extra parameters passed to \code{\link{ReadMtx}}} } \description{ Read output from STARsolo } \concept{convenience} Seurat/man/SpatialPlot.Rd0000644000176200001440000001346314525500037015023 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/visualization.R, R/convenience.R \name{SpatialPlot} \alias{SpatialPlot} \alias{SpatialDimPlot} \alias{SpatialFeaturePlot} \title{Visualize spatial clustering and expression data.} \usage{ SpatialPlot( object, group.by = NULL, features = NULL, images = NULL, cols = NULL, image.alpha = 1, crop = TRUE, slot = "data", keep.scale = "feature", min.cutoff = NA, max.cutoff = NA, cells.highlight = NULL, cols.highlight = c("#DE2D26", "grey50"), facet.highlight = FALSE, label = FALSE, label.size = 5, label.color = "white", label.box = TRUE, repel = FALSE, ncol = NULL, combine = TRUE, pt.size.factor = 1.6, alpha = c(1, 1), stroke = 0.25, interactive = FALSE, do.identify = FALSE, identify.ident = NULL, do.hover = FALSE, information = NULL ) SpatialDimPlot( object, group.by = NULL, images = NULL, cols = NULL, crop = TRUE, cells.highlight = NULL, cols.highlight = c("#DE2D26", "grey50"), facet.highlight = FALSE, label = FALSE, label.size = 7, label.color = "white", repel = FALSE, ncol = NULL, combine = TRUE, pt.size.factor = 1.6, alpha = c(1, 1), image.alpha = 1, stroke = 0.25, label.box = TRUE, interactive = FALSE, information = NULL ) SpatialFeaturePlot( object, features, images = NULL, crop = TRUE, slot = "data", keep.scale = "feature", min.cutoff = NA, max.cutoff = NA, ncol = NULL, combine = TRUE, pt.size.factor = 1.6, alpha = c(1, 1), image.alpha = 1, stroke = 0.25, interactive = FALSE, information = NULL ) } \arguments{ \item{object}{A Seurat object} \item{group.by}{Name of meta.data column to group the data by} \item{features}{Name of the feature to visualize. Provide either group.by OR features, not both.} \item{images}{Name of the images to use in the plot(s)} \item{cols}{Vector of colors, each color corresponds to an identity class. This may also be a single character or numeric value corresponding to a palette as specified by \code{\link[RColorBrewer]{brewer.pal.info}}. By default, ggplot2 assigns colors} \item{image.alpha}{Adjust the opacity of the background images. Set to 0 to remove.} \item{crop}{Crop the plot in to focus on points plotted. Set to \code{FALSE} to show entire background image.} \item{slot}{If plotting a feature, which data slot to pull from (counts, data, or scale.data)} \item{keep.scale}{How to handle the color scale across multiple plots. 
Options are: \itemize{ \item \dQuote{feature} (default; by row/feature scaling): The plots for each individual feature are scaled to the maximum expression of the feature across the conditions provided to \code{split.by} \item \dQuote{all} (universal scaling): The plots for all features and conditions are scaled to the maximum expression value for the feature with the highest overall expression \item \code{NULL} (no scaling): Each individual plot is scaled to the maximum expression value of the feature in the condition provided to \code{split.by}; be aware setting \code{NULL} will result in color scales that are not comparable between plots }} \item{min.cutoff, max.cutoff}{Vector of minimum and maximum cutoff values for each feature, may specify quantile in the form of 'q##' where '##' is the quantile (eg, 'q1', 'q10')} \item{cells.highlight}{A list of character or numeric vectors of cells to highlight. If only one group of cells desired, can simply pass a vector instead of a list. If set, colors selected cells to the color(s) in cols.highlight} \item{cols.highlight}{A vector of colors to highlight the cells as; ordered the same as the groups in cells.highlight; last color corresponds to unselected cells.} \item{facet.highlight}{When highlighting certain groups of cells, split each group into its own plot} \item{label}{Whether to label the clusters} \item{label.size}{Sets the size of the labels} \item{label.color}{Sets the color of the label text} \item{label.box}{Whether to put a box around the label text (geom_text vs geom_label)} \item{repel}{Repels the labels to prevent overlap} \item{ncol}{Number of columns if plotting multiple plots} \item{combine}{Combine plots into a single gg object; note that if TRUE; themeing will not work when plotting multiple features/groupings} \item{pt.size.factor}{Scale the size of the spots.} \item{alpha}{Controls opacity of spots. Provide as a vector specifying the min and max for SpatialFeaturePlot. For SpatialDimPlot, provide a single alpha value for each plot.} \item{stroke}{Control the width of the border around the spots} \item{interactive}{Launch an interactive SpatialDimPlot or SpatialFeaturePlot session, see \code{\link{ISpatialDimPlot}} or \code{\link{ISpatialFeaturePlot}} for more details} \item{do.identify, do.hover}{DEPRECATED in favor of \code{interactive}} \item{identify.ident}{DEPRECATED} \item{information}{An optional dataframe or matrix of extra information to be displayed on hover} } \value{ If \code{do.identify}, either a vector of cells selected or the object with selected cells set to the value of \code{identify.ident} (if set). Else, if \code{do.hover}, a plotly object with interactive graphics. Else, a ggplot object } \description{ SpatialPlot plots a feature or discrete grouping (e.g. cluster assignments) as spots over the image that was collected. We also provide SpatialFeaturePlot and SpatialDimPlot as wrapper functions around SpatialPlot for a consistent naming framework. 
} \examples{ \dontrun{ # For functionality analogous to FeaturePlot SpatialPlot(seurat.object, features = "MS4A1") SpatialFeaturePlot(seurat.object, features = "MS4A1") # For functionality analogous to DimPlot SpatialPlot(seurat.object, group.by = "clusters") SpatialDimPlot(seurat.object, group.by = "clusters") } } \concept{convenience} \concept{spatial} \concept{visualization} Seurat/man/BuildClusterTree.Rd0000644000176200001440000000401414525500037016000 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/tree.R \name{BuildClusterTree} \alias{BuildClusterTree} \title{Phylogenetic Analysis of Identity Classes} \usage{ BuildClusterTree( object, assay = NULL, features = NULL, dims = NULL, reduction = "pca", graph = NULL, slot = "data", reorder = FALSE, reorder.numeric = FALSE, verbose = TRUE ) } \arguments{ \item{object}{Seurat object} \item{assay}{Assay to use for the analysis.} \item{features}{Genes to use for the analysis. Default is the set of variable genes (\code{VariableFeatures(object = object)})} \item{dims}{If set, tree is calculated in dimension reduction space; overrides \code{features}} \item{reduction}{Name of dimension reduction to use. Only used if \code{dims} is not NULL.} \item{graph}{If graph is passed, build tree based on graph connectivity between clusters; overrides \code{dims} and \code{features}} \item{slot}{(Deprecated). Slot(s) to use} \item{reorder}{Re-order identity classes (factor ordering), according to position on the tree. This groups similar classes together which can be helpful, for example, when drawing violin plots.} \item{reorder.numeric}{Re-order identity classes according to position on the tree, assigning a numeric value ('1' is the leftmost node)} \item{verbose}{Show progress updates} } \value{ A Seurat object where the cluster tree can be accessed with \code{\link{Tool}} } \description{ Constructs a phylogenetic tree relating the 'average' cell from each identity class. Tree is estimated based on a distance matrix constructed in either gene expression space or PCA space. } \details{ Note that the tree is calculated for an 'average' cell, so gene expression or PC scores are averaged across all cells in an identity class before the tree is constructed. } \examples{ \dontrun{ if (requireNamespace("ape", quietly = TRUE)) { data("pbmc_small") pbmc_small pbmc_small <- BuildClusterTree(object = pbmc_small) Tool(object = pbmc_small, slot = 'BuildClusterTree') } } } \concept{tree} Seurat/man/TopCells.Rd0000644000176200001440000000157214525500037014312 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/objects.R \name{TopCells} \alias{TopCells} \title{Find cells with highest scores for a given dimensional reduction technique} \usage{ TopCells(object, dim = 1, ncells = 20, balanced = FALSE, ...)
} \arguments{ \item{object}{DimReduc object} \item{dim}{Dimension to use} \item{ncells}{Number of cells to return} \item{balanced}{Return an equal number of cells with both + and - scores.} \item{...}{Extra parameters passed to \code{\link{Embeddings}}} } \value{ Returns a vector of cells } \description{ Returns a list of cells with the highest scores for a given dimensional reduction component } \examples{ data("pbmc_small") pbmc_small head(TopCells(object = pbmc_small[["pca"]])) # Can specify which dimension and how many cells to return TopCells(object = pbmc_small[["pca"]], dim = 2, ncells = 5) } \concept{objects} Seurat/man/FindBridgeIntegrationAnchors.Rd0000644000176200001440000000416214525500037020302 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/integration.R \name{FindBridgeIntegrationAnchors} \alias{FindBridgeIntegrationAnchors} \title{Find integration bridge anchors between query and extended bridge-reference} \usage{ FindBridgeIntegrationAnchors( extended.reference, query, query.assay = NULL, dims = 1:30, scale = FALSE, reduction = c("lsiproject", "pcaproject"), integration.reduction = c("direct", "cca"), verbose = TRUE ) } \arguments{ \item{extended.reference}{BridgeReferenceSet object generated from \code{\link{PrepareBridgeReference}}} \item{query}{A query Seurat object} \item{query.assay}{Assay name for query-bridge integration} \item{dims}{Dimensions used for query-bridge integration} \item{scale}{Whether to scale the query data for projection} \item{reduction}{Dimensional reduction to perform when finding anchors. Options are: \itemize{ \item{pcaproject: Project the PCA from the bridge onto the query. We recommend using PCA when bridge and query datasets are from scRNA-seq} \item{lsiproject: Project the LSI from the bridge onto the query. We recommend using LSI when bridge and query datasets are from scATAC-seq or scCUT&TAG data. This requires that LSI or supervised LSI has been computed for the bridge dataset, and the same features (eg, peaks or genome bins) are present in both the bridge and query. } }} \item{integration.reduction}{Dimensional reduction to perform when finding anchors between query and reference. Options are: \itemize{ \item{direct: find anchors directly on the bridge representation space} \item{cca: perform CCA on the bridge representation space and then find anchors } }} \item{verbose}{Print messages and progress} } \value{ Returns an \code{AnchorSet} object that can be used as input to \code{\link{IntegrateEmbeddings}}. } \description{ Find a set of anchors between a unimodal query and a unimodal reference using a pre-computed \code{\link{BridgeReferenceSet}}. These integration anchors can later be used to integrate the query and reference using \code{\link{IntegrateEmbeddings}}.
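}
\examples{
\dontrun{
# A minimal sketch, not a tested workflow: "extended.reference" is assumed to
# be a BridgeReferenceSet created with PrepareBridgeReference(), and "query"
# a unimodal (e.g. scATAC-seq) Seurat object; the assay name "ATAC" and the
# dimensions used are illustrative placeholders.
bridge.anchors <- FindBridgeIntegrationAnchors(
  extended.reference = extended.reference,
  query = query,
  query.assay = "ATAC",
  reduction = "lsiproject",
  dims = 2:50
)
# The returned AnchorSet can then be passed to IntegrateEmbeddings()
}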
} Seurat/man/FindAllMarkers.Rd0000644000176200001440000001502614525500037015422 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/differential_expression.R \name{FindAllMarkers} \alias{FindAllMarkers} \alias{FindAllMarkersNode} \title{Gene expression markers for all identity classes} \usage{ FindAllMarkers( object, assay = NULL, features = NULL, logfc.threshold = 0.1, test.use = "wilcox", slot = "data", min.pct = 0.01, min.diff.pct = -Inf, node = NULL, verbose = TRUE, only.pos = FALSE, max.cells.per.ident = Inf, random.seed = 1, latent.vars = NULL, min.cells.feature = 3, min.cells.group = 3, mean.fxn = NULL, fc.name = NULL, base = 2, return.thresh = 0.01, densify = FALSE, ... ) } \arguments{ \item{object}{An object} \item{assay}{Assay to use in differential expression testing} \item{features}{Genes to test. Default is to use all genes} \item{logfc.threshold}{Limit testing to genes which show, on average, at least X-fold difference (log-scale) between the two groups of cells. Default is 0.1 Increasing logfc.threshold speeds up the function, but can miss weaker signals.} \item{test.use}{Denotes which test to use. Available options are: \itemize{ \item{"wilcox"} : Identifies differentially expressed genes between two groups of cells using a Wilcoxon Rank Sum test (default); will use a fast implementation by Presto if installed \item{"wilcox_limma"} : Identifies differentially expressed genes between two groups of cells using the limma implementation of the Wilcoxon Rank Sum test; set this option to reproduce results from Seurat v4 \item{"bimod"} : Likelihood-ratio test for single cell gene expression, (McDavid et al., Bioinformatics, 2013) \item{"roc"} : Identifies 'markers' of gene expression using ROC analysis. For each gene, evaluates (using AUC) a classifier built on that gene alone, to classify between two groups of cells. An AUC value of 1 means that expression values for this gene alone can perfectly classify the two groupings (i.e. Each of the cells in cells.1 exhibit a higher level than each of the cells in cells.2). An AUC value of 0 also means there is perfect classification, but in the other direction. A value of 0.5 implies that the gene has no predictive power to classify the two groups. Returns a 'predictive power' (abs(AUC-0.5) * 2) ranked matrix of putative differentially expressed genes. \item{"t"} : Identify differentially expressed genes between two groups of cells using the Student's t-test. \item{"negbinom"} : Identifies differentially expressed genes between two groups of cells using a negative binomial generalized linear model. Use only for UMI-based datasets \item{"poisson"} : Identifies differentially expressed genes between two groups of cells using a poisson generalized linear model. Use only for UMI-based datasets \item{"LR"} : Uses a logistic regression framework to determine differentially expressed genes. Constructs a logistic regression model predicting group membership based on each feature individually and compares this to a null model with a likelihood ratio test. \item{"MAST"} : Identifies differentially expressed genes between two groups of cells using a hurdle model tailored to scRNA-seq data. Utilizes the MAST package to run the DE testing. 
\item{"DESeq2"} : Identifies differentially expressed genes between two groups of cells based on a model using DESeq2 which uses a negative binomial distribution (Love et al, Genome Biology, 2014).This test does not support pre-filtering of genes based on average difference (or percent detection rate) between cell groups. However, genes may be pre-filtered based on their minimum detection rate (min.pct) across both cell groups. To use this method, please install DESeq2, using the instructions at https://bioconductor.org/packages/release/bioc/html/DESeq2.html }} \item{slot}{Slot to pull data from; note that if \code{test.use} is "negbinom", "poisson", or "DESeq2", \code{slot} will be set to "counts"} \item{min.pct}{only test genes that are detected in a minimum fraction of min.pct cells in either of the two populations. Meant to speed up the function by not testing genes that are very infrequently expressed. Default is 0.01} \item{min.diff.pct}{only test genes that show a minimum difference in the fraction of detection between the two groups. Set to -Inf by default} \item{node}{A node to find markers for and all its children; requires \code{\link{BuildClusterTree}} to have been run previously; replaces \code{FindAllMarkersNode}} \item{verbose}{Print a progress bar once expression testing begins} \item{only.pos}{Only return positive markers (FALSE by default)} \item{max.cells.per.ident}{Down sample each identity class to a max number. Default is no downsampling. Not activated by default (set to Inf)} \item{random.seed}{Random seed for downsampling} \item{latent.vars}{Variables to test, used only when \code{test.use} is one of 'LR', 'negbinom', 'poisson', or 'MAST'} \item{min.cells.feature}{Minimum number of cells expressing the feature in at least one of the two groups, currently only used for poisson and negative binomial tests} \item{min.cells.group}{Minimum number of cells in one of the groups} \item{mean.fxn}{Function to use for fold change or average difference calculation. If NULL, the appropriate function will be chose according to the slot used} \item{fc.name}{Name of the fold change, average difference, or custom function column in the output data.frame. If NULL, the fold change column will be named according to the logarithm base (eg, "avg_log2FC"), or if using the scale.data slot "avg_diff".} \item{base}{The base with respect to which logarithms are computed.} \item{return.thresh}{Only return markers that have a p-value < return.thresh, or a power > return.thresh (if the test is ROC)} \item{densify}{Convert the sparse matrix to a dense form before running the DE test. This can provide speedups but might require higher memory; default is FALSE} \item{...}{Arguments passed to other methods and to specific DE methods} } \value{ Matrix containing a ranked list of putative markers, and associated statistics (p-values, ROC score, etc.) 
} \description{ Finds markers (differentially expressed genes) for each of the identity classes in a dataset } \examples{ data("pbmc_small") # Find markers for all clusters all.markers <- FindAllMarkers(object = pbmc_small) head(x = all.markers) \dontrun{ # Pass a value to node as a replacement for FindAllMarkersNode pbmc_small <- BuildClusterTree(object = pbmc_small) all.markers <- FindAllMarkers(object = pbmc_small, node = 4) head(x = all.markers) } } \concept{differential_expression} Seurat/man/GroupCorrelation.Rd0000644000176200001440000000212514525500037016056 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/utilities.R \name{GroupCorrelation} \alias{GroupCorrelation} \title{Compute the correlation of features broken down by groups with another covariate} \usage{ GroupCorrelation( object, assay = NULL, slot = "scale.data", var = NULL, group.assay = NULL, min.cells = 5, ngroups = 6, do.plot = TRUE ) } \arguments{ \item{object}{Seurat object} \item{assay}{Assay to pull the data from} \item{slot}{Slot in the assay to pull feature expression data from (counts, data, or scale.data)} \item{var}{Variable with which to correlate the features} \item{group.assay}{Compute the gene groups based off the data in this assay.} \item{min.cells}{Only compute for genes in at least this many cells} \item{ngroups}{Number of groups to split into} \item{do.plot}{Display the group correlation boxplot (via \code{GroupCorrelationPlot})} } \value{ A Seurat object with the correlation stored in metafeatures } \description{ Compute the correlation of features broken down by groups with another covariate } \concept{utilities} Seurat/man/GetTransferPredictions.Rd0000644000176200001440000000207714525500037017216 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/integration.R \name{GetTransferPredictions} \alias{GetTransferPredictions} \title{Get the predicted identity} \usage{ GetTransferPredictions( object, assay = "predictions", slot = "data", score.filter = 0.75 ) } \arguments{ \item{object}{Seurat object} \item{assay}{Name of the assay holding the predictions} \item{slot}{Slot of the assay in which the prediction scores are stored} \item{score.filter}{Return "Unassigned" for any cell with a score less than this value} } \value{ Returns a vector of predicted class names } \description{ Utility function to easily pull out the name of the class with the maximum prediction. This is useful if you've set \code{prediction.assay = TRUE} in \code{\link{TransferData}} and want to have a vector with the predicted class. } \examples{ \dontrun{ prediction.assay <- TransferData(anchorset = anchors, refdata = reference$class) query[["predictions"]] <- prediction.assay query$predicted.id <- GetTransferPredictions(query) } } \concept{integration} Seurat/man/as.CellDataSet.Rd0000644000176200001440000000116014525500037015305 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/generics.R, R/objects.R \name{as.CellDataSet} \alias{as.CellDataSet} \alias{as.CellDataSet.Seurat} \title{Convert objects to CellDataSet objects} \usage{ as.CellDataSet(x, ...) \method{as.CellDataSet}{Seurat}(x, assay = NULL, reduction = NULL, ...) 
} \arguments{ \item{x}{An object to convert to class \code{CellDataSet}} \item{...}{Arguments passed to other methods} \item{assay}{Assay to convert} \item{reduction}{Name of DimReduc to set to main reducedDim in cds} } \description{ Convert objects to CellDataSet objects } \concept{objects} Seurat/man/FindVariableFeatures.Rd0000644000176200001440000001127614525500037016614 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/generics.R, R/preprocessing.R \name{FindVariableFeatures} \alias{FindVariableFeatures} \alias{FindVariableGenes} \alias{FindVariableFeatures.V3Matrix} \alias{FindVariableFeatures.Assay} \alias{FindVariableFeatures.SCTAssay} \alias{FindVariableFeatures.Seurat} \title{Find variable features} \usage{ FindVariableFeatures(object, ...) \method{FindVariableFeatures}{V3Matrix}( object, selection.method = "vst", loess.span = 0.3, clip.max = "auto", mean.function = FastExpMean, dispersion.function = FastLogVMR, num.bin = 20, binning.method = "equal_width", verbose = TRUE, ... ) \method{FindVariableFeatures}{Assay}( object, selection.method = "vst", loess.span = 0.3, clip.max = "auto", mean.function = FastExpMean, dispersion.function = FastLogVMR, num.bin = 20, binning.method = "equal_width", nfeatures = 2000, mean.cutoff = c(0.1, 8), dispersion.cutoff = c(1, Inf), verbose = TRUE, ... ) \method{FindVariableFeatures}{SCTAssay}(object, nfeatures = 2000, ...) \method{FindVariableFeatures}{Seurat}( object, assay = NULL, selection.method = "vst", loess.span = 0.3, clip.max = "auto", mean.function = FastExpMean, dispersion.function = FastLogVMR, num.bin = 20, binning.method = "equal_width", nfeatures = 2000, mean.cutoff = c(0.1, 8), dispersion.cutoff = c(1, Inf), verbose = TRUE, ... ) } \arguments{ \item{object}{An object} \item{...}{Arguments passed to other methods} \item{selection.method}{How to choose top variable features. Choose one of : \itemize{ \item \dQuote{\code{vst}}: First, fits a line to the relationship of log(variance) and log(mean) using local polynomial regression (loess). Then standardizes the feature values using the observed mean and expected variance (given by the fitted line). Feature variance is then calculated on the standardized values after clipping to a maximum (see clip.max parameter). \item \dQuote{\code{mean.var.plot}} (mvp): First, uses a function to calculate average expression (mean.function) and dispersion (dispersion.function) for each feature. Next, divides features into \code{num.bin} (deafult 20) bins based on their average expression, and calculates z-scores for dispersion within each bin. The purpose of this is to identify variable features while controlling for the strong relationship between variability and average expression \item \dQuote{\code{dispersion}} (disp): selects the genes with the highest dispersion values }} \item{loess.span}{(vst method) Loess span parameter used when fitting the variance-mean relationship} \item{clip.max}{(vst method) After standardization values larger than clip.max will be set to clip.max; default is 'auto' which sets this value to the square root of the number of cells} \item{mean.function}{Function to compute x-axis value (average expression). Default is to take the mean of the detected (i.e. non-zero) values} \item{dispersion.function}{Function to compute y-axis value (dispersion). 
Default is to take the standard deviation of all values} \item{num.bin}{Total number of bins to use in the scaled analysis (default is 20)} \item{binning.method}{Specifies how the bins should be computed. Available methods are: \itemize{ \item \dQuote{\code{equal_width}}: each bin is of equal width along the x-axis (default) \item \dQuote{\code{equal_frequency}}: each bin contains an equal number of features (can increase statistical power to detect overdispersed eatures at high expression values, at the cost of reduced resolution along the x-axis) }} \item{verbose}{show progress bar for calculations} \item{nfeatures}{Number of features to select as top variable features; only used when \code{selection.method} is set to \code{'dispersion'} or \code{'vst'}} \item{mean.cutoff}{A two-length numeric vector with low- and high-cutoffs for feature means} \item{dispersion.cutoff}{A two-length numeric vector with low- and high-cutoffs for feature dispersions} \item{assay}{Assay to use} } \description{ Identifies features that are outliers on a 'mean variability plot'. } \details{ For the mean.var.plot method: Exact parameter settings may vary empirically from dataset to dataset, and based on visual inspection of the plot. Setting the y.cutoff parameter to 2 identifies features that are more than two standard deviations away from the average dispersion within a bin. The default X-axis function is the mean expression level, and for Y-axis it is the log(Variance/mean). All mean/variance calculations are not performed in log-space, but the results are reported in log-space - see relevant functions for exact details. } \concept{preprocessing} Seurat/man/L2CCA.Rd0000644000176200001440000000054114525500037013344 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/dimensional_reduction.R \name{L2CCA} \alias{L2CCA} \title{L2-Normalize CCA} \usage{ L2CCA(object, ...) } \arguments{ \item{object}{Seurat object} \item{\dots}{Additional parameters to L2Dim.} } \description{ Perform l2 normalization on CCs } \concept{dimensional_reduction} Seurat/man/Read10X_probe_metadata.Rd0000644000176200001440000000122614525500037016754 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/preprocessing.R \name{Read10X_probe_metadata} \alias{Read10X_probe_metadata} \title{Read10x Probe Metadata} \usage{ Read10X_probe_metadata(data.dir, filename = "raw_probe_bc_matrix.h5") } \arguments{ \item{data.dir}{The directory where the file is located.} \item{filename}{The name of the file containing the raw probe barcode matrix in HDF5 format. The default filename is 'raw_probe_bc_matrix.h5'.} } \value{ Returns a data.frame containing the probe metadata. } \description{ This function reads the probe metadata from a 10x Genomics probe barcode matrix file in HDF5 format. } Seurat/man/MVP.Rd0000644000176200001440000000131214525500037013217 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/preprocessing5.R \name{MVP} \alias{MVP} \title{Find variable features based on mean.var.plot} \usage{ MVP( data, verbose = TRUE, nselect = 2000L, mean.cutoff = c(0.1, 8), dispersion.cutoff = c(1, Inf), ... 
) } \arguments{ \item{data}{Data matrix} \item{verbose}{Whether to print messages and progress bars} \item{nselect}{Number of features to select based on dispersion values} \item{mean.cutoff}{Numeric of length two specifying the min and max values} \item{dispersion.cutoff}{Numeric of length two specifying the min and max values} } \description{ Find variable features based on mean.var.plot } \keyword{internal} Seurat/man/ImageDimPlot.Rd0000644000176200001440000000640514525500037015100 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/visualization.R \name{ImageDimPlot} \alias{ImageDimPlot} \title{Spatial Cluster Plots} \usage{ ImageDimPlot( object, fov = NULL, boundaries = NULL, group.by = NULL, split.by = NULL, cols = NULL, shuffle.cols = FALSE, size = 0.5, molecules = NULL, mols.size = 0.1, mols.cols = NULL, mols.alpha = 1, nmols = 1000, alpha = 1, border.color = "white", border.size = NULL, na.value = "grey50", dark.background = TRUE, crop = FALSE, cells = NULL, overlap = FALSE, axes = FALSE, combine = TRUE, coord.fixed = TRUE, flip_xy = TRUE ) } \arguments{ \item{object}{A \code{\link[SeuratObject]{Seurat}} object} \item{fov}{Name of FOV to plot} \item{boundaries}{A vector of segmentation boundaries per image to plot; can be a character vector, a named character vector, or a named list. Names should be the names of FOVs and values should be the names of segmentation boundaries} \item{group.by}{Name of one or more metadata columns to group (color) cells by (for example, orig.ident); pass 'ident' to group by identity class} \item{split.by}{A factor in object metadata to split the plot by, pass 'ident' to split by cell identity'} \item{cols}{Vector of colors, each color corresponds to an identity class. This may also be a single character or numeric value corresponding to a palette as specified by \code{\link[RColorBrewer]{brewer.pal.info}}. By default, ggplot2 assigns colors. We also include a number of palettes from the pals package. See \code{\link{DiscretePalette}} for details.} \item{shuffle.cols}{Randomly shuffle colors when a palette or vector of colors is provided to \code{cols}} \item{size}{Point size for cells when plotting centroids} \item{molecules}{A vector of molecules to plot} \item{mols.size}{Point size for molecules} \item{mols.cols}{A vector of color for molecules. The "Set1" palette from RColorBrewer is used by default.} \item{mols.alpha}{Alpha value for molecules, should be between 0 and 1} \item{nmols}{Max number of each molecule specified in `molecules` to plot} \item{alpha}{Alpha value for plotting (default is 1)} \item{border.color}{Color of cell segmentation border; pass \code{NA} to suppress borders for segmentation-based plots} \item{border.size}{Thickness of cell segmentation borders; pass \code{NA} to suppress borders for centroid-based plots} \item{na.value}{Color value for NA points when using custom scale} \item{dark.background}{Set plot background to black} \item{crop}{Crop the plots to area with cells only} \item{cells}{Vector of cells to plot (default is all cells)} \item{overlap}{Overlay boundaries from a single image to create a single plot; if \code{TRUE}, then boundaries are stacked in the order they're given (first is lowest)} \item{axes}{Keep axes and panel background} \item{combine}{Combine plots into a single \code{patchwork} ggplot object.If \code{FALSE}, return a list of ggplot objects} \item{coord.fixed}{Plot cartesian coordinates with fixed aspect ratio} \item{flip_xy}{Flag to flip X and Y axes. 
Default is TRUE.} } \value{ If \code{combine = TRUE}, a \code{patchwork} ggplot object; otherwise, a list of ggplot objects } \description{ Visualize clusters or other categorical groupings in a spatial context } Seurat/man/RunGraphLaplacian.Rd0000644000176200001440000000242714525500037016120 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/generics.R, R/integration.R \name{RunGraphLaplacian} \alias{RunGraphLaplacian} \alias{RunGraphLaplacian.Seurat} \alias{RunGraphLaplacian.default} \title{Run Graph Laplacian Eigendecomposition} \usage{ RunGraphLaplacian(object, ...) \method{RunGraphLaplacian}{Seurat}( object, graph, reduction.name = "lap", reduction.key = "LAP_", n = 50, verbose = TRUE, ... ) \method{RunGraphLaplacian}{default}(object, n = 50, reduction.key = "LAP_", verbose = TRUE, ...) } \arguments{ \item{object}{A Seurat object} \item{...}{Arguments passed to eigs_sym} \item{graph}{The name of the graph} \item{reduction.name}{Dimensional reduction name; 'lap' by default} \item{reduction.key}{Dimensional reduction key; specifies the string before the number in the dimension names. 'LAP_' by default} \item{n}{Total number of eigenvectors to compute and store (50 by default)} \item{verbose}{Print messages and progress} } \value{ Returns a Seurat object with the graph Laplacian eigenvectors stored in the reductions slot } \description{ Run a graph Laplacian dimensionality reduction. It is used as a low-dimensional representation of a cell-cell graph. The input graph should be symmetric. } \concept{dimensional_reduction} Seurat/man/SetQuantile.Rd0000644000176200001440000000135614525500037015023 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/utilities.R \name{SetQuantile} \alias{SetQuantile} \title{Find the Quantile of Data} \usage{ SetQuantile(cutoff, data) } \arguments{ \item{cutoff}{The cutoff to turn into a quantile} \item{data}{The data to find the quantile of} } \value{ The numerical representation of the quantile } \description{ Converts a quantile in character form to a number with respect to some data. String form for a quantile is represented as a number prefixed with \dQuote{q}; for example, 10th quantile is \dQuote{q10} while 2nd quantile is \dQuote{q2}. Will only take a quantile of non-zero data values } \examples{ set.seed(42) SetQuantile('q10', sample(1:100, 10)) } \concept{utilities} Seurat/man/AnnotateAnchors.Rd0000644000176200001440000000256414525500037015656 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/generics.R, R/integration.R \name{AnnotateAnchors} \alias{AnnotateAnchors} \alias{AnnotateAnchors.default} \alias{AnnotateAnchors.IntegrationAnchorSet} \alias{AnnotateAnchors.TransferAnchorSet} \title{Add info to anchor matrix} \usage{ AnnotateAnchors(anchors, vars, slot, ...) \method{AnnotateAnchors}{default}( anchors, vars = NULL, slot = NULL, object.list, assay = NULL, ... ) \method{AnnotateAnchors}{IntegrationAnchorSet}( anchors, vars = NULL, slot = NULL, object.list = NULL, assay = NULL, ... ) \method{AnnotateAnchors}{TransferAnchorSet}( anchors, vars = NULL, slot = NULL, reference = NULL, query = NULL, assay = NULL, ...
) } \arguments{ \item{anchors}{An \code{\link{AnchorSet}} object} \item{vars}{Variables to pull for each object via FetchData} \item{slot}{Slot to pull feature data for} \item{...}{Arguments passed to other methods} \item{object.list}{List of Seurat objects} \item{assay}{Specify the Assay per object if annotating with expression data} \item{reference}{Reference object used in \code{\link{FindTransferAnchors}}} \item{query}{Query object used in \code{\link{FindTransferAnchors}}} } \value{ Returns the anchor dataframe with additional columns for annotation metadata } \description{ Add info to anchor matrix } \concept{integration} Seurat/man/FindBridgeAnchor.Rd0000644000176200001440000000541214525500037015712 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/integration.R \name{FindBridgeAnchor} \alias{FindBridgeAnchor} \title{Find bridge anchors between two unimodal datasets} \usage{ FindBridgeAnchor( object.list, bridge.object, object.reduction, bridge.reduction, anchor.type = c("Transfer", "Integration"), reference = NULL, laplacian.reduction = "lap", laplacian.dims = 1:50, reduction = c("direct", "cca"), bridge.assay.name = "Bridge", reference.bridge.stored = FALSE, k.anchor = 20, k.score = 50, verbose = TRUE, ... ) } \arguments{ \item{object.list}{A list of Seurat objects} \item{bridge.object}{A multi-omic bridge Seurat object which is used as the basis to represent unimodal datasets} \item{object.reduction}{A list of dimensional reductions from object.list to be reconstructed by bridge.object} \item{bridge.reduction}{A list of dimensional reductions from bridge.object used to reconstruct object.reduction} \item{anchor.type}{The type of anchors. Can be one of: \itemize{ \item{Integration: Generate IntegrationAnchors for integration} \item{Transfer: Generate TransferAnchors for transferring data} }} \item{reference}{A vector specifying the object/s to be used as a reference during integration or data transfer.} \item{laplacian.reduction}{Name of the bridge graph Laplacian dimensional reduction} \item{laplacian.dims}{Dimensions used for the bridge graph Laplacian dimensional reduction} \item{reduction}{Dimensional reduction to perform when finding anchors. Can be one of: \itemize{ \item{cca: Canonical correlation analysis} \item{direct: Use assay data as a dimensional reduction} }} \item{bridge.assay.name}{Assay name used for the bridge object reconstruction value (default is 'Bridge')} \item{reference.bridge.stored}{Whether the reference has stored the bridge dictionary representation} \item{k.anchor}{How many neighbors (k) to use when picking anchors} \item{k.score}{How many neighbors (k) to use when scoring anchors} \item{verbose}{Print messages and progress} \item{...}{Additional parameters passed to \code{FindIntegrationAnchors} or \code{FindTransferAnchors}} } \value{ Returns an \code{\link{AnchorSet}} object that can be used as input to \code{\link{IntegrateEmbeddings}} or \code{\link{MapQuery}} } \description{ First, the bridge object is used to reconstruct two single-modality profiles, and those cells are then projected into the bridge graph Laplacian space. Next, a set of anchors is found between the two single-modality objects. These anchors can later be used to integrate embeddings or transfer data from the reference to the query object using \code{\link{MapQuery}}. } \details{ \itemize{ \item{ Bridge cells reconstruction } \item{ Find anchors between objects. It can be either IntegrationAnchors or TransferAnchors.
} } } Seurat/man/CaseMatch.Rd0000644000176200001440000000113214525500037014405 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/utilities.R \name{CaseMatch} \alias{CaseMatch} \title{Match the case of character vectors} \usage{ CaseMatch(search, match) } \arguments{ \item{search}{A vector of search terms} \item{match}{A vector of characters whose case should be matched} } \value{ Values from search present in match with the case of match } \description{ Match the case of character vectors } \examples{ data("pbmc_small") cd_genes <- c('Cd79b', 'Cd19', 'Cd200') CaseMatch(search = cd_genes, match = rownames(x = pbmc_small)) } \concept{utilities} Seurat/man/ReadParseBio.Rd0000644000176200001440000000064014525500037015060 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/convenience.R \name{ReadParseBio} \alias{ReadParseBio} \title{Read output from Parse Biosciences} \usage{ ReadParseBio(data.dir, ...) } \arguments{ \item{data.dir}{Directory containing the data files} \item{...}{Extra parameters passed to \code{\link{ReadMtx}}} } \description{ Read output from Parse Biosciences } \concept{convenience} Seurat/man/as.sparse.Rd0000644000176200001440000000256714525500037014471 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/objects.R, R/utilities.R \name{as.sparse.H5Group} \alias{as.sparse.H5Group} \alias{as.data.frame.Matrix} \title{Cast to Sparse} \usage{ \method{as.sparse}{H5Group}(x, ...) \method{as.data.frame}{Matrix}( x, row.names = NULL, optional = FALSE, ..., stringsAsFactors = getOption(x = "stringsAsFactors", default = FALSE) ) } \arguments{ \item{x}{An object} \item{...}{Arguments passed to other methods} \item{row.names}{\code{NULL} or a character vector giving the row names for the data; missing values are not allowed} \item{optional}{logical. If \code{TRUE}, setting row names and converting column names (to syntactic names: see \code{\link[base]{make.names}}) is optional. Note that all of \R's \pkg{base} package \code{as.data.frame()} methods use \code{optional} only for column names treatment, basically with the meaning of \code{\link[base]{data.frame}(*, check.names = !optional)}. 
See also the \code{make.names} argument of the \code{matrix} method.} \item{stringsAsFactors}{logical: should the character vector be converted to a factor?} } \value{ \code{as.data.frame.Matrix}: A data frame representation of the S4 Matrix } \description{ Cast to Sparse } \seealso{ \code{\link[SeuratObject:as.sparse]{SeuratObject::as.sparse}} } \concept{objects} \concept{utilities} Seurat/man/MinMax.Rd0000644000176200001440000000126214525500037013752 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/utilities.R \name{MinMax} \alias{MinMax} \title{Apply a ceiling and floor to all values in a matrix} \usage{ MinMax(data, min, max) } \arguments{ \item{data}{Matrix or data frame} \item{min}{all values below this min value will be replaced with min} \item{max}{all values above this max value will be replaced with max} } \value{ Returns matrix after performing these floor and ceil operations } \description{ Apply a ceiling and floor to all values in a matrix } \examples{ mat <- matrix(data = rbinom(n = 25, size = 20, prob = 0.2 ), nrow = 5) mat MinMax(data = mat, min = 4, max = 5) } \concept{utilities} Seurat/man/AggregateExpression.Rd0000644000176200001440000000414414525500037016531 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/utilities.R \name{AggregateExpression} \alias{AggregateExpression} \title{Aggregated feature expression by identity class} \usage{ AggregateExpression( object, assays = NULL, features = NULL, return.seurat = FALSE, group.by = "ident", add.ident = NULL, normalization.method = "LogNormalize", scale.factor = 10000, margin = 1, verbose = TRUE, ... ) } \arguments{ \item{object}{Seurat object} \item{assays}{Which assays to use. Default is all assays} \item{features}{Features to analyze. Default is all features in the assay} \item{return.seurat}{Whether to return the data as a Seurat object. Default is FALSE} \item{group.by}{Category (or vector of categories) for grouping (e.g, ident, replicate, celltype); 'ident' by default To use multiple categories, specify a vector, such as c('ident', 'replicate', 'celltype')} \item{add.ident}{(Deprecated). Place an additional label on each cell prior to pseudobulking} \item{normalization.method}{Method for normalization, see \code{\link{NormalizeData}}} \item{scale.factor}{Scale factor for normalization, see \code{\link{NormalizeData}}} \item{margin}{Margin to perform CLR normalization, see \code{\link{NormalizeData}}} \item{verbose}{Print messages and show progress bar} \item{...}{Arguments to be passed to methods such as \code{\link{CreateSeuratObject}}} } \value{ Returns a matrix with genes as rows, identity classes as columns. If return.seurat is TRUE, returns an object of class \code{\link{Seurat}}. } \description{ Returns summed counts ("pseudobulk") for each identity class. } \details{ If \code{return.seurat = TRUE}, aggregated values are placed in the 'counts' layer of the returned object. The data is then normalized by running \code{\link{NormalizeData}} on the aggregated counts. \code{\link{ScaleData}} is then run on the default assay before returning the object. 
} \examples{ \dontrun{ data("pbmc_small") head(AggregateExpression(object = pbmc_small)$RNA) head(AggregateExpression(object = pbmc_small, group.by = c('ident', 'groups'))$RNA) } } \concept{utilities} Seurat/man/ImageFeaturePlot.Rd0000644000176200001440000001063514525500037015762 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/visualization.R \name{ImageFeaturePlot} \alias{ImageFeaturePlot} \title{Spatial Feature Plots} \usage{ ImageFeaturePlot( object, features, fov = NULL, boundaries = NULL, cols = if (isTRUE(x = blend)) { c("lightgrey", "#ff0000", "#00ff00") } else { c("lightgrey", "firebrick1") }, size = 0.5, min.cutoff = NA, max.cutoff = NA, split.by = NULL, molecules = NULL, mols.size = 0.1, mols.cols = NULL, nmols = 1000, alpha = 1, border.color = "white", border.size = NULL, dark.background = TRUE, blend = FALSE, blend.threshold = 0.5, crop = FALSE, cells = NULL, scale = c("feature", "all", "none"), overlap = FALSE, axes = FALSE, combine = TRUE, coord.fixed = TRUE ) } \arguments{ \item{object}{Seurat object} \item{features}{Vector of features to plot. Features can come from: \itemize{ \item An \code{Assay} feature (e.g. a gene name - "MS4A1") \item A column name from meta.data (e.g. mitochondrial percentage - "percent.mito") \item A column name from a \code{DimReduc} object corresponding to the cell embedding values (e.g. the PC 1 scores - "PC_1") }} \item{fov}{Name of FOV to plot} \item{boundaries}{A vector of segmentation boundaries per image to plot; can be a character vector, a named character vector, or a named list. Names should be the names of FOVs and values should be the names of segmentation boundaries} \item{cols}{The two colors to form the gradient over. Provide as string vector with the first color corresponding to low values, the second to high. Also accepts a Brewer color scale or vector of colors. Note: this will bin the data into number of colors provided. When blend is \code{TRUE}, takes anywhere from 1-3 colors: \describe{ \item{1 color:}{Treated as color for double-negatives, will use default colors 2 and 3 for per-feature expression} \item{2 colors:}{Treated as colors for per-feature expression, will use default color 1 for double-negatives} \item{3+ colors:}{First color used for double-negatives, colors 2 and 3 used for per-feature expression, all others ignored} }} \item{size}{Point size for cells when plotting centroids} \item{min.cutoff, max.cutoff}{Vector of minimum and maximum cutoff values for each feature, may specify quantile in the form of 'q##' where '##' is the quantile (eg, 'q1', 'q10')} \item{split.by}{A factor in object metadata to split the plot by, pass 'ident' to split by cell identity'} \item{molecules}{A vector of molecules to plot} \item{mols.size}{Point size for molecules} \item{mols.cols}{A vector of color for molecules. 
The "Set1" palette from RColorBrewer is used by default.} \item{nmols}{Max number of each molecule specified in `molecules` to plot} \item{alpha}{Alpha value for plotting (default is 1)} \item{border.color}{Color of cell segmentation border; pass \code{NA} to suppress borders for segmentation-based plots} \item{border.size}{Thickness of cell segmentation borders; pass \code{NA} to suppress borders for centroid-based plots} \item{dark.background}{Set plot background to black} \item{blend}{Scale and blend expression values to visualize coexpression of two features} \item{blend.threshold}{The color cutoff from weak signal to strong signal; ranges from 0 to 1.} \item{crop}{Crop the plots to area with cells only} \item{cells}{Vector of cells to plot (default is all cells)} \item{scale}{Set color scaling across multiple plots; choose from: \itemize{ \item \dQuote{\code{feature}}: Plots per-feature are scaled across splits \item \dQuote{\code{all}}: Plots per-feature are scaled across all features \item \dQuote{\code{none}}: Plots are not scaled; \strong{note}: setting \code{scale} to \dQuote{\code{none}} will result in color scales that are \emph{not} comparable between plots } Ignored if \code{blend = TRUE}} \item{overlap}{Overlay boundaries from a single image to create a single plot; if \code{TRUE}, then boundaries are stacked in the order they're given (first is lowest)} \item{axes}{Keep axes and panel background} \item{combine}{Combine plots into a single \code{\link[patchwork]{patchwork}ed} ggplot object. If \code{FALSE}, return a list of ggplot objects} \item{coord.fixed}{Plot cartesian coordinates with fixed aspect ratio} } \value{ If \code{combine = TRUE}, a \code{patchwork} ggplot object; otherwise, a list of ggplot objects } \description{ Visualize expression in a spatial context } Seurat/man/MULTIseqDemux.Rd0000644000176200001440000000243014525500037015165 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/preprocessing.R \name{MULTIseqDemux} \alias{MULTIseqDemux} \title{Demultiplex samples based on classification method from MULTI-seq (McGinnis et al., bioRxiv 2018)} \usage{ MULTIseqDemux( object, assay = "HTO", quantile = 0.7, autoThresh = FALSE, maxiter = 5, qrange = seq(from = 0.1, to = 0.9, by = 0.05), verbose = TRUE ) } \arguments{ \item{object}{Seurat object. Assumes that the specified assay data has been added} \item{assay}{Name of the multiplexing assay (HTO by default)} \item{quantile}{The quantile to use for classification} \item{autoThresh}{Whether to perform automated threshold finding to define the best quantile. Default is FALSE} \item{maxiter}{Maximum number of iterations if autoThresh = TRUE. Default is 5} \item{qrange}{A range of possible quantile values to try if autoThresh = TRUE} \item{verbose}{Prints the output} } \value{ A Seurat object with demultiplexing results stored at \code{object$MULTI_ID} } \description{ Identify singlets, doublets and negative cells from multiplexing experiments. Annotate singlets by tags. 
} \examples{ \dontrun{ object <- MULTIseqDemux(object) } } \references{ \url{https://www.biorxiv.org/content/10.1101/387241v1} } \concept{preprocessing} Seurat/man/CollapseEmbeddingOutliers.Rd0000644000176200001440000000230414525500037017647 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/visualization.R \name{CollapseEmbeddingOutliers} \alias{CollapseEmbeddingOutliers} \title{Move outliers towards center on dimension reduction plot} \usage{ CollapseEmbeddingOutliers( object, reduction = "umap", dims = 1:2, group.by = "ident", outlier.sd = 2, reduction.key = "UMAP_" ) } \arguments{ \item{object}{Seurat object} \item{reduction}{Name of DimReduc to adjust} \item{dims}{Dimensions to visualize} \item{group.by}{Group (color) cells in different ways (for example, orig.ident)} \item{outlier.sd}{Controls the outlier distance} \item{reduction.key}{Key for DimReduc that is returned} } \value{ Returns a DimReduc object with the modified embeddings } \description{ Move outliers towards center on dimension reduction plot } \examples{ \dontrun{ data("pbmc_small") pbmc_small <- FindClusters(pbmc_small, resolution = 1.1) pbmc_small <- RunUMAP(pbmc_small, dims = 1:5) DimPlot(pbmc_small, reduction = "umap") pbmc_small[["umap_new"]] <- CollapseEmbeddingOutliers(pbmc_small, reduction = "umap", reduction.key = 'umap_', outlier.sd = 0.5) DimPlot(pbmc_small, reduction = "umap_new") } } \concept{visualization} Seurat/man/L2Dim.Rd0000644000176200001440000000114114525500037013464 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/dimensional_reduction.R \name{L2Dim} \alias{L2Dim} \title{L2-normalization} \usage{ L2Dim(object, reduction, new.dr = NULL, new.key = NULL) } \arguments{ \item{object}{Seurat object} \item{reduction}{Dimensional reduction to normalize} \item{new.dr}{name of new dimensional reduction to store (default is olddr.l2)} \item{new.key}{name of key for new dimensional reduction} } \value{ Returns a \code{\link{Seurat}} object } \description{ Perform l2 normalization on given dimensional reduction } \concept{dimensional_reduction} Seurat/man/STARmap-class.Rd0000644000176200001440000000130114525500037015125 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/objects.R \docType{class} \name{STARmap-class} \alias{STARmap-class} \alias{STARmap} \title{The STARmap class} \description{ The STARmap class } \section{Slots}{ \describe{ \item{\code{assay}}{Name of assay to associate image data with; will give this image priority for visualization when the assay is set as the active/default assay in a \code{Seurat} object} \item{\code{key}}{A one-length character vector with the object's key; keys must be one or more alphanumeric characters followed by an underscore \dQuote{\code{_}} (regex pattern \dQuote{\code{^[a-zA-Z][a-zA-Z0-9]*_$}})} } } \concept{objects} \concept{spatial} Seurat/man/AutoPointSize.Rd0000644000176200001440000000112514525500037015334 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/visualization.R \name{AutoPointSize} \alias{AutoPointSize} \title{Automagically calculate a point size for ggplot2-based scatter plots} \usage{ AutoPointSize(data, raster = NULL) } \arguments{ \item{data}{A data frame being passed to ggplot2} \item{raster}{If TRUE, point size is set to 1} } \value{ The "optimal" point size for visualizing these data } \description{ It happens to look good } \examples{ df <- data.frame(x = rnorm(n 
= 10000), y = runif(n = 10000)) AutoPointSize(data = df) } \concept{visualization} Seurat/man/CreateSCTAssayObject.Rd0000644000176200001440000000223314525500037016465 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/objects.R \name{CreateSCTAssayObject} \alias{CreateSCTAssayObject} \title{Create a SCT Assay object} \usage{ CreateSCTAssayObject( counts, data, scale.data = NULL, umi.assay = "RNA", min.cells = 0, min.features = 0, SCTModel.list = NULL ) } \arguments{ \item{counts}{Unnormalized data such as raw counts or TPMs} \item{data}{Prenormalized data; if provided, do not pass \code{counts}} \item{scale.data}{a residual matrix} \item{umi.assay}{The UMI assay name. Default is RNA} \item{min.cells}{Include features detected in at least this many cells. Will subset the counts matrix as well. To reintroduce excluded features, create a new object with a lower cutoff} \item{min.features}{Include cells where at least this many features are detected} \item{SCTModel.list}{list of SCTModels} } \description{ Create a SCT object from a feature (e.g. gene) expression matrix and a list of SCTModels. The expected format of the input matrix is features x cells. } \details{ Non-unique cell or feature names are not allowed. Please make unique before calling this function. } \concept{objects} Seurat/man/IntegrateData.Rd0000644000176200001440000001357414525500037015306 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/integration.R \name{IntegrateData} \alias{IntegrateData} \title{Integrate data} \usage{ IntegrateData( anchorset, new.assay.name = "integrated", normalization.method = c("LogNormalize", "SCT"), features = NULL, features.to.integrate = NULL, dims = 1:30, k.weight = 100, weight.reduction = NULL, sd.weight = 1, sample.tree = NULL, preserve.order = FALSE, eps = 0, verbose = TRUE ) } \arguments{ \item{anchorset}{An \code{\link{AnchorSet}} object generated by \code{\link{FindIntegrationAnchors}}} \item{new.assay.name}{Name for the new assay containing the integrated data} \item{normalization.method}{Name of normalization method used: LogNormalize or SCT} \item{features}{Vector of features to use when computing the PCA to determine the weights. Only set if you want a different set from those used in the anchor finding process} \item{features.to.integrate}{Vector of features to integrate. By default, will use the features used in anchor finding.} \item{dims}{Number of dimensions to use in the anchor weighting procedure} \item{k.weight}{Number of neighbors to consider when weighting anchors} \item{weight.reduction}{Dimension reduction to use when calculating anchor weights. 
This can be one of: \itemize{ \item{A string, specifying the name of a dimension reduction present in all objects to be integrated} \item{A vector of strings, specifying the name of a dimension reduction to use for each object to be integrated} \item{A vector of \code{\link{DimReduc}} objects, specifying the object to use for each object in the integration} \item{NULL, in which case a new PCA will be calculated and used to calculate anchor weights} } Note that, if specified, the requested dimension reduction will only be used for calculating anchor weights in the first merge between reference and query, as the merged object will subsequently contain more cells than was in query, and weights will need to be calculated for all cells in the object.} \item{sd.weight}{Controls the bandwidth of the Gaussian kernel for weighting} \item{sample.tree}{Specify the order of integration. Order of integration should be encoded in a matrix, where each row represents one of the pairwise integration steps. Negative numbers specify a dataset, positive numbers specify the integration results from a given row (the format of the merge matrix included in the \code{\link{hclust}} function output). For example: \code{matrix(c(-2, 1, -3, -1), ncol = 2)} gives: \if{html}{\out{
}}\preformatted{     [,1] [,2]
[1,]   -2   -3
[2,]    1   -1
}\if{html}{\out{
}} Which would cause dataset 2 and 3 to be integrated first, then the resulting object integrated with dataset 1. If NULL, the sample tree will be computed automatically.} \item{preserve.order}{Do not reorder objects based on size for each pairwise integration.} \item{eps}{Error bound on the neighbor finding algorithm (from \code{\link{RANN}})} \item{verbose}{Print progress bars and output} } \value{ Returns a \code{\link{Seurat}} object with a new integrated \code{\link{Assay}}. If \code{normalization.method = "LogNormalize"}, the integrated data is returned to the \code{data} slot and can be treated as log-normalized, corrected data. If \code{normalization.method = "SCT"}, the integrated data is returned to the \code{scale.data} slot and can be treated as centered, corrected Pearson residuals. } \description{ Perform dataset integration using a pre-computed \code{\link{AnchorSet}}. } \details{ The main steps of this procedure are outlined below. For a more detailed description of the methodology, please see Stuart, Butler, et al Cell 2019. \doi{10.1016/j.cell.2019.05.031}; \doi{10.1101/460147} For pairwise integration: \itemize{ \item{Construct a weights matrix that defines the association between each query cell and each anchor. These weights are computed as 1 - the distance between the query cell and the anchor divided by the distance of the query cell to the \code{k.weight}th anchor multiplied by the anchor score computed in \code{\link{FindIntegrationAnchors}}. We then apply a Gaussian kernel width a bandwidth defined by \code{sd.weight} and normalize across all \code{k.weight} anchors.} \item{Compute the anchor integration matrix as the difference between the two expression matrices for every pair of anchor cells} \item{Compute the transformation matrix as the product of the integration matrix and the weights matrix.} \item{Subtract the transformation matrix from the original expression matrix.} } For multiple dataset integration, we perform iterative pairwise integration. To determine the order of integration (if not specified via \code{sample.tree}), we \itemize{ \item{Define a distance between datasets as the total number of cells in the smaller dataset divided by the total number of anchors between the two datasets.} \item{Compute all pairwise distances between datasets} \item{Cluster this distance matrix to determine a guide tree} } } \examples{ \dontrun{ # to install the SeuratData package see https://github.com/satijalab/seurat-data library(SeuratData) data("panc8") # panc8 is a merged Seurat object containing 8 separate pancreas datasets # split the object by dataset pancreas.list <- SplitObject(panc8, split.by = "tech") # perform standard preprocessing on each object for (i in 1:length(pancreas.list)) { pancreas.list[[i]] <- NormalizeData(pancreas.list[[i]], verbose = FALSE) pancreas.list[[i]] <- FindVariableFeatures( pancreas.list[[i]], selection.method = "vst", nfeatures = 2000, verbose = FALSE ) } # find anchors anchors <- FindIntegrationAnchors(object.list = pancreas.list) # integrate data integrated <- IntegrateData(anchorset = anchors) } } \references{ Stuart T, Butler A, et al. Comprehensive Integration of Single-Cell Data. Cell. 
2019;177:1888-1902 \doi{10.1016/j.cell.2019.05.031} } \concept{integration} Seurat/man/MapQuery.Rd0000644000176200001440000000602114525500037014322 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/integration.R \name{MapQuery} \alias{MapQuery} \title{Map query cells to a reference} \usage{ MapQuery( anchorset, query, reference, refdata = NULL, new.reduction.name = NULL, reference.reduction = NULL, reference.dims = NULL, query.dims = NULL, store.weights = FALSE, reduction.model = NULL, transferdata.args = list(), integrateembeddings.args = list(), projectumap.args = list(), verbose = TRUE ) } \arguments{ \item{anchorset}{An AnchorSet object} \item{query}{Query object used in anchorset construction} \item{reference}{Reference object used in anchorset construction} \item{refdata}{Data to transfer. This can be specified in one of two ways: \itemize{ \item{The reference data itself as either a vector where the names correspond to the reference cells, or a matrix, where the column names correspond to the reference cells.} \item{The name of the metadata field or assay from the reference object provided. This requires the reference parameter to be specified. If pulling assay data in this manner, it will pull the data from the data slot. To transfer data from other slots, please pull the data explicitly with \code{\link{GetAssayData}} and provide that matrix here.} }} \item{new.reduction.name}{Name for new integrated dimensional reduction.} \item{reference.reduction}{Name of reduction to use from the reference for neighbor finding} \item{reference.dims}{Dimensions (columns) to use from reference} \item{query.dims}{Dimensions (columns) to use from query} \item{store.weights}{Determine if the weight and anchor matrices are stored.} \item{reduction.model}{\code{DimReduc} object that contains the umap model} \item{transferdata.args}{A named list of additional arguments to \code{\link{TransferData}}} \item{integrateembeddings.args}{A named list of additional arguments to \code{\link{IntegrateEmbeddings}}} \item{projectumap.args}{A named list of additional arguments to \code{\link{ProjectUMAP}}} \item{verbose}{Print progress bars and output} } \value{ Returns a modified query Seurat object containing:#' \itemize{ \item{New Assays corresponding to the features transferred and/or their corresponding prediction scores from \code{\link{TransferData}}} \item{An integrated reduction from \code{\link{IntegrateEmbeddings}}} \item{A projected UMAP reduction of the query cells projected into the reference UMAP using \code{\link{ProjectUMAP}}} } } \description{ This is a convenience wrapper function around the following three functions that are often run together when mapping query data to a reference: \code{\link{TransferData}}, \code{\link{IntegrateEmbeddings}}, \code{\link{ProjectUMAP}}. Note that by default, the \code{weight.reduction} parameter for all functions will be set to the dimension reduction method used in the \code{\link{FindTransferAnchors}} function call used to construct the anchor object, and the \code{dims} parameter will be the same dimensions used to find anchors. 
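As a minimal sketch of how the wrapper is typically called (the object, reduction, and metadata names below are assumptions for illustration; \code{anchors} is presumed to come from \code{\link{FindTransferAnchors}} run against a reference that stores a PCA named "pca" and a UMAP model named "umap"):
\preformatted{
query <- MapQuery(
  anchorset = anchors,
  query = query,
  reference = reference,
  refdata = list(celltype = "celltype"),  # a metadata column in the reference
  reference.reduction = "pca",
  reduction.model = "umap"
)
}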
} \concept{integration} Seurat/man/GetImage.Rd0000644000176200001440000000150014525500037014236 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/objects.R \name{GetImage.SlideSeq} \alias{GetImage.SlideSeq} \alias{GetImage.STARmap} \alias{GetImage.VisiumV1} \title{Get Image Data} \usage{ \method{GetImage}{SlideSeq}(object, mode = c("grob", "raster", "plotly", "raw"), ...) \method{GetImage}{STARmap}(object, mode = c("grob", "raster", "plotly", "raw"), ...) \method{GetImage}{VisiumV1}(object, mode = c("grob", "raster", "plotly", "raw"), ...) } \arguments{ \item{object}{An object} \item{mode}{How to return the image; should accept one of \dQuote{grob}, \dQuote{raster}, \dQuote{plotly}, or \dQuote{raw}} \item{...}{Arguments passed to other methods} } \description{ Get Image Data } \seealso{ \code{\link[SeuratObject:GetImage]{SeuratObject::GetImage}} } \concept{objects} \concept{spatial} Seurat/man/CellCycleScoring.Rd0000644000176200001440000000267614525500037015757 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/utilities.R \name{CellCycleScoring} \alias{CellCycleScoring} \title{Score cell cycle phases} \usage{ CellCycleScoring( object, s.features, g2m.features, ctrl = NULL, set.ident = FALSE, ... ) } \arguments{ \item{object}{A Seurat object} \item{s.features}{A vector of features associated with S phase} \item{g2m.features}{A vector of features associated with G2M phase} \item{ctrl}{Number of control features selected from the same bin per analyzed feature supplied to \code{\link{AddModuleScore}}. Defaults to value equivalent to minimum number of features present in 's.features' and 'g2m.features'.} \item{set.ident}{If true, sets identity to phase assignments Stashes old identities in 'old.ident'} \item{...}{Arguments to be passed to \code{\link{AddModuleScore}}} } \value{ A Seurat object with the following columns added to object meta data: S.Score, G2M.Score, and Phase } \description{ Score cell cycle phases } \examples{ \dontrun{ data("pbmc_small") # pbmc_small doesn't have any cell-cycle genes # To run CellCycleScoring, please use a dataset with cell-cycle genes # An example is available at http://satijalab.org/seurat/cell_cycle_vignette.html pbmc_small <- CellCycleScoring( object = pbmc_small, g2m.features = cc.genes$g2m.genes, s.features = cc.genes$s.genes ) head(x = pbmc_small@meta.data) } } \seealso{ \code{AddModuleScore} } \concept{utilities} Seurat/man/ReadNanostring.Rd0000644000176200001440000001131514525500037015477 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/preprocessing.R, R/convenience.R \name{ReadNanostring} \alias{ReadNanostring} \alias{LoadNanostring} \title{Read and Load Nanostring SMI data} \usage{ ReadNanostring( data.dir, mtx.file = NULL, metadata.file = NULL, molecules.file = NULL, segmentations.file = NULL, type = "centroids", mol.type = "pixels", metadata = NULL, mols.filter = NA_character_, genes.filter = NA_character_, fov.filter = NULL, subset.counts.matrix = NULL, cell.mols.only = TRUE ) LoadNanostring(data.dir, fov, assay = "Nanostring") } \arguments{ \item{data.dir}{Path to folder containing Nanostring SMI outputs} \item{mtx.file}{Path to Nanostring cell x gene matrix CSV} \item{metadata.file}{Contains metadata including cell center, area, and stain intensities} \item{molecules.file}{Path to molecules file} \item{segmentations.file}{Path to segmentations CSV} \item{type}{Type of cell spatial coordinate matrices to 
read; choose one or more of: \itemize{ \item \dQuote{centroids}: cell centroids in pixel coordinate space \item \dQuote{segmentations}: cell segmentations in pixel coordinate space }} \item{mol.type}{Type of molecule spatial coordinate matrices to read; choose one or more of: \itemize{ \item \dQuote{pixels}: molecule coordinates in pixel space }} \item{metadata}{Type of available metadata to read; choose zero or more of: \itemize{ \item \dQuote{Area}: number of pixels in cell segmentation \item \dQuote{fov}: cell's fov \item \dQuote{Mean.MembraneStain}: mean membrane stain intensity \item \dQuote{Mean.DAPI}: mean DAPI stain intensity \item \dQuote{Mean.G}: mean green channel stain intensity \item \dQuote{Mean.Y}: mean yellow channel stain intensity \item \dQuote{Mean.R}: mean red channel stain intensity \item \dQuote{Max.MembraneStain}: max membrane stain intensity \item \dQuote{Max.DAPI}: max DAPI stain intensity \item \dQuote{Max.G}: max green channel stain intensity \item \dQuote{Max.Y}: max yellow stain intensity \item \dQuote{Max.R}: max red stain intensity }} \item{mols.filter}{Filter molecules that match provided string} \item{genes.filter}{Filter genes from cell x gene matrix that match provided string} \item{fov.filter}{Only load in select FOVs. Nanostring SMI data contains 30 total FOVs.} \item{subset.counts.matrix}{If the counts matrix should be built from molecule coordinates for a specific segmentation; One of: \itemize{ \item \dQuote{Nuclear}: nuclear segmentations \item \dQuote{Cytoplasm}: cell cytoplasm segmentations \item \dQuote{Membrane}: cell membrane segmentations }} \item{cell.mols.only}{If TRUE, only load molecules within a cell} \item{fov}{Name to store FOV as} \item{assay}{Name to store expression matrix as} } \value{ \code{ReadNanostring}: A list with some combination of the following values: \itemize{ \item \dQuote{\code{matrix}}: a \link[Matrix:dgCMatrix-class]{sparse matrix} with expression data; cells are columns and features are rows \item \dQuote{\code{centroids}}: a data frame with cell centroid coordinates in three columns: \dQuote{x}, \dQuote{y}, and \dQuote{cell} \item \dQuote{\code{pixels}}: a data frame with molecule pixel coordinates in three columns: \dQuote{x}, \dQuote{y}, and \dQuote{gene} } \code{LoadNanostring}: A \code{\link[SeuratObject]{Seurat}} object } \description{ Read and Load Nanostring SMI data } \note{ This function requires the \href{https://cran.r-project.org/package=data.table}{\pkg{data.table}} package to be installed } \section{Progress Updates with \pkg{progressr}}{ This function uses \href{https://cran.r-project.org/package=progressr}{\pkg{progressr}} to render status updates and progress bars. To enable progress updates, wrap the function call in \code{\link[progressr]{with_progress}} or run \code{\link[progressr:handlers]{handlers(global = TRUE)}} before running this function. For more details about \pkg{progressr}, please read \href{https://progressr.futureverse.org/articles/progressr-intro.html}{\code{vignette("progressr-intro")}} } \section{Parallelization with \pkg{future}}{ This function uses \href{https://cran.r-project.org/package=future}{\pkg{future}} to enable parallelization. Parallelization strategies can be set using \code{\link[future]{plan}}. 
Common plans include \dQuote{\code{sequential}} for non-parallelized processing or \dQuote{\code{multisession}} for parallel evaluation using multiple \R sessions; for other plans, see the \dQuote{Implemented evaluation strategies} section of \code{\link[future:plan]{?future::plan}}. For a more thorough introduction to \pkg{future}, see \href{https://future.futureverse.org/articles/future-1-overview.html}{\code{vignette("future-1-overview")}} } \concept{future} \concept{preprocessing} Seurat/man/ReadVitessce.Rd0000644000176200001440000000672514525500037015153 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/preprocessing.R, R/convenience.R \name{ReadVitessce} \alias{ReadVitessce} \alias{LoadHuBMAPCODEX} \title{Read Data From Vitessce} \usage{ ReadVitessce( counts = NULL, coords = NULL, molecules = NULL, type = c("segmentations", "centroids"), filter = NA_character_ ) LoadHuBMAPCODEX(data.dir, fov, assay = "CODEX") } \arguments{ \item{counts}{Path or URL to a Vitessce-formatted JSON file with expression data; should end in \dQuote{\code{.genes.json}} or \dQuote{\code{.clusters.json}}; pass \code{NULL} to skip} \item{coords}{Path or URL to a Vitessce-formatted JSON file with cell/spot spatial coordinates; should end in \dQuote{\code{.cells.json}}; pass \code{NULL} to skip} \item{molecules}{Path or URL to a Vitessce-formatted JSON file with molecule spatial coordinates; should end in \dQuote{\code{.molecules.json}}; pass \code{NULL} to skip} \item{type}{Type of cell/spot spatial coordinates to return, choose one or more from: \itemize{ \item \dQuote{segmentations} cell/spot segmentations \item \dQuote{centroids} cell/spot centroids }} \item{filter}{A character to filter molecules by, pass \code{NA} to skip molecule filtering} \item{data.dir}{Path to a directory containing Vitessce cells and clusters JSONs} \item{fov}{Name to store FOV as} \item{assay}{Name to store expression matrix as} } \value{ \code{ReadVitessce}: A list with some combination of the following values: \itemize{ \item \dQuote{\code{counts}}: if \code{counts} is not \code{NULL}, an expression matrix with cells as columns and features as rows \item \dQuote{\code{centroids}}: if \code{coords} is not \code{NULL} and \code{type} is contains\dQuote{centroids}, a data frame with cell centroids in three columns: \dQuote{x}, \dQuote{y}, and \dQuote{cell} \item \dQuote{\code{segmentations}}: if \code{coords} is not \code{NULL} and \code{type} contains \dQuote{centroids}, a data frame with cell segmentations in three columns: \dQuote{x}, \dQuote{y} and \dQuote{cell} \item \dQuote{\code{molecules}}: if \code{molecules} is not \code{NULL}, a data frame with molecule spatial coordinates in three columns: \dQuote{x}, \dQuote{y}, and \dQuote{gene} } \code{LoadHuBMAPCODEX}: A \code{\link[SeuratObject]{Seurat}} object } \description{ Read in data from Vitessce-formatted JSON files } \note{ This function requires the \href{https://cran.r-project.org/package=jsonlite}{\pkg{jsonlite}} package to be installed } \section{Progress Updates with \pkg{progressr}}{ This function uses \href{https://cran.r-project.org/package=progressr}{\pkg{progressr}} to render status updates and progress bars. To enable progress updates, wrap the function call in \code{\link[progressr]{with_progress}} or run \code{\link[progressr:handlers]{handlers(global = TRUE)}} before running this function. 
For more details about \pkg{progressr}, please read \href{https://progressr.futureverse.org/articles/progressr-intro.html}{\code{vignette("progressr-intro")}} } \examples{ \dontrun{ coords <- ReadVitessce( counts = "https://s3.amazonaws.com/vitessce-data/0.0.31/master_release/wang/wang.genes.json", coords = "https://s3.amazonaws.com/vitessce-data/0.0.31/master_release/wang/wang.cells.json", molecules = "https://s3.amazonaws.com/vitessce-data/0.0.31/master_release/wang/wang.molecules.json" ) names(coords) coords$counts[1:10, 1:10] head(coords$centroids) head(coords$segmentations) head(coords$molecules) } } \concept{preprocessing} Seurat/man/TransferSketchLabels.Rd0000644000176200001440000000337114525500037016635 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/sketching.R \name{TransferSketchLabels} \alias{TransferSketchLabels} \title{Transfer data from sketch data to full data} \usage{ TransferSketchLabels( object, sketched.assay = "sketch", reduction, dims, refdata = NULL, k = 50, reduction.model = NULL, neighbors = NULL, recompute.neighbors = FALSE, recompute.weights = FALSE, verbose = TRUE ) } \arguments{ \item{object}{A Seurat object.} \item{sketched.assay}{Sketched assay name. Default is 'sketch'.} \item{reduction}{Dimensional reduction name to use for label transfer.} \item{dims}{An integer vector indicating which dimensions to use for label transfer.} \item{refdata}{A list of character strings indicating the metadata columns containing labels to transfer. Default is NULL. Similar to refdata in `MapQuery`} \item{k}{Number of neighbors to use for label transfer. Default is 50.} \item{reduction.model}{Dimensional reduction model to use for label transfer. Default is NULL.} \item{neighbors}{An object storing the neighbors found during the sketching process. Default is NULL.} \item{recompute.neighbors}{Whether to recompute the neighbors for label transfer. Default is FALSE.} \item{recompute.weights}{Whether to recompute the weights for label transfer. Default is FALSE.} \item{verbose}{Print progress and diagnostic messages} } \value{ A Seurat object with transferred labels stored in the metadata. If a UMAP model is provided, the full data are also projected onto the UMAP space, with the results stored in a new reduction, full.`reduction.model` } \description{ This function transfers cell type labels from a sketched dataset to a full dataset based on the similarities in the lower dimensional space. 
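As a rough usage sketch (the reduction and metadata names here are assumptions for illustration and should match whatever was used when sketching and integrating the data):
\preformatted{
object <- TransferSketchLabels(
  object = object,
  sketched.assay = "sketch",
  reduction = "integrated.rpca",  # reduction computed on the sketched cells
  dims = 1:30,
  refdata = list(cluster_full = "seurat_clusters")
)
}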
} Seurat/man/FindTransferAnchors.Rd0000644000176200001440000002012114525500037016461 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/integration.R \name{FindTransferAnchors} \alias{FindTransferAnchors} \title{Find transfer anchors} \usage{ FindTransferAnchors( reference, query, normalization.method = "LogNormalize", recompute.residuals = TRUE, reference.assay = NULL, reference.neighbors = NULL, query.assay = NULL, reduction = "pcaproject", reference.reduction = NULL, project.query = FALSE, features = NULL, scale = TRUE, npcs = 30, l2.norm = TRUE, dims = 1:30, k.anchor = 5, k.filter = NA, k.score = 30, max.features = 200, nn.method = "annoy", n.trees = 50, eps = 0, approx.pca = TRUE, mapping.score.k = NULL, verbose = TRUE ) } \arguments{ \item{reference}{\code{\link{Seurat}} object to use as the reference} \item{query}{\code{\link{Seurat}} object to use as the query} \item{normalization.method}{Name of normalization method used: LogNormalize or SCT.} \item{recompute.residuals}{If using SCT as a normalization method, compute query Pearson residuals using the reference SCT model parameters.} \item{reference.assay}{Name of the Assay to use from reference} \item{reference.neighbors}{Name of the Neighbor to use from the reference. Optionally enables reuse of precomputed neighbors.} \item{query.assay}{Name of the Assay to use from query} \item{reduction}{Dimensional reduction to perform when finding anchors. Options are: \itemize{ \item{pcaproject: Project the PCA from the reference onto the query. We recommend using PCA when reference and query datasets are from scRNA-seq} \item{lsiproject: Project the LSI from the reference onto the query. We recommend using LSI when reference and query datasets are from scATAC-seq. This requires that LSI has been computed for the reference dataset, and the same features (eg, peaks or genome bins) are present in both the reference and query. See \code{\link[Signac]{RunTFIDF}} and \code{\link[Signac]{RunSVD}}} \item{rpca: Project the PCA from the reference onto the query, and the PCA from the query onto the reference (reciprocal PCA projection).} \item{cca: Run a CCA on the reference and query } }} \item{reference.reduction}{Name of dimensional reduction to use from the reference if running the pcaproject workflow. Optionally enables reuse of precomputed reference dimensional reduction. If NULL (default), use a PCA computed on the reference object.} \item{project.query}{Project the PCA from the query dataset onto the reference. Use only in rare cases where the query dataset has a much larger cell number, but the reference dataset has a unique assay for transfer. In this case, the default features will be set to the variable features of the query object that are also present in the reference.} \item{features}{Features to use for dimensional reduction. If not specified, set as variable features of the reference object which are also present in the query.} \item{scale}{Scale query data.} \item{npcs}{Number of PCs to compute on reference if reference.reduction is not provided.} \item{l2.norm}{Perform L2 normalization on the cell embeddings after dimensional reduction} \item{dims}{Which dimensions to use from the reduction to specify the neighbor search space} \item{k.anchor}{How many neighbors (k) to use when finding anchors} \item{k.filter}{How many neighbors (k) to use when filtering anchors.
Set to NA to turn off filtering.} \item{k.score}{How many neighbors (k) to use when scoring anchors} \item{max.features}{The maximum number of features to use when specifying the neighborhood search space in the anchor filtering} \item{nn.method}{Method for nearest neighbor finding. Options include: rann, annoy} \item{n.trees}{More trees gives higher precision when using annoy approximate nearest neighbor search} \item{eps}{Error bound on the neighbor finding algorithm (from \code{\link{RANN}} or \code{\link{RcppAnnoy}})} \item{approx.pca}{Use truncated singular value decomposition to approximate PCA} \item{mapping.score.k}{Compute and store nearest k query neighbors in the AnchorSet object that is returned. You can optionally set this if you plan on computing the mapping score and want to enable reuse of some downstream neighbor calculations to make the mapping score function more efficient.} \item{verbose}{Print progress bars and output} } \value{ Returns an \code{AnchorSet} object that can be used as input to \code{\link{TransferData}}, \code{\link{IntegrateEmbeddings}} and \code{\link{MapQuery}}. The dimension reduction used for finding anchors is stored in the \code{AnchorSet} object and can be used for computing anchor weights in downstream functions. Note that only the requested dimensions are stored in the dimension reduction object in the \code{AnchorSet}. This means that if \code{dims=2:20} is used, for example, the dimension of the stored reduction is \code{1:19}. } \description{ Find a set of anchors between a reference and query object. These anchors can later be used to transfer data from the reference to query object using the \code{\link{TransferData}} object. } \details{ The main steps of this procedure are outlined below. For a more detailed description of the methodology, please see Stuart, Butler, et al Cell 2019. \doi{10.1016/j.cell.2019.05.031}; \doi{10.1101/460147} \itemize{ \item{Perform dimensional reduction. Exactly what is done here depends on the values set for the \code{reduction} and \code{project.query} parameters. If \code{reduction = "pcaproject"}, a PCA is performed on either the reference (if \code{project.query = FALSE}) or the query (if \code{project.query = TRUE}), using the \code{features} specified. The data from the other dataset is then projected onto this learned PCA structure. If \code{reduction = "cca"}, then CCA is performed on the reference and query for this dimensional reduction step. If \code{reduction = "lsiproject"}, the stored LSI dimension reduction in the reference object is used to project the query dataset onto the reference. If \code{l2.norm} is set to \code{TRUE}, perform L2 normalization of the embedding vectors.} \item{Identify anchors between the reference and query - pairs of cells from each dataset that are contained within each other's neighborhoods (also known as mutual nearest neighbors).} \item{Filter low confidence anchors to ensure anchors in the low dimension space are in broad agreement with the high dimensional measurements. This is done by looking at the neighbors of each query cell in the reference dataset using \code{max.features} to define this space. If the reference cell isn't found within the first \code{k.filter} neighbors, remove the anchor.} \item{Assign each remaining anchor a score. For each anchor cell, determine the nearest \code{k.score} anchors within its own dataset and within its pair's dataset. 
Based on these neighborhoods, construct an overall neighbor graph and then compute the shared neighbor overlap between anchor and query cells (analogous to an SNN graph). We use the 0.01 and 0.90 quantiles on these scores to dampen outlier effects and rescale to range between 0-1.} } } \examples{ \dontrun{ # to install the SeuratData package see https://github.com/satijalab/seurat-data library(SeuratData) data("pbmc3k") # for demonstration, split the object into reference and query pbmc.reference <- pbmc3k[, 1:1350] pbmc.query <- pbmc3k[, 1351:2700] # perform standard preprocessing on each object pbmc.reference <- NormalizeData(pbmc.reference) pbmc.reference <- FindVariableFeatures(pbmc.reference) pbmc.reference <- ScaleData(pbmc.reference) pbmc.query <- NormalizeData(pbmc.query) pbmc.query <- FindVariableFeatures(pbmc.query) pbmc.query <- ScaleData(pbmc.query) # find anchors anchors <- FindTransferAnchors(reference = pbmc.reference, query = pbmc.query) # transfer labels predictions <- TransferData( anchorset = anchors, refdata = pbmc.reference$seurat_annotations ) pbmc.query <- AddMetaData(object = pbmc.query, metadata = predictions) } } \references{ Stuart T, Butler A, et al. Comprehensive Integration of Single-Cell Data. Cell. 2019;177:1888-1902 \doi{10.1016/j.cell.2019.05.031}; } \concept{integration} Seurat/man/contrast-theory.Rd0000644000176200001440000000131614525500037015726 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/visualization.R \name{contrast-theory} \alias{contrast-theory} \alias{Intensity} \alias{Luminance} \title{Get the intensity and/or luminance of a color} \source{ \url{https://stackoverflow.com/questions/3942878/how-to-decide-font-color-in-white-or-black-depending-on-background-color} } \usage{ Intensity(color) Luminance(color) } \arguments{ \item{color}{A vector of colors} } \value{ A vector of intensities/luminances for each color } \description{ Get the intensity and/or luminance of a color } \examples{ Intensity(color = c('black', 'white', '#E76BF3')) Luminance(color = c('black', 'white', '#E76BF3')) } \concept{visualization} Seurat/man/DimPlot.Rd0000644000176200001440000001146714525500037014141 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/visualization.R, R/convenience.R \name{DimPlot} \alias{DimPlot} \alias{TSNEPlot} \alias{PCAPlot} \alias{ICAPlot} \alias{UMAPPlot} \title{Dimensional reduction plot} \usage{ DimPlot( object, dims = c(1, 2), cells = NULL, cols = NULL, pt.size = NULL, reduction = NULL, group.by = NULL, split.by = NULL, shape.by = NULL, order = NULL, shuffle = FALSE, seed = 1, label = FALSE, label.size = 4, label.color = "black", label.box = FALSE, repel = FALSE, alpha = 1, cells.highlight = NULL, cols.highlight = "#DE2D26", sizes.highlight = 1, na.value = "grey50", ncol = NULL, combine = TRUE, raster = NULL, raster.dpi = c(512, 512) ) PCAPlot(object, ...) TSNEPlot(object, ...) UMAPPlot(object, ...) } \arguments{ \item{object}{Seurat object} \item{dims}{Dimensions to plot, must be a two-length numeric vector specifying x- and y-dimensions} \item{cells}{Vector of cells to plot (default is all cells)} \item{cols}{Vector of colors, each color corresponds to an identity class. This may also be a single character or numeric value corresponding to a palette as specified by \code{\link[RColorBrewer]{brewer.pal.info}}. By default, ggplot2 assigns colors. We also include a number of palettes from the pals package. 
See \code{\link{DiscretePalette}} for details.} \item{pt.size}{Adjust point size for plotting} \item{reduction}{Which dimensionality reduction to use. If not specified, first searches for umap, then tsne, then pca} \item{group.by}{Name of one or more metadata columns to group (color) cells by (for example, orig.ident); pass 'ident' to group by identity class} \item{split.by}{A factor in object metadata to split the plot by, pass 'ident' to split by cell identity'} \item{shape.by}{If NULL, all points are circles (default). You can specify any cell attribute (that can be pulled with FetchData) allowing for both different colors and different shapes on cells. Only applicable if \code{raster = FALSE}.} \item{order}{Specify the order of plotting for the idents. This can be useful for crowded plots if points of interest are being buried. Provide either a full list of valid idents or a subset to be plotted last (on top)} \item{shuffle}{Whether to randomly shuffle the order of points. This can be useful for crowded plots if points of interest are being buried. (default is FALSE)} \item{seed}{Sets the seed if randomly shuffling the order of points.} \item{label}{Whether to label the clusters} \item{label.size}{Sets size of labels} \item{label.color}{Sets the color of the label text} \item{label.box}{Whether to put a box around the label text (geom_text vs geom_label)} \item{repel}{Repel labels} \item{alpha}{Alpha value for plotting (default is 1)} \item{cells.highlight}{A list of character or numeric vectors of cells to highlight. If only one group of cells desired, can simply pass a vector instead of a list. If set, colors selected cells to the color(s) in \code{cols.highlight} and other cells black (white if dark.theme = TRUE); will also resize to the size(s) passed to \code{sizes.highlight}} \item{cols.highlight}{A vector of colors to highlight the cells as; will repeat to the length groups in cells.highlight} \item{sizes.highlight}{Size of highlighted cells; will repeat to the length groups in cells.highlight. If \code{sizes.highlight = TRUE} size of all points will be this value.} \item{na.value}{Color value for NA points when using custom scale} \item{ncol}{Number of columns for display when combining plots} \item{combine}{Combine plots into a single \code{\link[patchwork]{patchwork}ed} ggplot object. If \code{FALSE}, return a list of ggplot objects} \item{raster}{Convert points to raster format, default is \code{NULL} which automatically rasterizes if plotting more than 100,000 cells} \item{raster.dpi}{Pixel resolution for rasterized plots, passed to geom_scattermore(). Default is c(512, 512).} \item{...}{Extra parameters passed to \code{DimPlot}} } \value{ A \code{\link[patchwork]{patchwork}ed} ggplot object if \code{combine = TRUE}; otherwise, a list of ggplot objects } \description{ Graphs the output of a dimensional reduction technique on a 2D scatter plot where each point is a cell and it's positioned based on the cell embeddings determined by the reduction technique. By default, cells are colored by their identity class (can be changed with the group.by parameter). } \note{ For the old \code{do.hover} and \code{do.identify} functionality, please see \code{HoverLocator} and \code{CellSelector}, respectively. 
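For example, hover-style inspection can be recovered along these lines (a sketch; the metadata column passed to \code{information} is illustrative):
\preformatted{
plot <- DimPlot(object = pbmc_small)
HoverLocator(plot = plot, information = FetchData(pbmc_small, vars = "nCount_RNA"))
}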
} \examples{ data("pbmc_small") DimPlot(object = pbmc_small) DimPlot(object = pbmc_small, split.by = 'letter.idents') } \seealso{ \code{\link{FeaturePlot}} \code{\link{HoverLocator}} \code{\link{CellSelector}} \code{\link{FetchData}} } \concept{convenience} \concept{visualization} Seurat/man/merge.SCTAssay.Rd0000644000176200001440000000203114525500037015304 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/objects.R \name{merge.SCTAssay} \alias{merge.SCTAssay} \title{Merge SCTAssay objects} \usage{ \method{merge}{SCTAssay}( x = NULL, y = NULL, add.cell.ids = NULL, merge.data = TRUE, na.rm = TRUE, ... ) } \arguments{ \item{x}{A \code{\link[SeuratObject]{Seurat}} object} \item{y}{A single \code{Seurat} object or a list of \code{Seurat} objects} \item{add.cell.ids}{A character vector of \code{length(x = c(x, y))}; appends the corresponding values to the start of each objects' cell names} \item{merge.data}{Merge the data slots instead of just merging the counts (which requires renormalization); this is recommended if the same normalization approach was applied to all objects} \item{na.rm}{If na.rm = TRUE, this will only preserve residuals that are present in all SCTAssays being merged. Otherwise, missing residuals will be populated with NAs.} \item{...}{Arguments passed to other methods} } \description{ Merge SCTAssay objects } \concept{objects} Seurat/man/JackStrawPlot.Rd0000644000176200001440000000330114525500037015305 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/visualization.R \name{JackStrawPlot} \alias{JackStrawPlot} \title{JackStraw Plot} \usage{ JackStrawPlot( object, dims = 1:5, cols = NULL, reduction = "pca", xmax = 0.1, ymax = 0.3 ) } \arguments{ \item{object}{Seurat object} \item{dims}{Dims to plot} \item{cols}{Vector of colors, each color corresponds to an individual PC. This may also be a single character or numeric value corresponding to a palette as specified by \code{\link[RColorBrewer]{brewer.pal.info}}. By default, ggplot2 assigns colors. We also include a number of palettes from the pals package. See \code{\link{DiscretePalette}} for details.} \item{reduction}{reduction to pull jackstraw info from} \item{xmax}{X-axis maximum on each QQ plot.} \item{ymax}{Y-axis maximum on each QQ plot.} } \value{ A ggplot object } \description{ Plots the results of the JackStraw analysis for PCA significance. For each PC, plots a QQ-plot comparing the distribution of p-values for all genes across each PC, compared with a uniform distribution. Also determines a p-value for the overall significance of each PC (see Details). } \details{ Significant PCs should show a p-value distribution (black curve) that is strongly skewed to the left compared to the null distribution (dashed line) The p-value for each PC is based on a proportion test comparing the number of genes with a p-value below a particular threshold (score.thresh), compared with the proportion of genes expected under a uniform distribution of p-values. 
} \examples{ data("pbmc_small") JackStrawPlot(object = pbmc_small) } \seealso{ \code{\link{ScoreJackStraw}} } \author{ Omri Wurtzel } \concept{visualization} Seurat/man/LinkedPlots.Rd0000644000176200001440000000346714525500037015022 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/visualization.R \name{LinkedPlots} \alias{LinkedPlots} \alias{LinkedDimPlot} \alias{LinkedPlot} \alias{LinkedFeaturePlot} \title{Visualize spatial and clustering (dimensional reduction) data in a linked, interactive framework} \usage{ LinkedDimPlot( object, dims = 1:2, reduction = NULL, image = NULL, group.by = NULL, alpha = c(0.1, 1), combine = TRUE ) LinkedFeaturePlot( object, feature, dims = 1:2, reduction = NULL, image = NULL, slot = "data", alpha = c(0.1, 1), combine = TRUE ) } \arguments{ \item{object}{A Seurat object} \item{dims}{Dimensions to plot, must be a two-length numeric vector specifying x- and y-dimensions} \item{reduction}{Which dimensionality reduction to use. If not specified, first searches for umap, then tsne, then pca} \item{image}{Name of the image to use in the plot} \item{group.by}{Name of meta.data column to group the data by} \item{alpha}{Controls opacity of spots. Provide as a vector specifying the min and max for SpatialFeaturePlot. For SpatialDimPlot, provide a single alpha value for each plot.} \item{combine}{Combine plots into a single gg object; note that if TRUE; themeing will not work when plotting multiple features/groupings} \item{feature}{Feature to visualize} \item{slot}{If plotting a feature, which data slot to pull from (counts, data, or scale.data)} } \value{ Returns final plots. If \code{combine}, plots are stiched together using \code{\link{CombinePlots}}; otherwise, returns a list of ggplot objects } \description{ Visualize spatial and clustering (dimensional reduction) data in a linked, interactive framework } \examples{ \dontrun{ LinkedDimPlot(seurat.object) LinkedFeaturePlot(seurat.object, feature = 'Hpca') } } \concept{spatial} \concept{visualization} Seurat/man/ProjectDimReduc.Rd0000644000176200001440000000266114525500037015610 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/integration.R \name{ProjectDimReduc} \alias{ProjectDimReduc} \title{Project query data to reference dimensional reduction} \usage{ ProjectDimReduc( query, reference, mode = c("pcaproject", "lsiproject"), reference.reduction, combine = FALSE, query.assay = NULL, reference.assay = NULL, features = NULL, do.scale = TRUE, reduction.name = NULL, reduction.key = NULL, verbose = TRUE ) } \arguments{ \item{query}{Query object} \item{reference}{Reference object} \item{mode}{Projection mode name for projection \itemize{ \item{pcaproject: PCA projection} \item{lsiproject: LSI projection} }} \item{reference.reduction}{Name of dimensional reduction in the reference object} \item{combine}{Determine if query and reference objects are combined} \item{query.assay}{Assay used for query object} \item{reference.assay}{Assay used for reference object} \item{features}{Features used for projection} \item{do.scale}{Determine if scale expression matrix in the pcaproject mode} \item{reduction.name}{dimensional reduction name, reference.reduction is used by default} \item{reduction.key}{dimensional reduction key, the key in reference.reduction is used by default} \item{verbose}{Print progress and message} } \value{ Returns a query-only or query-reference combined seurat object } \description{ Project query data to reference 
dimensional reduction } Seurat/man/VlnPlot.Rd0000644000176200001440000000535314525500037014164 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/visualization.R \name{VlnPlot} \alias{VlnPlot} \title{Single cell violin plot} \usage{ VlnPlot( object, features, cols = NULL, pt.size = NULL, alpha = 1, idents = NULL, sort = FALSE, assay = NULL, group.by = NULL, split.by = NULL, adjust = 1, y.max = NULL, same.y.lims = FALSE, log = FALSE, ncol = NULL, slot = deprecated(), layer = NULL, split.plot = FALSE, stack = FALSE, combine = TRUE, fill.by = "feature", flip = FALSE, add.noise = TRUE, raster = NULL ) } \arguments{ \item{object}{Seurat object} \item{features}{Features to plot (gene expression, metrics, PC scores, anything that can be retrieved by FetchData)} \item{cols}{Colors to use for plotting} \item{pt.size}{Point size for points} \item{alpha}{Alpha value for points} \item{idents}{Which classes to include in the plot (default is all)} \item{sort}{Sort identity classes (on the x-axis) by the average expression of the attribute being plotted, can also pass 'increasing' or 'decreasing' to change sort direction} \item{assay}{Name of assay to use, defaults to the active assay} \item{group.by}{Group (color) cells in different ways (for example, orig.ident)} \item{split.by}{A factor in object metadata to split the plot by, pass 'ident' to split by cell identity} \item{adjust}{Adjust parameter for geom_violin} \item{y.max}{Maximum y axis value} \item{same.y.lims}{Set all the y-axis limits to the same values} \item{log}{plot the feature axis on log scale} \item{ncol}{Number of columns if multiple plots are displayed} \item{slot}{Slot to pull expression data from (e.g. "counts" or "data")} \item{layer}{Layer to pull expression data from (e.g. "counts" or "data")} \item{split.plot}{plot each group of the split violin plots by multiple or single violin shapes.} \item{stack}{Horizontally stack plots for each feature} \item{combine}{Combine plots into a single \code{\link[patchwork]{patchwork}ed} ggplot object. If \code{FALSE}, return a list of ggplot objects} \item{fill.by}{Color violins/ridges based on either 'feature' or 'ident'} \item{flip}{flip plot orientation (identities on x-axis)} \item{add.noise}{determine if adding a small noise for plotting} \item{raster}{Convert points to raster format. Requires 'ggrastr' to be installed.} } \value{ A \code{\link[patchwork]{patchwork}ed} ggplot object if \code{combine = TRUE}; otherwise, a list of ggplot objects } \description{ Draws a violin plot of single cell data (gene expression, metrics, PC scores, etc.)
} \examples{ data("pbmc_small") VlnPlot(object = pbmc_small, features = 'PC_1') VlnPlot(object = pbmc_small, features = 'LYZ', split.by = 'groups') } \seealso{ \code{\link{FetchData}} } \concept{visualization} Seurat/man/CellsByImage.Rd0000644000176200001440000000120514525500037015056 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/objects.R \name{CellsByImage} \alias{CellsByImage} \title{Get a vector of cell names associated with an image (or set of images)} \usage{ CellsByImage(object, images = NULL, unlist = FALSE) } \arguments{ \item{object}{Seurat object} \item{images}{Vector of image names} \item{unlist}{Return as a single vector of cell names as opposed to a list, named by image name.} } \value{ A vector of cell names } \description{ Get a vector of cell names associated with an image (or set of images) } \examples{ \dontrun{ CellsByImage(object = object, images = "slice1") } } Seurat/man/writing-integration.Rd0000644000176200001440000000340014525500037016561 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/integration5.R \name{writing-integration} \alias{writing-integration} \title{Writing Integration Method Functions} \description{ Integration method functions can be written by anyone to implement any integration method in Seurat. These methods should expect to take a \link[SeuratObject:Assay5]{v5 assay} as input and return a named list of objects that can be added back to a \code{Seurat} object (eg. a \link[SeuratObject:DimReduc]{dimensional reduction} or cell-level meta data) } \section{Provided Parameters}{ Every integration method function should expect the following arguments: \itemize{ \item \dQuote{\code{object}}: an \code{\link[SeuratObject]{Assay5}} object \item \dQuote{\code{orig}}: \link[SeuratObject:DimReduc]{dimensional reduction} to correct \item \dQuote{\code{layers}}: names of normalized layers in \code{object} \item \dQuote{\code{scale.layer}}: name(s) of scaled layer(s) in \code{object} \item \dQuote{\code{features}}: a vector of features for integration \item \dQuote{\code{groups}}: a one-column data frame with the groups for each cell in \code{object}; the column name will be \dQuote{group} } } \section{Method Discovery}{ The documentation for \code{\link{IntegrateLayers}()} will automatically link to integration method functions provided by packages in the \code{\link[base]{search}()} space. 
To make an integration method function discoverable by the documentation, simply add an attribute named \dQuote{\code{Seurat.method}} to the function with a value of \dQuote{\code{integration}} \preformatted{ attr(MyIntegrationFunction, which = "Seurat.method") <- "integration" } } \seealso{ \code{\link{IntegrateLayers}()} } \concept{integration} \keyword{internal} Seurat/man/FeaturePlot.Rd0000644000176200001440000001320214525500037015010 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/visualization.R \name{FeaturePlot} \alias{FeaturePlot} \alias{FeatureHeatmap} \title{Visualize 'features' on a dimensional reduction plot} \usage{ FeaturePlot( object, features, dims = c(1, 2), cells = NULL, cols = if (blend) { c("lightgrey", "#ff0000", "#00ff00") } else { c("lightgrey", "blue") }, pt.size = NULL, alpha = 1, order = FALSE, min.cutoff = NA, max.cutoff = NA, reduction = NULL, split.by = NULL, keep.scale = "feature", shape.by = NULL, slot = "data", blend = FALSE, blend.threshold = 0.5, label = FALSE, label.size = 4, label.color = "black", repel = FALSE, ncol = NULL, coord.fixed = FALSE, by.col = TRUE, sort.cell = deprecated(), interactive = FALSE, combine = TRUE, raster = NULL, raster.dpi = c(512, 512) ) } \arguments{ \item{object}{Seurat object} \item{features}{Vector of features to plot. Features can come from: \itemize{ \item An \code{Assay} feature (e.g. a gene name - "MS4A1") \item A column name from meta.data (e.g. mitochondrial percentage - "percent.mito") \item A column name from a \code{DimReduc} object corresponding to the cell embedding values (e.g. the PC 1 scores - "PC_1") }} \item{dims}{Dimensions to plot, must be a two-length numeric vector specifying x- and y-dimensions} \item{cells}{Vector of cells to plot (default is all cells)} \item{cols}{The two colors to form the gradient over. Provide as string vector with the first color corresponding to low values, the second to high. Also accepts a Brewer color scale or vector of colors. Note: this will bin the data into number of colors provided. When blend is \code{TRUE}, takes anywhere from 1-3 colors: \describe{ \item{1 color:}{Treated as color for double-negatives, will use default colors 2 and 3 for per-feature expression} \item{2 colors:}{Treated as colors for per-feature expression, will use default color 1 for double-negatives} \item{3+ colors:}{First color used for double-negatives, colors 2 and 3 used for per-feature expression, all others ignored} }} \item{pt.size}{Adjust point size for plotting} \item{alpha}{Alpha value for plotting (default is 1)} \item{order}{Boolean determining whether to plot cells in order of expression. Can be useful if cells expressing given feature are getting buried.} \item{min.cutoff, max.cutoff}{Vector of minimum and maximum cutoff values for each feature, may specify quantile in the form of 'q##' where '##' is the quantile (eg, 'q1', 'q10')} \item{reduction}{Which dimensionality reduction to use. If not specified, first searches for umap, then tsne, then pca} \item{split.by}{A factor in object metadata to split the plot by, pass 'ident' to split by cell identity'} \item{keep.scale}{How to handle the color scale across multiple plots. 
Options are: \itemize{ \item \dQuote{feature} (default; by row/feature scaling): The plots for each individual feature are scaled to the maximum expression of the feature across the conditions provided to \code{split.by}. \item \dQuote{all} (universal scaling): The plots for all features and conditions are scaled to the maximum expression value for the feature with the highest overall expression. \item \code{NULL} (no scaling): Each individual plot is scaled to the maximum expression value of the feature in the condition provided to \code{split.by}. Be aware setting \code{NULL} will result in color scales that are not comparable between plots }} \item{shape.by}{If NULL, all points are circles (default). You can specify any cell attribute (that can be pulled with FetchData) allowing for both different colors and different shapes on cells. Only applicable if \code{raster = FALSE}.} \item{slot}{Which slot to pull expression data from?} \item{blend}{Scale and blend expression values to visualize coexpression of two features} \item{blend.threshold}{The color cutoff from weak signal to strong signal; ranges from 0 to 1.} \item{label}{Whether to label the clusters} \item{label.size}{Sets size of labels} \item{label.color}{Sets the color of the label text} \item{repel}{Repel labels} \item{ncol}{Number of columns to combine multiple feature plots to, ignored if \code{split.by} is not \code{NULL}} \item{coord.fixed}{Plot cartesian coordinates with fixed aspect ratio} \item{by.col}{If splitting by a factor, plot the splits per column with the features as rows; ignored if \code{blend = TRUE}} \item{sort.cell}{Redundant with \code{order}. This argument is being deprecated. Please use \code{order} instead.} \item{interactive}{Launch an interactive \code{\link[Seurat:IFeaturePlot]{FeaturePlot}}} \item{combine}{Combine plots into a single \code{\link[patchwork]{patchwork}ed} ggplot object. If \code{FALSE}, return a list of ggplot objects} \item{raster}{Convert points to raster format, default is \code{NULL} which automatically rasterizes if plotting more than 100,000 cells} \item{raster.dpi}{Pixel resolution for rasterized plots, passed to geom_scattermore(). Default is c(512, 512).} } \value{ A \code{\link[patchwork]{patchwork}ed} ggplot object if \code{combine = TRUE}; otherwise, a list of ggplot objects } \description{ Colors single cells on a dimensional reduction plot according to a 'feature' (i.e. gene expression, PC scores, number of genes detected, etc.) } \note{ For the old \code{do.hover} and \code{do.identify} functionality, please see \code{HoverLocator} and \code{CellSelector}, respectively. } \examples{ data("pbmc_small") FeaturePlot(object = pbmc_small, features = 'PC_1') } \seealso{ \code{\link{DimPlot}} \code{\link{HoverLocator}} \code{\link{CellSelector}} } \concept{visualization} Seurat/man/TopNeighbors.Rd0000644000176200001440000000073614525500037015171 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/objects.R \name{TopNeighbors} \alias{TopNeighbors} \title{Get nearest neighbors for given cell} \usage{ TopNeighbors(object, cell, n = 5) } \arguments{ \item{object}{\code{\link{Neighbor}} object} \item{cell}{Cell of interest} \item{n}{Number of neighbors to return} } \value{ Returns a vector of cell names } \description{ Return a vector of cell names of the nearest n cells.
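A small sketch of typical use (this assumes a \code{Neighbor} object was retained by running \code{\link{FindNeighbors}} with \code{return.neighbor = TRUE}; the stored name used below is an assumption, check \code{Neighbors(object)} for the actual one):
\preformatted{
pbmc_small <- FindNeighbors(pbmc_small, return.neighbor = TRUE)
nn <- pbmc_small[["RNA.nn"]]  # assumed name of the stored Neighbor object
TopNeighbors(object = nn, cell = Cells(pbmc_small)[1], n = 5)
}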
} \concept{objects} Seurat/man/SaveAnnoyIndex.Rd0000644000176200001440000000057414525500037015461 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/utilities.R \name{SaveAnnoyIndex} \alias{SaveAnnoyIndex} \title{Save the Annoy index} \usage{ SaveAnnoyIndex(object, file) } \arguments{ \item{object}{A Neighbor object with the annoy index stored} \item{file}{Path to file to write index to} } \description{ Save the Annoy index } \concept{utilities} Seurat/man/VST.Rd0000644000176200001440000000336414525500037013242 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/generics.R, R/preprocessing5.R \name{VST} \alias{VST} \alias{VST.default} \alias{VST.IterableMatrix} \alias{VST.dgCMatrix} \alias{VST.matrix} \title{Variance Stabilizing Transformation} \usage{ VST(data, margin = 1L, nselect = 2000L, span = 0.3, clip = NULL, ...) \method{VST}{default}(data, margin = 1L, nselect = 2000L, span = 0.3, clip = NULL, ...) \method{VST}{IterableMatrix}( data, margin = 1L, nselect = 2000L, span = 0.3, clip = NULL, verbose = TRUE, ... ) \method{VST}{dgCMatrix}( data, margin = 1L, nselect = 2000L, span = 0.3, clip = NULL, verbose = TRUE, ... ) \method{VST}{matrix}(data, margin = 1L, nselect = 2000L, span = 0.3, clip = NULL, ...) } \arguments{ \item{data}{A matrix-like object} \item{margin}{Unused} \item{nselect}{Number of features to select} \item{span}{The parameter \eqn{\alpha} that controls the degree of smoothing.} \item{clip}{Upper bound for values post-standardization; defaults to the square root of the number of cells} \item{...}{Arguments passed to other methods} \item{verbose}{...} } \value{ A data frame with the following columns: \itemize{ \item \dQuote{\code{mean}}: ... \item \dQuote{\code{variance}}: ... \item \dQuote{\code{variance.expected}}: ... \item \dQuote{\code{variance.standardized}}: ... \item \dQuote{\code{variable}}: \code{TRUE} if the feature is selected as variable, otherwise \code{FALSE} \item \dQuote{\code{rank}}: If the feature is selected as variable, its rank among the variable features (lower ranks indicate more variable features); otherwise, \code{NA} } } \description{ Apply variance stabilizing transformation for selection of variable features } \keyword{internal} Seurat/man/RunPCA.Rd0000644000176200001440000000510214525500037013650 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/generics.R, R/dimensional_reduction.R \name{RunPCA} \alias{RunPCA} \alias{RunPCA.default} \alias{RunPCA.Assay} \alias{RunPCA.Seurat} \title{Run Principal Component Analysis} \usage{ RunPCA(object, ...) \method{RunPCA}{default}( object, assay = NULL, npcs = 50, rev.pca = FALSE, weight.by.var = TRUE, verbose = TRUE, ndims.print = 1:5, nfeatures.print = 30, reduction.key = "PC_", seed.use = 42, approx = TRUE, ... ) \method{RunPCA}{Assay}( object, assay = NULL, features = NULL, npcs = 50, rev.pca = FALSE, weight.by.var = TRUE, verbose = TRUE, ndims.print = 1:5, nfeatures.print = 30, reduction.key = "PC_", seed.use = 42, ... ) \method{RunPCA}{Seurat}( object, assay = NULL, features = NULL, npcs = 50, rev.pca = FALSE, weight.by.var = TRUE, verbose = TRUE, ndims.print = 1:5, nfeatures.print = 30, reduction.name = "pca", reduction.key = "PC_", seed.use = 42, ...
) } \arguments{ \item{object}{An object} \item{...}{Arguments passed to other methods and IRLBA} \item{assay}{Name of Assay PCA is being run on} \item{npcs}{Total number of PCs to compute and store (50 by default)} \item{rev.pca}{By default computes the PCA on the cell x gene matrix. Setting to TRUE will compute it on the gene x cell matrix.} \item{weight.by.var}{Weight the cell embeddings by the variance of each PC (weights the gene loadings if rev.pca is TRUE)} \item{verbose}{Print the top genes associated with high/low loadings for the PCs} \item{ndims.print}{PCs to print genes for} \item{nfeatures.print}{Number of genes to print for each PC} \item{reduction.key}{dimensional reduction key, specifies the string before the number for the dimension names. PC by default} \item{seed.use}{Set a random seed. By default, sets the seed to 42. Setting NULL will not set a seed.} \item{approx}{Use truncated singular value decomposition to approximate PCA} \item{features}{Features to compute PCA on. If features=NULL, PCA will be run using the variable features for the Assay. Note that the features must be present in the scaled data. Any requested features that are not scaled or have 0 variance will be dropped, and the PCA will be run using the remaining features.} \item{reduction.name}{dimensional reduction name, pca by default} } \value{ Returns a Seurat object with the PCA calculation stored in the reductions slot } \description{ Run a PCA dimensionality reduction. For details about stored PCA calculation parameters, see \code{PrintPCAParams}. } \concept{dimensional_reduction} Seurat/man/ColorDimSplit.Rd0000644000176200001440000001066514525500037015314 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/visualization.R \name{ColorDimSplit} \alias{ColorDimSplit} \title{Color dimensional reduction plot by tree split} \usage{ ColorDimSplit( object, node, left.color = "red", right.color = "blue", other.color = "grey50", ... ) } \arguments{ \item{object}{Seurat object} \item{node}{Node in cluster tree on which to base the split} \item{left.color}{Color for the left side of the split} \item{right.color}{Color for the right side of the split} \item{other.color}{Color for all other cells} \item{...}{ Arguments passed on to \code{\link[=DimPlot]{DimPlot}} \describe{ \item{\code{dims}}{Dimensions to plot, must be a two-length numeric vector specifying x- and y-dimensions} \item{\code{cells}}{Vector of cells to plot (default is all cells)} \item{\code{cols}}{Vector of colors, each color corresponds to an identity class. This may also be a single character or numeric value corresponding to a palette as specified by \code{\link[RColorBrewer]{brewer.pal.info}}. By default, ggplot2 assigns colors. We also include a number of palettes from the pals package. See \code{\link{DiscretePalette}} for details.} \item{\code{pt.size}}{Adjust point size for plotting} \item{\code{reduction}}{Which dimensionality reduction to use. If not specified, first searches for umap, then tsne, then pca} \item{\code{group.by}}{Name of one or more metadata columns to group (color) cells by (for example, orig.ident); pass 'ident' to group by identity class} \item{\code{split.by}}{A factor in object metadata to split the plot by, pass 'ident' to split by cell identity} \item{\code{shape.by}}{If NULL, all points are circles (default). You can specify any cell attribute (that can be pulled with FetchData) allowing for both different colors and different shapes on cells.
Only applicable if \code{raster = FALSE}.} \item{\code{order}}{Specify the order of plotting for the idents. This can be useful for crowded plots if points of interest are being buried. Provide either a full list of valid idents or a subset to be plotted last (on top)} \item{\code{shuffle}}{Whether to randomly shuffle the order of points. This can be useful for crowded plots if points of interest are being buried. (default is FALSE)} \item{\code{seed}}{Sets the seed if randomly shuffling the order of points.} \item{\code{label}}{Whether to label the clusters} \item{\code{label.size}}{Sets size of labels} \item{\code{label.color}}{Sets the color of the label text} \item{\code{label.box}}{Whether to put a box around the label text (geom_text vs geom_label)} \item{\code{alpha}}{Alpha value for plotting (default is 1)} \item{\code{repel}}{Repel labels} \item{\code{cells.highlight}}{A list of character or numeric vectors of cells to highlight. If only one group of cells is desired, simply pass a vector instead of a list. If set, colors selected cells to the color(s) in \code{cols.highlight} and other cells black (white if dark.theme = TRUE); will also resize to the size(s) passed to \code{sizes.highlight}} \item{\code{cols.highlight}}{A vector of colors to highlight the cells as; will repeat to match the number of groups in cells.highlight} \item{\code{sizes.highlight}}{Size of highlighted cells; will repeat to match the number of groups in cells.highlight. If \code{sizes.highlight = TRUE}, the size of all points will be this value.} \item{\code{na.value}}{Color value for NA points when using custom scale} \item{\code{ncol}}{Number of columns for display when combining plots} \item{\code{combine}}{Combine plots into a single \code{\link[patchwork]{patchwork}ed} ggplot object. If \code{FALSE}, return a list of ggplot objects} \item{\code{raster}}{Convert points to raster format, default is \code{NULL} which automatically rasterizes if plotting more than 100,000 cells} \item{\code{raster.dpi}}{Pixel resolution for rasterized plots, passed to geom_scattermore(). Default is c(512, 512).} }} } \value{ Returns a DimPlot } \description{ Returns a DimPlot colored based on whether the cells fall in clusters to the left or to the right of a node split in the cluster tree. } \examples{ \dontrun{ if (requireNamespace("ape", quietly = TRUE)) { data("pbmc_small") pbmc_small <- BuildClusterTree(object = pbmc_small, verbose = FALSE) PlotClusterTree(pbmc_small) ColorDimSplit(pbmc_small, node = 5) } } } \seealso{ \code{\link{DimPlot}} } \concept{visualization} Seurat/man/HTODemux.Rd0000644000176200001440000000405414525500037014220 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/preprocessing.R \name{HTODemux} \alias{HTODemux} \title{Demultiplex samples based on data from cell 'hashing'} \usage{ HTODemux( object, assay = "HTO", positive.quantile = 0.99, init = NULL, nstarts = 100, kfunc = "clara", nsamples = 100, seed = 42, verbose = TRUE ) } \arguments{ \item{object}{Seurat object. Assumes that the hash tag oligo (HTO) data has been added and normalized.} \item{assay}{Name of the Hashtag assay (HTO by default)} \item{positive.quantile}{The quantile of the inferred 'negative' distribution for each hashtag, above which the cell is considered 'positive'. Default is 0.99} \item{init}{Initial number of clusters for hashtags. Default is the # of hashtag oligo names + 1 (to account for negatives)} \item{nstarts}{nstarts value for k-means clustering (for kfunc = "kmeans").
100 by default} \item{kfunc}{Clustering function for initial hashtag grouping. Default is "clara" for fast k-medoids clustering on large applications; "kmeans" is also supported for k-means clustering} \item{nsamples}{Number of samples to be drawn from the dataset used for clustering, for kfunc = "clara"} \item{seed}{Sets the random seed. If NULL, seed is not set} \item{verbose}{Prints the output} } \value{ The Seurat object with the following demultiplexed information stored in the metadata: \describe{ \item{hash.maxID}{Name of hashtag with the highest signal} \item{hash.secondID}{Name of hashtag with the second highest signal} \item{hash.margin}{The difference between signals for hash.maxID and hash.secondID} \item{classification}{Classification result, with doublets/multiplets named by the top two highest hashtags} \item{classification.global}{Global classification result (singlet, doublet or negative)} \item{hash.ID}{Classification result where doublet IDs are collapsed} } } \description{ Assign sample-of-origin for each cell, annotate doublets. } \examples{ \dontrun{ object <- HTODemux(object) } } \seealso{ \code{\link{HTOHeatmap}} } \concept{preprocessing} Seurat/man/as.Seurat.Rd0000644000176200001440000000241014525500037014422 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/objects.R \name{as.Seurat.CellDataSet} \alias{as.Seurat.CellDataSet} \alias{as.Seurat.SingleCellExperiment} \title{Convert objects to \code{Seurat} objects} \usage{ \method{as.Seurat}{CellDataSet}(x, slot = "counts", assay = "RNA", verbose = TRUE, ...) \method{as.Seurat}{SingleCellExperiment}( x, counts = "counts", data = "logcounts", assay = NULL, project = "SingleCellExperiment", ... ) } \arguments{ \item{x}{An object to convert to class \code{Seurat}} \item{slot}{Slot to store expression data as} \item{assay}{Name of assays to convert; set to \code{NULL} for all assays to be converted} \item{verbose}{Show progress updates} \item{...}{Arguments passed to other methods} \item{counts}{Name of the SingleCellExperiment assay to store as \code{counts}; set to \code{NULL} if only normalized data are present} \item{data}{Name of the SingleCellExperiment assay to slot as \code{data}. Set to NULL if only counts are present} \item{project}{Project name for new Seurat object} } \value{ A \code{Seurat} object generated from \code{x} } \description{ Convert objects to \code{Seurat} objects } \seealso{ \code{\link[SeuratObject:as.Seurat]{SeuratObject::as.Seurat}} } \concept{objects} Seurat/DESCRIPTION0000644000176200001440000001532514525771316013244 0ustar liggesusersPackage: Seurat Version: 5.0.1 Date: 2023-11-16 Title: Tools for Single Cell Genomics Description: A toolkit for quality control, analysis, and exploration of single cell RNA sequencing data. 'Seurat' aims to enable users to identify and interpret sources of heterogeneity from single cell transcriptomic measurements, and to integrate diverse types of single cell data. See Satija R, Farrell J, Gennert D, et al (2015), Macosko E, Basu A, Satija R, et al (2015), Stuart T, Butler A, et al (2019), and Hao, Hao, et al (2020) for more details.
Authors@R: c( person(given = "Andrew", family = "Butler", email = "abutler@nygenome.org", role = "ctb", comment = c(ORCID = "0000-0003-3608-0463")), person(given = "Saket", family = "Choudhary", email = "schoudhary@nygenome.org", role = "ctb", comment = c(ORCID = "0000-0001-5202-7633")), person(given = "Charlotte", family = "Darby", email = "cdarby@nygenome.org", role = "ctb", comment = c(ORCID = "0000-0003-2195-5300")), person(given = "Jeff", family = "Farrell", email = "jfarrell@g.harvard.edu", role = "ctb"), person(given = "Isabella", family = "Grabski", email = "igrabski@nygenome.org", role = "ctb", comment = c(ORCID = "0000-0002-0616-5469")), person(given = "Christoph", family = "Hafemeister", email = "chafemeister@nygenome.org", role = "ctb", comment = c(ORCID = "0000-0001-6365-8254")), person(given = "Yuhan", family = "Hao", email = "yhao@nygenome.org", role = "ctb", comment = c(ORCID = "0000-0002-1810-0822")), person(given = "Austin", family = "Hartman", email = "ahartman@nygenome.org", role = "ctb", comment = c(ORCID = "0000-0001-7278-1852")), person(given = "Paul", family = "Hoffman", email = "hoff0792@umn.edu", role = "ctb", comment = c(ORCID = "0000-0002-7693-8957")), person(given = "Jaison", family = "Jain", email = "jjain@nygenome.org", role = "ctb", comment = c(ORCID = "0000-0002-9478-5018")), person(given = "Longda", family = "Jiang", email = "ljiang@nygenome.org", role = "ctb", comment = c(ORCID = "0000-0003-4964-6497")), person(given = "Madeline", family = "Kowalski", email = "mkowalski@nygenome.org", role = "ctb", comment = c(ORCID = "0000-0002-5655-7620")), person(given = "Skylar", family = "Li", email = "sli@nygenome.org", role = "ctb"), person(given = "Gesmira", family = "Molla", email = 'gmolla@nygenome.org', role = 'ctb', comment = c(ORCID = '0000-0002-8628-5056')), person(given = "Efthymia", family = "Papalexi", email = "epapalexi@nygenome.org", role = "ctb", comment = c(ORCID = "0000-0001-5898-694X")), person(given = "Patrick", family = "Roelli", email = "proelli@nygenome.org", role = "ctb"), person(given = "Rahul", family = "Satija", email = "seurat@nygenome.org", role = c("aut", "cre"), comment = c(ORCID = "0000-0001-9448-8833")), person(given = "Karthik", family = "Shekhar", email = "kshekhar@berkeley.edu", role = "ctb"), person(given = "Avi", family = "Srivastava", email = "asrivastava@nygenome.org", role = "ctb", comment = c(ORCID = "0000-0001-9798-2079")), person(given = "Tim", family = "Stuart", email = "tstuart@nygenome.org", role = "ctb", comment = c(ORCID = "0000-0002-3044-0897")), person(given = "Kristof", family = "Torkenczy", email = "", role = "ctb", comment = c(ORCID = "0000-0002-4869-7957")), person(given = "Shiwei", family = "Zheng", email = "szheng@nygenome.org", role = "ctb", comment = c(ORCID = "0000-0001-6682-6743")), person("Satija Lab and Collaborators", role = "fnd") ) URL: https://satijalab.org/seurat, https://github.com/satijalab/seurat BugReports: https://github.com/satijalab/seurat/issues Additional_repositories: https://satijalab.r-universe.dev, https://bnprks.r-universe.dev Depends: R (>= 4.0.0), methods, SeuratObject (>= 5.0.0) Imports: cluster, cowplot, fastDummies, fitdistrplus, future, future.apply, generics (>= 0.1.3), ggplot2 (>= 3.3.0), ggrepel, ggridges, graphics, grDevices, grid, httr, ica, igraph, irlba, jsonlite, KernSmooth, leiden (>= 0.3.1), lifecycle, lmtest, MASS, Matrix (>= 1.5-0), matrixStats, miniUI, patchwork, pbapply, plotly (>= 4.9.0), png, progressr, purrr, RANN, RColorBrewer, Rcpp (>= 1.0.7), RcppAnnoy (>= 
0.0.18), RcppHNSW, reticulate, rlang, ROCR, RSpectra, Rtsne, scales, scattermore (>= 1.2), sctransform (>= 0.4.1), shiny, spatstat.explore, spatstat.geom, stats, tibble, tools, utils, uwot (>= 0.1.10) LinkingTo: Rcpp (>= 0.11.0), RcppEigen, RcppProgress License: MIT + file LICENSE LazyData: true Collate: 'RcppExports.R' 'reexports.R' 'generics.R' 'clustering.R' 'visualization.R' 'convenience.R' 'data.R' 'differential_expression.R' 'dimensional_reduction.R' 'integration.R' 'zzz.R' 'integration5.R' 'mixscape.R' 'objects.R' 'preprocessing.R' 'preprocessing5.R' 'roxygen.R' 'sketching.R' 'tree.R' 'utilities.R' RoxygenNote: 7.2.3 Encoding: UTF-8 Suggests: ape, BPCells, rsvd, testthat, hdf5r, S4Vectors, SummarizedExperiment, SingleCellExperiment, MAST, DESeq2, BiocGenerics, GenomicRanges, GenomeInfoDb, IRanges, rtracklayer, Rfast2, monocle, Biobase, VGAM, limma, metap, enrichR, mixtools, ggrastr, data.table, R.utils, presto, DelayedArray, harmony NeedsCompilation: yes Packaged: 2023-11-16 20:50:34 UTC; mollag Author: Andrew Butler [ctb] (), Saket Choudhary [ctb] (), Charlotte Darby [ctb] (), Jeff Farrell [ctb], Isabella Grabski [ctb] (), Christoph Hafemeister [ctb] (), Yuhan Hao [ctb] (), Austin Hartman [ctb] (), Paul Hoffman [ctb] (), Jaison Jain [ctb] (), Longda Jiang [ctb] (), Madeline Kowalski [ctb] (), Skylar Li [ctb], Gesmira Molla [ctb] (), Efthymia Papalexi [ctb] (), Patrick Roelli [ctb], Rahul Satija [aut, cre] (), Karthik Shekhar [ctb], Avi Srivastava [ctb] (), Tim Stuart [ctb] (), Kristof Torkenczy [ctb] (), Shiwei Zheng [ctb] (), Satija Lab and Collaborators [fnd] Maintainer: Rahul Satija Repository: CRAN Date/Publication: 2023-11-17 23:10:06 UTC Seurat/build/0000755000176200001440000000000014525500232012612 5ustar liggesusersSeurat/build/partial.rdb0000644000176200001440000011662514525500221014750 0ustar liggesusers
vq:^"v$=*U;.lDT@yo, TQ 4So$WrtH5ml"*0`<ܕzdrV$P]eZEpPVȵ Qa Z endstream endobj 2009 0 obj << /Length 1635 /Filter /FlateDecode >> stream xڝX͗6_[{A+۹mihӷo`lnM${H#@y$KoGPkmQ뷛_7s?bN`-V^`Ef}<|Yq;2FO"šGĄQ{lyYx;w`#JjUxZ"Dp8rZwㆶ>V0Hd  e|j!hJ17y3NvRJJ$&oKY l2/k>aTH >$#9FgSfN ₗ)Na`?+H F4_ Э4KHJ"q%25° An@-é4+IS^N EHKr% aVWUMh8;RqL&[V5Nקp\j\2Cy]1 cyuSvA42uϵY3& 'gܠS 6>BF< 2"ڨ0H^R(z 2]~ 0QG4Zx pbXi_"5~ϒJV m lԨ X{HYoau'F;!`KۛMv;b!?Eb^4|$d%E^:%9 o|gT>:%AXP~D iR4ʌNҀx@MnYhCY!ne4Pd׉ް2sv\\GeЪķ Ѡ8wiDj5$85Ѡ@D wM}ƱOן)T]B$pjpI&N0j*nYjH_馧Y5j v,rG?[;B(e%Io ֨k CÜ2jYwI卪Ծa-YjiJRYs*]@&E[ͥWLKH Iڇq6ieh#BiЧ&wuW$\I?1yLY`7˞Iu ۪"ďG5w'0ó=޷3J^yU#enHg@Hyka*/C5ۘ%a;;{x!'1OC Ӧa?'ЇxۛO_dƑה[S\a=ubzpWB'N;RgM#*+lzV\2*jX;>F~ ^N\qY[!_G&_qm!xfy$ڎ`:7 endstream endobj 2017 0 obj << /Length 931 /Filter /FlateDecode >> stream xڕVK6$.Ѓ b-Q AE{"kzTo{{{^D<<1 K AI{pD?l&1p<3fÙ8_amOR~؞[j֛MxJG2/ 9hʋ8ƒrF#(cwBJV&{Su;RH+Azo[ǻ;7?C֕Z]ûRZQg`փ/6|dP˟B׊L>$n,),*u ] $ǔ0#L$Mo=[މS<i푚#GcֵZ9 J4n+B5ؒwa\jZh(X- chyD3?Rۨy;uZV0HĞ0O JG5^p;1}hM݋#yeu1UQv2Q2IZ.V}E_k(T>f@}@^ EIf$+,_MhT%eqG( RteDQ#BH핥JaK'E20CП=g8+hJ. j%jaj_1U endstream endobj 2023 0 obj << /Length 2276 /Filter /FlateDecode >> stream xڝ]ܶݿ<E}AQ؍/maAsiP}J]ZI(38y7Opn7=z}%7~|aa|0AvP .NX5r߇ɖcn=6p+O`1 pO֢<͋R]yu_6usd_~䜆^r'h|V>#MIteN9"MGV4eKx^x"Eq~0Ŷ,rȧ+ v4~ӗ(%_A+J#KWՄ,e_GВXh4]'4nt'1{/JQK5?Rb"-%cGewqM-SDk*WMDvۦ.DH#j6`40ѣ#79*^ ٸXpȋU Рfb@:?4RA '@)ˁ&$@%'6ۉSָnG.9/GQ k*VfZܿ~~~di*xz7/yP@}6AtZ*Z%dN m-" ,ZH@+7 `1,&}BQ`qtelɽU-h ![_Z /0\Mh/*`^H0KP$B6a9Jdf$$nEg^+OK "ݗʴAhobk6EaOȞ`Ў *C-u$X}-r^Ӯ^ꦃ^~YlZ mXO4eAEBHʵW_ZY+_(*"uA&NeM5K*RD1 KMW(;5 [.Żѱ*9q?X~2YDR6"=L Ѻ\,fj[0}6w`˻׾sQfR_Gye>By7_+N/ ~,׷"%p莯m  ߾,F+i+`P e1p鹢"n%-G&FQ,YewLa&+ =1Qȴ7 K(%^McKyb#,UJIz^\y~WCT"UYRϡrMǪ6ֲcRTc8E=aN7AE{0{.+v (ga3Uiֆ'lgJZ˜ʫpnU #.΢bdIMl`pQlVHL4²T"`Záܑ5Gbi&qIw Tc=\2whspdn?ʱн".;ӂ\y̛S;3b'䣰ߗREp䭸HTP+\>ҧ6Hbi~2M\8΁媱ݦ% zRv%R(Y"B÷*82?~5Q .J98Ci5/]UP0c[X#Pu+*hhIe z҂;q5HШL~Z{$Cd O-z;2#3:z}/qg_tUJOz5ңŜ4c]vig7粇n?fmiƿc_vhrYWaﱩ.6> stream xڝXn8}W}YYQJBݴ ۤ-Ӊve+u_3 Mù9?_'\O^_D4cz5HLΙrz 1z89X @YqdmU.V5z3p<  L$ U.뾈`YꎾR̓;E`jΜaԌ};8=lA8#Gk,ߟ?:?v C Jӏ&׋R#۲-6W+ (ؔu붞G^q[ՍZl", P+)7eѲ-i?+瀹,i}aZ숅q |ys9%[[TɦUU2VThbWw4=b RoTn4-x`'aw{mQO?<OՌry D6 66IˍM KgSς7h (ۏy)("b?\H)Rw<)j)+ dcExmP1f[fO;<͌v,EKi[4UIuYƒYAYGTU;XtX]KrM]9A&@cA4H@wmW&zZ0fw6H@Sjs8bD6?}xT: qCbX| #NYc/^^g$QV6ȡw;-[!1y&rDY!p04=`3^Zho J*-XNa':4`0"嶭"e@z>++0ORs@߮FulK:gۖgMqB-m F@vpt}Y&ǐ)$ݼa`jX:+ -]i}S*rF[21Eɠv w!o$ AZ7>&L ãt&N_OplwjZ"B`}D .mZ,A֚Iڨ\vXcX LƏRITNз0S s'!uּĐG%]WQ$ fDvTفOſseBoJi%5f>a@hx8 endstream endobj 2044 0 obj << /Length 1201 /Filter /FlateDecode >> stream xڥMo6_aJdYbh@KNRZErVMaFf]ƫ$a2峀?Z&UI'ohHvҺfrevP-IR'+D Vv%,,"\,2$XzFTLb\5Ac)_YZ_c? {a01)+ 5_Ąx? ul`=06cYuXMFP1{Wi&Kˑ'!Hۦe_LJA+B=wa;  pUbjD}Ҵ-5YD^cly@Csbe%E[VY@ݳFRbaڞ%^OeDR(8bDī`.7n&SSZO$l޷J2EiyHHBY {K~bG am4Y ޠS}@3(W RaC#ů-? 
YV/S\jT_@ոa"i]L&ʈYmCEoF+ב| PNL0*<ѱ A0*Ù#9LP";reMޒ6Ma`LR)PcJ&幵g-;3JdEv n̥tAxqP'W]Yu\45cm>$F6RI을(YAlN2j 'z pNѬ`8m8,]1% 6A!t*5U!貟N# *kK d0967ȐJ:{"ظaφƝMjT scobk B:/&3ZpKT1lJ3#0|ݍmi!ɥ㫁uPBfg+M CCoU#ܮ`]yS5w&q몭z폝'd0|Cjmy֌fً߷D\3SxcاP/VeP/쇽ސza5!KjUZ Y~z&>YPT&G!gq?MM| endstream endobj 2051 0 obj << /Length 1122 /Filter /FlateDecode >> stream xڝVKo8WDl,l/PE7v-Zmz-)%mERR%kL ?WE8)J#?rV[ rbCIF"$IQ`Y&?07-ͥ`s]0c$ڎ̫f `4o]*bZy6k+b?j^3V2ڲFrjٍJ+33=|>DPï>}?h ӆWfnMUfU~:OmfbB04X /w_ sv8gT6 ݀>Oe|Dȭ+*H[޶m $ N=RO1J` cb^3N0ڛlԤ c?#65?=j.`'>v]:6Rq#[S)~d߱xGobyh:}߽&ԀjLos=7gG-Rg.#STS?t_66i3HtXL_ީG<- A~`_wi޲7°%<&wVX֌ݮ|1 iA%`, z ږx囋z]dO^CR$ҦlKic7VkyV{ 9`س)pSA"@Asm:b#7N1濍-LH1p0 rjDy;^?Ɛ$F!BVL|!T҉N^,'ݬ4$yExRANoCyB/Q PSA1sCP@зnZQdM,8.7=K#$ )[&3뎒!_mMGiZJյ?|رIXwpգ;v:] u :HYwA45%Gl@KSAcT-8UŨ<̘x\3$EX\#ur@U W( endstream endobj 1959 0 obj << /Type /ObjStm /N 100 /First 968 /Length 1671 /Filter /FlateDecode >> stream xYMo7W^~Fǩ)`95rpmbđYn7Higp%8C)>!I*5 b6DéBl$*IFT" 8#U&W!yIhrT}U\T!%R2'pu^' ]J:XIRV C2+E9~u:7t. P(t1aퟴhNMɾv\pV>=_|(0 p%Ut* cT}D Pm^1;PЖα@].uH\ʂqΩC"#ĵJu0=Tk[0|$'RaĎ+P ZZ Oԉ¨rYՑ{h mxKVs =VmIJ/<n>_s1848@jU\ׁ ^H(&fPaaE<$5u4(L51v(ᝉLARr^#N:cpcƌ1넰1IbVowΧ2Vf܌nnގ=s8ڀkCL6cYђ4`=K#X64%Ud<3;G͘۱Fkoʈ}񋗨 AxC ߠ;N.97ɋcӽo~E?jbԼ:N'Ku}|Ŝk٤o1 x{lD|4}%5C`\[ь%d@`q>0l5 y 5Vi5Жi%vM)>)GєѴ ^Mh&!t@gT=mhbg/h*6P+og+oqʄ=F4 6"("qܸ;'vnj|jJ!Rwzq=Liν^vs/sC)e⌋nЌ[A:&Vɛp_^ap|a2Q ^+/ՒxӻxxdyZy{O#Mӧwq_ E-),p#@u iE v8X[ђILBhS O椕ܵe|arU{=.Sz+VϝYkbѷ|, K%$@/4n})QiL< 7I 9>U: zњVa4zu҈&))F܊[=ŽpmY]oa5KiekC<6${= LVbM|ox/{! e!QJ1FP <^N짯'twdtt&e|X樿lj wP-%oF;%0Z| 7OL4X#`۬eM({eB?7kR}l:y ,K!lW*ŭ9:yy-8< endstream endobj 2061 0 obj << /Length 1373 /Filter /FlateDecode >> stream xڥXKs6Wprf,K5;$TCI R _Hrttkw k-urt}N؉#/E\ȚD~l-S}d`"OZܿ]~RV, f%tWKM4ěž,s>&nEx{j 2s"p%绪N#'yFxⅮ-XiAoN 8PRhp&2 VZ4MqfhNЂ9) <>m[%Pjf%M[i~ڜ"bmaAV6ag9 IFc̓i^>Ã-yLĥ,@BU$H8Pŝg~1.}fG g]{99f ]S8KFf58.jR;bb.0j G}a,X7r%Kk4 cysnN8hTN;#\T%..CR3y`ĀY8+@29[SڄxZ1OB]&Ȧ8@ 9HE2ETMJ58T#fISZ Vzfs?74aY鍻t!]աLr I qsqr+(@(]0T+:Dʋ U5qf2&ǀ Gi)l?rfqFS .K\h s Z*hy~QYΗZRE[x[iBV Sx*mq˲ƲBd5DP*!f")FhE/@ڪ1@tv p͟pVk@hl-<{s{WYZPzq_窴 . 
.ei X!S+#J\[tQ⹉-6`W+ 0PC/rBc~'t=.;Dޥև- #=?/7=<[ݵ4:q3\tqb꯽l,GEgn4J㙵W[+> ~z+u#%C;o#ғ-wFLp}(;p,FBP$ڕϑ΄ ,D;SVES[`o5_֔d&rRRā)SpP'Չx rЅLػąUг jTD:*DGk(+ɴZkDŽճ}0б醬8'd}|}~;}0Y[Ci?zj J ]WC9o>/]ӥ0yiʿ_\׽D ?Jq endstream endobj 2071 0 obj << /Length 1270 /Filter /FlateDecode >> stream xڭXKs6ϯ| F9dשMy]{{l3$<&ȯO0<8{Fj5_-4KmfuzZdFkT#e:iDm $+4Ct"笉WΒ-g)㬌8NϝZf0 `AM9}b܌Yڳ,g.o..~@<\X%,?N̄sfq=ܦ6-gGܱW}e!`=߸g|[3ҝ:pȴ A3nKߵ+z8 PD"ӷBTTmbq3!ˆtoFtfmkr|VŘKRǚHD>KZUc i~Oj]ܱKPr+U ӁuZ„9q e\Ob("3Gc粘H8ff808IKr绒CGMݴ-5ѽQU ixߖٿlmU(e:W4V8Yy.p7 AсvA$@`T.]KV?10Q&ptEp3E]>gۚmC;f2[:aR~x4d_^!$XmE TŎb2"A I(z>9nn,.Lq/Uq#puˇiT# /_^GrMl0Ft 18qUۭ+aWm|p@+[0 U`O2QY,BJ1dk ^Ox%܁rZ@XJ}j ^ヲ|AVa=zL} [\oxK-Y  ƜF r *LY!TA{FW2`Nw {"YKx?1}aCK8{PT,mqv>U-N:Oh9{ r2 qRĺ1&RgasF" cV7;?D2T\p\lD/:}"\1R#- %5Y_)bx%zF'$J|TtoVhiV`(q؄0(:Bs!SW<szNjo=~]X,z>dZNHƭa\ed EK& ''.> stream xWKoFW.'nZr{H`E$&|\Ү}g_)UR3<_&VZ'V`\/BBPF*>ً4Y҄-׫7hyp&xHK@{L ϜxRytzZ/^-ܞ`Q0מ:3& ׊mXŊ}>楦 *Et7o1h~2atcl~>> *nq)}|X.9 /Rx6Fx?qB( &gE_qPA7eEsǤ6gMEkE96fAr~셰2A% @8we%^k3ٔ"bMF ܦsv܃=xsjHsWU-}S.g<)Rh:s!?hgqc%o'a3&/^hBz |WQ]ͭjk.=|g[3z0;!ds,+S\PmTE(|kx-0|qEr&7'0%VV zus57 Uo$͠Ad̾mzǘ];)^$EWxu];Sj&gmBkz wJ_Xt qE ʈ𺬠.]t"v)!͒/W|d; Gp".USҀvtãFĔhnPp / ~;;R'~r Mi ]%h#|{ endstream endobj 2090 0 obj << /Length 1028 /Filter /FlateDecode >> stream xڕVM(WU}+{VlͦjcO.IXB6;) _HcٚعX_?4u~zz$N ȋu DNYΗy-~ H?A*O<?X<.!5fkNnvX-&E1kzbTi(QbH .96in̸G2;fgW9;Q#f ۍ$=0ό0&>c*P04|5p-F5#;p?Y u^[AmѬhv X>A&1 h{/e$Bhӱ%ΚJ E+i/&*'QMudu5f}~5^>rd@'2VMpXܨM)˪h`j[sPz=oنy4vVWf!j$-7/ɝ8x#ioZ endstream endobj 2099 0 obj << /Length 2305 /Filter /FlateDecode >> stream xڥYے۸}W2Rʒy4]޲̸u*"$cs EOh4@ѧovM>7q:Y-VIL7aLR_$jrO~1e~]Ty{S]7M<2EIJ4)<sOwy'5-=u6ۗoX_.Y{iX-'u97`|8/Q_c3;V̈́Artk¨z/Z7df֔'˜#Vzouil!Աh(#gTgk|mv-dUE~X> IZu & eY;혋YO_ЁCjx?]QVlS|3㟼t"f[1KdИcju5 v>wuu<4LWh//9 L>~Wf5wb&"A\p?9Y }5(ƒ)1i$p1RkHl7Zg /ZzF0Lօ:Бe)UR B3y?A<<<0EV}y'lzw9OXyXNy:EMrD/Q"oM& 2oX?f ocw=D`#yLFLgs4dقg73*H$Ԫ kffqBze&sh E. 3h` ;aG,.;;qwh+ hv v oAXA]R?hUxB'g#" Ov. 
}ևʏv"7ugNk%o6*Nv p*̮yMVņLS7UiѤqFݦĺ‡sC)L ʔ{F65UF:0վEB6/J# o62WЧcPÐVNp+QN3HnאbxO?Oj(&buzAfr n)/tD X3I,lJkk AG-/ …A8'S(*fĄ f@3J JdrֲMmŷwSB՝13(kRbR`Tìb37زfLVt$MO/uۥU5 yVVeUiݦWJ&'*tQ2awjӻ]{$!wmsgi]SB"ְ(TPDUak0V5(t>6}UZ}.Fwl2W-:gDg wͪ$$, oyF $*F{$9I`I_OŃT-;$"dɹ35A;]'^$St SvGT]Rt!K MQmMR{5Z_^# )%eS6׭V]ԽDz, xV+͂7'w!/xVL`a ʫޯQ5};W9 ,),֐wVRFFSRhuj,ݼ~NHh*8$͏:N6ӃG'/@Ι%;X[vvn_?ivijQBOjl[I}*r}6Vc}ĝa髼Q#.FܳS9l=' _@Tni[FKnəy^$Av\xQ hc2I5 {Vs endstream endobj 2106 0 obj << /Length 2238 /Filter /FlateDecode >> stream xڝYo/[mQ`oWlM_zEA˴M$lpxv_y̟n,y,}, Fy=趑rV]_@#;-D8D$dJȍTй|ȋro-{IԦ5{쎽ڐP pA%ۨW]/*vDq((q5{UY+]zk]5Fw]۰]ݓ߁*r-C6nN.E2:|Ahҏa{ꉰ*Ua~hx%m F*Cd#d0u soJe6u"FbGn%!8SQfW 0!wa3m xgޣu#yP(+9hP;qz%nmǁ~l"שΗ)iIa`MΙUKX7̈Ù&]|?t 6Xq(5bwLvT!%{8bv rDl'b'9PtbW^AXUikvvr2@dH/;UL<fqzLַן\09EV6߻F:Sar3h`X׏Sq,*PmDΎx>DaǐYfmǡN˵`ð/yz^ dNZ11ݘ5wo2>wg2VKh+,Şя 2R:3+R@|Ϗއ=(ORj)΁`4XN[ԛV=(U\a, E`R3 |ӧbNj~yg%M.z{+vS.]eQ]骂uK)D- g36}D|,`MR䄊3H<%Øz[-`ǔCŴ}3j AR):w`  g&Ƌ#OƉiSu0'x+l㻜H az.7a>͂]90/uY0~1D{OK,szb)7D{&O _U׏E*No^GxL(Ip@ hq}C_IZP*V h•pozw@)?k7rPR(y^JXk]^©+L=oX7 z^*Cs#@5D||A=,B͇1ࣁX汃yHA3=wN,b` x{'еpP^S6>Rȁs ;IM9a @honp>\Ƹdt`^CAQ'B W~?ݞ endstream endobj 2112 0 obj << /Length 1102 /Filter /FlateDecode >> stream xڭVo6~_A/6P3,R=8[\Hqb%֪.)5#DJbwǻwcdGy;\.V43F~HpuJ>MVY^,e+5~w3=qUX2b1sUDqȥ(}/V@9`GT3 )t!| {nl3WY WP_ҸBv&p䍮y-ю/e3Z[&5CHCG ]7R5Shw7u߲JDtJrB4=>xxdyҿVR43~JC.~C Z2!˼8F> 1YE$(kLqe Ԝ(TUcPNuVr9De#"̘gڰ, r+Dy"!wYi<1fa\݃Ngs'7<%&9Cmiw1A`s=ذjډ=z/m<:Co6V86lH~@4w4+gV[44FVU3w&,4+kz@Z ҥ)hDԎˏo }/ۆۇ2>P7Kt4; wοzjEjI4C3S=DbUR*BBxYGC&e+L3Y{YY+jGq 4Ӭ%&`݁l 0ӈGZsaR2X-oz/J:p `u;^K} xf=/r#nisw^ЇJy9(!^(؜]N[@~;v^OCZ endstream endobj 2126 0 obj << /Length 1247 /Filter /FlateDecode >> stream xXKs6W ovL;mZ"5d}X")Ytt\>}y> E|>tIb4F_,;1:Q8KAyu x`ulq[=n}"&Q0bfF ifR lRR3~<[oWO5/qpmUz^xK^.%[*QKQK3T&Y` ,)9uh$,ږ7L{X[qpq$Ъfm NFᐂO g )A ɱv9uo%(zAc8*zhE¢iX4U4CGN,ԤQ ;?a:630I/F̱-]O㾆a2I|{PGV3 X,-͡Ș 3uiic(圉tW^/sQ آV2bP1O3Ơ=n4<_2ЍCQ &5.^~a]SVYһVASҙ^6BOrTqj[Z˩tש-h:Ѷ&Jj-f޺MĪhz=[X_aՖHmwx!}LW4KX$U0B Wڽtfb{*vNtҫ,ѐ5Tyw/x6`ww^>L$d=&\=&f?A 5,[0WjK, N.rգ&[ Ht4&@4Ȟ AϺH}̾;.lE=\PnI$(L |%i)x$ONlGA9Yii;ze.~޺dvOơ8_]I?.#GGx3.B,EBރS[Q`%#A6З6͟? D>;K-[HklU[hζ72B϶]ڰ䅲I ͤmg)`۝_[&v'sH endstream endobj 2133 0 obj << /Length 1460 /Filter /FlateDecode >> stream xڵWKo6W9RHzM]6qCY>1|f2KB?V #Z^Hk[eQ埛"߲U&mZVSp%Gz(^%RA ^/ Cd0 <&VV.~ +ol%*EKQ%0~-bRlA y֣^e ]rke lP~```Ijc-s0U&$Jȵk_'^#U9˧z]a:VI(D60_eL7 >| sQ$ TF|aZrhnBvdwdu۷`wylnx{5 Z{NRz]ޛAj'^A۫'nB!bd^}hV!fk`EQ[U #111Q1bQCxK.Efy*!Ȓ$?YzƗ>p87i&ߣG 8>iBz,?ͅ|w#@U__ȋ SR>gf^..f6,g*&GpsI체[<ϋf:udͺS 3#4ERdU\ ^8QE7 ȜT[V1c:7M]8!_7l6lr,þt>K}RܕXޱ=8cxij3w{Ц64:u*-5 n+ te[S2ufmVVq*8/ UYT`A]M|+29fwNCĞѵ5`a&OLo?"Wy_6u)OM~]~)H3,hrl) 9n3w Y[+6 WȊHB0 uڇѶz0]K8s]*(|- oi88d ]xMC{rbvvQ@ }%Eƒ~cD,*(Ū ((B@I0Ci((ŨKiIXQ#-r MT 8M߯~\J;"\-6eU]bF;KW@&;=$Bv#~J&Qܤ'PK 2$BuXX wX!Z2 UKL4*MK.ug jr]9 Ee#bF6|ph7:Y@<82$^/V@c.$Nb'I8IO$No 1|G]^պ:3WYn}WԳf̽"\ӡ]NuZ |3^ endstream endobj 2145 0 obj << /Length 1508 /Filter /FlateDecode >> stream xڵXK6W9I@Ňz$A&N{䠕 [Rrw%[v"p yK/~Z̮42{'G"{ ƈ[ރ۫9K;,Cqā3ʥX2'!obVER=9E0NH qFE_u$B>5rPͷ[! ZX3b+E+.*w0gQ$RDy c1f'ԾŒ!A[Qͱ?|lZqpld,?Ȫ0fm/EkfZb5f Ɓt,ռ˜oAz'.qVj(xчDu/s5eNz Y׳yCVnQW0]+ڹ;w*Tݑs0IPLKwv(ʲ9]9e0[oW>X!D^ oA"WuQ#DL#{0I$՜l:0"KW!˞wB}"#Nԥ8ʀ$.h-V58L 5Jm!ƈH[}=o,zCOz{4e6KzHivtWA4O+iRYQ[pn!bQ jd,Yf$(2 &6OY/tE!_* 0Eu+)ضP)`6L~妩m"(CL k?!/zjU-1ɠP0 c%NdXVGjjuVrSشK"Dj(!xdU͂W5P.\W59Sg42;5։L|@jJu]E_TKQ[4dSTťT @U 4%G~L:Z]nOL:beoXVC\=o(dA,%~;YB! 18:>&dr,XNe3%+l@u_N9AC+NP}ӻwJ`'&ζbu~vr! 
u[AS⥡#> stream xڕn6=_ 5+5MѢ >msDf_sHʶ5:$[{$ Ra-V ũQJҨ?muǕ}]y4q@ICr8כE ,S2 ,0LFy~VxٝiH4Aw@zgoVJ6ʽ%Rjd#қYd6ϲ{{Sr7wIbunx[^vmA 7Р̒DNX4P՚._(bVDYG~X7]^'x;aaDQR^(nN98uޘ0_oW#0jhv;RlwHE+E< 8%[ 8g]oӗ &ڑn9-O@=R8P0" l,{v Z]nc!rbT ;a͋4u"S#W} 1_"G̡L@j de*.}\A4{З 䶵H1(  NǵLBST-%/Kv/[QR)^ B`,$qn'5~7T?l>;!pk" ɀIF =nQugW6t*0 zLΩd:I Z VUz##t7-h~Ɂ%>i;F)$`rKtT7 $Œ0Ȇ@k=V\Nc..߼͖DI-Kvϗ?=M5?h!by endstream endobj 2057 0 obj << /Type /ObjStm /N 100 /First 971 /Length 1716 /Filter /FlateDecode >> stream xY[o[~cBq.@@.M@H[\.L {Bf쳘?y+5q7SES˕Z4@Pn\EM] |7_)*E굫0кj'AJjЂnYH!JEA5$Aԗ"[XMT ב{Ua$;(qq0L?eOU/ghe]B1p9B!1FY6q9Yጣkh% DȒ7r1B%0Vs3Y_$`jq&e,Bo :^{\\I':1`) hjI;H||hua1?81 4vM@$Dy`d-1cdz~:ZGơZ4H{,F^n'跳"x&okEio3 ײWws;m#Z|v~X0ym|~_ǝ?ip6y )p?{_ ˔׿4\^.\Bȣ?b9f{Kt.b8elj6}qɏ;.4AFp_4GD2rD9r|&}x/NͰwq>^?\խֺ5歮-[\[ZQ7X{;b5֭tߊI8MZrZE' ' ܶIumضW9EYp$:yI򑕎ώ6l fKQ{1vT9pzj=Jݙ\^޽lʞ'F/YSEfEM%Z}8K޴}YAFQG5; v+8Py$`I]fm[3k݇^=]{_ a |~L%ؗ9$t g8,A5PJ\Rpn tw!= ?67ٯEU 5 ُꯈtRpfc珐/HuP' smċ~< -G)4hk4&#bgqXfDz:tu)|̀^/%! Zsdz kCH,,PkUB2,~)X Y4\yP:Y߮Š˾sei6+4hBіJp'h.9њ:;lJ:yUW{ endstream endobj 2167 0 obj << /Length 1197 /Filter /FlateDecode >> stream xڝWKo6WCe bD,a[$Ţ@[B"]c}ʑ%{␚!(fO8$Oth(Ng JIU1˂__E3J' 2B챟  Ʉ)wGV\UVHlJ4hi}Bc,hg2*&yvdmEaN,{9cgSڵ` lvov5;d_-| JmjIdPY%Ђ ۋô*8iwLyV 5T w܎^f8ˊZ<]r}ɝѬC`;}L" C7Apܕ\+L8ͫيRm+_{^8MOiPN@ae Yd>sxRt=)VsrM~])DGbrCXp>Mbhħh}`gRZ9M$!ZQm Q$opJq>GB{&4kiH] =o}BZxWldxA3yg*rn qr~SJkzjk) Do áu"13y0qӵ2N!I5/a|m9) pj<MQ3vj jZ3|ʜkVJWݦbek ~k׍d oEMgz&f/[)&7)2d nZI+7bGR4(b>yϺɂipӷKGA^ ;\s8Y8 6c[˦{Zzv42D2 QpAJ|wM՜83N@nZƩBi0lD<%A(i%|ԋ .'W;x HG~ Bdͳiep.;6Z\pSt\`O2yJluC@JIo=FNc8MqilzJ1Om.$i*Of/c/_n+C;h^qfpoZGgſ2 endstream endobj 2172 0 obj << /Length 1095 /Filter /FlateDecode >> stream xڥVKs6Wpr 9cL:qĖ{I2H$&|Q}\PmƱb v?:[:-g(q2~,7a$8Ȝe|rEJEǨ+,\鉏27NQQr/iyh$EC-lRYs]4ѣILw'YBn3NHbQk!+\ۙϔPV(K#.e7Oh޵b]l[:oq}2iBs0"LzG?กDo"PJ`+~DP \XmQ4ɻæߴ]ʥE;zrը3AQ:m<3Vvw&~7Fcnp|#f}!a]v ꫨr'4sG9d!f^h^;]T"ȕ#z8#SS2G=`z֝8 w[Bw6G.7CމNrS+#ʖ fښSgV6hӵ|p?ܬzb%X*=1 3+1R'=5 9ù6/QsL&v_ S=, M #{h^~FY&-4>Pګ։A.bm57ЧHv4md*j9{ uҌпj urXTHξWJҹ}|RYi0"((C #4< Y`Rc6rsZxaUwVL w !*ދ#o tz(%)gփNֲ̩h<]^ŋ)`&@/A +ÛS9"*C}Q{1jzn"bpnIWF^޼}5FU LVEM֢,S{6Fk򑶙o߼EUdujT&!L5Y UI's endstream endobj 2182 0 obj << /Length 1780 /Filter /FlateDecode >> stream xڝXr6}WpR3BδypNⶶǓ(HbC /ӯ.HYHX .ݳ 8rN~c+aIliqaZ,kn؝~}D\\fC U-ڬ,΋t] =qFP725Rn:'ܱWFxKo(֔, ,M8׍4M)o-`dҚiٖ_-$}mD =#k=mYmt iY,m͆/c64+7,d껑aIW?~'pr]')tsjZ22)@5 ,@g,86쪬X{Q(O<)~\JZ6RZCIb uf[W/U\u`Gd  :vթCUqط4] 4[GE7Zٴ qf,]{H>Hӂl uiUf |7aG P#3Ի]XH7fIh\c Lz?=vӴ=O_{,N\ )DG?eE6dEmRrf0N eBs47 8`"d1D)%vFQg߬.T8`R"œ:Oۃ.ҒEM>R,RţT|j3b8(!bqJ^MEKQwś8 RᴚoүSss5lR`mA? 
9a?1~ޒaMPO}3?FxGBs;LeJ ::O]{A~| lE8g#|>cDxMzcٿr8et|v-F.q2|V'Qhn-81w4ƨVS(\ SXn}Yv1;~q,?Ƹg˭tsr{X KbAMX>p]crgDHeI||T@>:,(Rx5;G.GW牋Ý/peYlu٥$mq}rܣr.f) nj4 b^Yy 1tԻDjsj_t o2wZG]sSMEni.p;S+8lh#'a}]XؕwsC/3W(`[ endstream endobj 2191 0 obj << /Length 1536 /Filter /FlateDecode >> stream xڝXKs6Wh|f" HLnjwD` PJv_],(2bobM~=yqvq<|XM8c瓔p.b9ܔCݩ)Ou+bӴzⷋ8s$Či k&=mfgQ kX>kVױōtA< 癯W0n00Oh RwYX}>޾3LgZ˧H[R udmR[=13͉I/>~_7VV4Vufqhf}g);9~q|TU [YթY*2AQΙ|s'm:UQ=2r뛛_qsKu2*{*N~2VbxvtYBue RǘK@\4zKn~kBx(a`LҐe{y,Aq g]"˙fqOyطBn :`F1AH#s(tN]ʭ`,E|HݫɊұeI,XP:&U6:,indvۦ_o0z#]~ ݎNdc.ּT< 4F:ޛ:f<qb/V=U'"!(J|/{@HeELV=1Sp1وQrBJs ɜ Я@{cEGڈ"i&"@~4P.?¥7m~ƶ83, yf Kď ?zrwG"{\s3SAW;KC~#rPы=&`: ڗY("z.fuD7]y74n?]~姦FNcו|<2Z괞>8x}=; Z!J/sV<ܐ> stream xYK6WȞƎgש뚙l hSBt)UG__wSl3 f|]RJVfQ6{\ `q0dY\o"YǛ4Ìi5wK-a:(Nj۩ ,G]F/ @OQv~[A0*b-l*.Q R5̉%of| %lUWg#ӗO|<Ǯ/ھ~zx?qsM,r& pX Oænod)g6#xV-kh[Fnzk-MNN-)K-LCex$`$7_.ۦڒzz=Zu;;$#pqӭ'ˤh6Ucy% &/_"JOt%rVOA|76ML".eݴ( [+ ^/Kh@ q{;E5= ˈAI#RF Աpv]}AN 9NK'pƤd;t3'xqwposЁדſXC-lQ\N@-|HL+qF_F1l\FQ94_ѵ f zN{?ѵɋaQ34Yluv6L1Z}xcҚ/&oakv#7N3P>*_ 6t}+{QzTk& ]%*XXdtB󦽨 KDQc;F["">ik+Q.m#EMh->gc.w\p|;wDm9jK8]m7Dr@ pa3oJEhZn-X4DAXXp|AirC0Sw;7*n avap? Rᤞx'¹7dAc7`쇲{dOvtSΛ5-&sطM&MxDk"# c'eE΃hE5Ʊ;aƪzgbԸJvFA0n O%B YI$~V%5~l0fI  (Fv J߼'%n rш _~<@HeJPU;I+Cw3e_a[I6a,˚.Kӓ㈕=;s.n ]d˄ Np,>n^)WN62 rV8'k!55SaXx)K-\ HL= mIMYo&OzMF)wwhwiPp, /:DmER~q70}1Ox_;j ιg^@=5D{z:&}N6 >rŝX'*1špjRF 4# vC79=4YwҞzgu݅'ڹ4=OZ*W H8Fw^haYh6nY )/6]2 ^ Ka_ր_EGO j^~)>4N^`^)~G,NBK}|~s$/۾o6(tN9koGa|S`XstpW%(5v2p I!I Y<9Ә~\[<|9zud+>ӾJo`lww#k}-R^+H*+)pش(ۡA“oAӀa|OĕOBѽEl+ \JI>ݦ;XP\|F(J3uQ&0`>:&@NU@2y+,NS%&akۻk5Ψx+(.7tj^%iai{W5W{`,]/' *csc̃/uPƑXgQґJd.6z\4Ay}C^È|/:KX棖D_ m endstream endobj 2210 0 obj << /Length 1104 /Filter /FlateDecode >> stream xڕVKs6Wp 5cBr?&6Fj/ It(R 7ni"B>.ɻd~d-0V`82BP-UUSB>bvi~m1&(S2`l.kEf'a|Fr5%47Ky\΂j'Y_Y;Z#,}?pcfa?H3cLn9Uvc>.f[=kpI%0aNCL?,;'ydCEH"I q7qmn&m&'L=*ppDNj S?/\(rJ^ ֛7 J3'M#k'[3VT wLJ+~NvR|FE ?A0F¨5O'gFcVth¿#CMYvŒƞ-+ͼ]߳΋j {ޑ>PgO.8]''_<~tƐ:c`(_5I p6p:fJV^MFҦj["D/:jd9F1QV]׬]ǣgk5: no;W@?~*ճ,Fkvͨ> stream xV0+PD*/6=lM*aOm6:V8z223{Xz?zf wnPZaba1J+9~?A>:'"'Vb4:FlAL\L,&"e 룔uEu0pYY\ CgrDPn5a+ !* WEr?6f䩀]T~#8NaI/kyVO,Ev4Y.W Hc.̢.jjZ.es3Vjax`MjmΦncxHVräRhl~49 twߐ[C M-"r+$)Uzvjq]揀[ءG(?%cc ̞ژC7&LuIٙLII] HbY@YN/y)'dC7U߉ջOH{Tޅ5j endstream endobj 2229 0 obj << /Length 535 /Filter /FlateDecode >> stream xVMo0WDoqaӚiSCKO49`6m C\0y3wi!4p"ęN1$r6UzFC.Ge<0` 6xJvC$$k; 0EkBdQO1BKkiva&(og\.> stream xWKo6WCe bDa Ei,H{X{hJ*R;|k)jz"E~3[Տ۫ ^$(H ycb-px盇p}#@#У1EBۿWsX+gy/%]? Z%kX􍂯^HG 6>oƊ<筙%pKkp59~g 2J3`ú E$AqAF0:yڼD<_,׽z+2[Ej13q/dxAyhH0O"BRn%*]fTږe]* ^c3*yx <'ew d%$/cQtPUԨI刏7^"_H|R≵;>~5|ч4'aD`,{ͥA%*h;3I͐;s$Q~)iwt02(BewHưP#'n[]8JQ_QuELBoZnXCgF}lNKnkԴ-E|k.K,͊*f^mN%3@yV'"^υϤ%#x~ a.k¡54™Y67o-5ً p1uD(F]7G>?vV&B;f#0Ka8ۏaph@mqRPDuf*RZ2dC ~R:`ThwIt j:y>:“>nuJZO휻5=0n/{RP_:YN hgߚ:^v<0TQi23U{l[()~E7G99duOg+ܱ\5ڕ#~i-k@|m6}pl[|,BLM^,KX4P GHWQ\+{I}آ+9O*YKrt {n߲jޅJ_ZV?0Ν|;?ŐȘ"௡3**|fs &Kh.!lП&`c/h6La3S5u9\I-3;2jbme/5U2۫"V endstream endobj 2238 0 obj << /Length 2515 /Filter /FlateDecode >> stream xڥYKsܸWL"NJC͡썷dK+u*!1IḃeקcDENtFB[ݭo~}u!NW%Aݯ|s(Y&a-V:T]\ٴ~!N>=cDZm+ Zz'kNvR4͋y#c% d3cb:ei Ua7rQ<0kW+"pŋBї _^JZx|n׾ÛXJñ7 {DHmv A @[&?ڿJUUK;X'!p'p3DӴ;Z$u A ?40ON =| I2Z/$VZ+;vjFskEK;lAYxnd{wkuiPHQIq^7AɢW>Jz#&Uѫ$K)WSZ=ޯJήeb!Z e%AJgψ -6OnK|ؽF;Zj&,0$rK4+e/o(^{j>< AP4wlC@zUv447QaE7{z`Kވ? h͡=1X vCk.B8ok5h{i9Χn(ݷbqQ4VŤ8 =N915ю˼3! hr G! k\yo4!HH-c]tU=!> HS5uw@~J+ei|N`7$@5Yfř sǣ([MC#qTDHA=s` hj`g%SB@ܘdQjV) DKZԳPp̢k7l['i:վNLא }й]2A 3Z*v-6T `XEW[}~p$c3'~"M4r&Ɯ&/7!#9d,3O%I3LHHMn0m󘄃s")E16W۩ZW'&4Ex*.."<3)*-b׆E j5\p\ ԈR RR4ԇ4J`B8-kO ӂ!|ىA' k_Q5"H"@?EEfG$`0 `ɘCTm$"dai05YC:"5YУ:tD~]O3APS;BJTmoDe". 
RLCFm!;Td]8|S≟ Ymwz>Ǘ j&@3 ƀU`Iҕv5*|'PR@DVuA6:sxM:JCztzKCu%.:j#Ȇ%58ͫ7hidL4NM&K#MGM(& ϗf9~?O›:s*?2ψ[xLgB% 4x EN-WR~y#l kKC3ș77I$uM,!'#9nY=qE ޲ ]J`5$&|5ئNdeB:wƎ<(T[岤L1ӿUaW >wywr?'ȄM 6!R1]$^%.K)~gCqd,(-o( (UÑ;l]wl\\u]@n`^4oV^c]쟔M|^gg1>[(o Q =a2Fl!=agٍ;de +yT浽JfȷE`@Ҽ[<ӕp%/j[x۾-@6%tuT:d>h[X<[~7[`u^._> stream xڭY[H~WDđN۱9pVYAؗȱ;w}[v Z!Uu3wgnxXgC/mv3_Hkl~wbw/haeZWٻBiZ̕ca&>vhW7"o-T47x?Zk'id[4HY;1hɴoI^9`Tgi?T:ZLhمyk0̿1aN0U]I6י]>*9uE @Vw/?nƛy%3 5U^Sا"3ؘ =R+QIǿINXWq`gq&:7ulEki4Z -nKV2-v[`>ALf0H_Z$ibNTva^Y55WnFckN";#c'ɲ sM72; +]PLiC~SEW<`$~3hU*WB[\*Scv`haޒCyō6nq5ɚ_fk;LC,\ P~ Oa**]io[]ݡ}]XF&&J Rs2:};tfǶTC;Rr˽gض&\c [3d=n^:EcIV+<-X򏼺di 0X¾ 6Q7@ endstream endobj 2247 0 obj << /Length 2171 /Filter /FlateDecode >> stream x]s۸=B{(5cR6>'Mr[N@$,1! w Ji\O`sX,{ۅy:N'AX/{a,RƼ$rOjd#z/(|49CWhW Rtмѹ] YLG;at$pxyPu˜NoDIAUlE˶u!^N1gD靽MKkۡو`yO[ ehQ=P0 )kt,v~|OSipVjrheڗ<Lwb蹥zAѣfۊ (ߌؼ_282!F.Y<,Y0PX^k"RBBc+]?-1Vع=Yhp;LNVF[}>$zMa⨝jv;A8dԃ ;(^ ̍THMT7(hF0=E!+žQxc= xBc){T bS@|-Xl& 8mYMbFtڻIi[nfr{Y<7:{YdtC>%5Z'S!Z$ .fB/E(VqhEɾZnOea0R^` n[LEmPS(zK (D{r\HZl{9tʣQ8~dRq G&[zu Š@omdo1jt&@{ A FcϹ%+fD^`(ˆ<k7Ώ%-SB.C/LcB[L mL_OssRMpiFQI>'8Q6G3i:zbcR;F\"aM[D$Bh}_2TaJa9/L scQWPxğvKhJPEnQ%% `8uu-ڲ0C3`X4n-\̥rֵn5%Buua̓CLp8rl,cck]D s$ۥ.1?WV)Ory]k`~2]̉kc Ʈ9S {vfp=,cJ衶<`pX`Ib&Q17̒>88趂\%)lAXc83\K V\5vzRh/Kž0E-M͌w^jUW E,vSe_y쁝֝X00-=xҭno 5X9#e|N/K;a(^/ L ( `V-  /[n}!Wdg\o;Mgf*I)l O?-@?{896MI5Rώ}BkyH.̣E0ܟg> Qd`ߓ~'x76# kEdF 5* )qxFh~hxozw_pVCxgH"/‘`f?Ha ,lidB6BsCsjDGZD"MxKm3+uva8IehH>g[vvNޏįT(a5YFϟ ziԘ(;g284ߤR}JFD6u/^cGy04wZKRC> stream xڕVn6+̢63"Ef(2.8b:(h!E􃉜I}C=e~1,fXyI^B)[ާq&^͢I x fV6Ǿʏm(כe+8qW R`C0eid(,M9M[׵8h:,J9h)E Xq kJ\iJ}ErпȗK_J1P[`s|+.. 73cÉ{vYW/CիNȅv;vsր/5겟h]Sd,d> ~@! `ѹ` 3da=LI`JqFBUԱ{ތZw6jo:oodˈNߣJV>ptE[y[/ˆ±M?nѨu])xI eAC=۱l}r 8QPʵ4_Apm+pE`43'q4&<:@g*ƁZP9_!l֗Cwu9oRX]J+?tʼn:UG'ܧ:VY 5y`?zrAfP5nf};bwTPDxyq4NfW1Ҵ~֥I`5Ξ,^ R/Rv3w\޾W۳NJެBn>wFZDr 4R*eǡu~UݶήonYˆ $p-w;Y lxm%1"/Yl;E`Zb6mNKb󇏹k9w֟A ̩҈`ƿ0>XN{6Nn'" `k5rc1VeZ.P@Max endstream endobj 2164 0 obj << /Type /ObjStm /N 100 /First 980 /Length 1751 /Filter /FlateDecode >> stream xYYOG~_яKOWW!K؄R, RƘZd|ջf$㫫8)xCBJr1I$#+Ml|Dљd$#&e~61cwWP !_TĆXtmP9〄Xۥh4TT*4el_")ᜒaº"sGdx2zHjGݝ8BZ> $2Ȭ-pDanN@*@1L1$Я0 _A%H0J* L(0|b) . 
EaÊw4_!oSA8|p*'`Z.Wd X(.@ d;רs焨WHj>B5zC:86:5b")<)L&B{D&:G64Hnw/W!PbEQ,yD@rTR@339Q`ŠʇHp$1 4+XRPe}UCE;;4x6h?󏗣?͛伝8Ľ;m~h5oO> lfN8Um8I8 ^lt lfg4G~|<6͞Ov7 _ׯ0Bъ[aC@$ͻѬ gw~h/G td+<"XS Ln'_ gN+_3X 뫫'I+7Fԏ[[dp{kf)={"`q^X81{˨XCq7Au9> /VxtuCO ;~DKWB:+WBGś_]foXySINhb fwg)wͧۦ9_!,6۳  WO@ea%[0W{3JmDXH`N&I H0i '[osXY+ۖo Jlp%6ršjVnC b}OLL}hZ$Cjw|/MsZsBLu$/Y]dw}?`j6"Tp ZfQo3UBYnvs8fȜC vz?n^y_s{ʹއAy ~!֝ږmeymeZ][^rGmcZ^jsJ[IV69v#Z.5 &4_{5<ݶ:{u=I>o%ᆞ>#_nG:XrK@[Jҏ^d8ht6tx7ݰwCVuF&O$3 endstream endobj 2266 0 obj << /Length 1354 /Filter /FlateDecode >> stream xڭWKs6W(̈́07kw&M=i4%4$w EZ+͈o _f?f7IH[y8P^1J[U -ere&G,fw-7X>"{bD5jቴ,EE.~ssȌʖ+8;W[jrZ.v@I#ATm U"(c+[ y׬oՊ5mEjg0e6NQ#R׏ndξK"ފ|Nü[l}'NdEo# - ( SF/ȐF!6 i-ĂY}Zg7.h#n8iz%fκA(*rAD|q`ϻXCKxeo83, ǟ F5aTp^YvTx |s/^Ad7EOIBg݉S=U]Dު)ɒ%jCF~vUׯQ>7'JP*8u!*u1BTMN-@Ųځ,`~ZVцg׏w*V*TI?ɦmtk-~c(GXЄ^ Fl; Mbs^t~&C,GC!mhň:;+/l5O`i5!g(ͣ:E 1r*@%ฦ]HYZ7/9Ή;Sn`}B*߀֤O;m Nb" "}º(W.iYWdNxX#)|AA,D, *\56 *;R ^Z*)J҇w|9呫x?SY E_L10A\/hGie鐁s[ƣKOM[~6hRnIQ3tEԲܙkC3rGp?-]I"y~gŏ P[1 2&#LxЂ ~>fVD A+Uv5I@om:έ֖Uޓu@z7NJ1U{3W#vPG%{&XK&m8:X* endstream endobj 2277 0 obj << /Length 511 /Filter /FlateDecode >> stream xݖo0+XzX+0ēvf尐ӶNLf`j S\6iAu~6Mfԡ@C/t⍃?c}ĩW_N>j?U9ftn]ȲPWDU8"ھ*ٖsd&ZTzu.@]Mz>PBK jW{ %LVD;3A,k.,VUfeD!gn z1]]WIjY H^Rn+)C]qX.Ž8 k1 &V!3!Jr^zjS|%f>}r&Nf {p.Jn(+{_BO} ,qPojuF^v-=dH7l%{V`HK-1N ˒-9R*`o8SoβPMJs4]Ju-Fm=0y> > stream xVMs6 W3+FԷ!;;I}nD[jdRKQI c:rL9]}^_]/P:PN1YΟEŊ%v冋v"LOB$J;z+ϜqNp$@)NO`36=%/ny_0> 1eQd,~䍪*/QuUP.m#Oxb[jiʱV}ĸ[ʋ%EP<!Lݯ7/ƒ c zZ-ZkҚ)fpQXUQVD83[~\f( ,xBp${r-\Vir2? k[rT_FDv ԯAlޅ/C\T}IV˥eÙQFt)ѥVJp\NND4ݵ)Mz7bٮS&[kZg;)ӆ>7hJb`ho?l1.P sij :.%7Ȓ -h ^ "vDG3RVwa_mv?+Lz1d,u}@p9!8=}H/_JF9f@1(jJ2Dy pƸp;A:YjZ2'C׺8@A|#3  J6xA倿zm)9$lC~FdJyϨ A/%S0>&̗w\Tܿlr* +sM`zPޡ_G{)rB$/#"&]rAw'ӫG̪>>aUq©C%'uk(˜-PEWZVM"9tɶ疭x9m$F l'~ bƝ ճcKϥ1Q|&ME%HزzB΁څ~`8%-vժ5C/5m$Kų#蘿]_ endstream endobj 2295 0 obj << /Length 2122 /Filter /FlateDecode >> stream xڽYݏH_dHw-;-QEYWlUL#r":k/t>?ŃG]޼vKăΚXoB/pVU]v\60K$szP?d:+2)eLTU8қwɆӊHV7 `cO\۟:i% 7$wUNux!{/o8֚5PVmg_VcWV9H(ЂCudQ>W)U+[ެ*N+.PhUuhFy(F 4{FCт4xRL-xFL4~*Yh~6\ -N]bQ(tM &CY*3NPu0-Na!q5j>7el3QJk4űnG:WTwFqnB @CᯌJVnN?:C2u}8@0եZ 8vw|/3(ʳ.[lD}OhRvȉ}+=z]YV0TDTWhU)Ef8S u%th}Z毀bAҔ񩡲5XI6S#[\.L]\0+IKsTaIj{gWNWrgCNE_P s6;rE@!AKS+4a,Ź%R H'hl 8>|ePDq<)v~8nMgUceL/RlqѶB~kد RTc2Tfl#"}OEDhʺ[{MwA.V[6ZPvHMQTfذkמuO4:HUR9 NOd]Y16(}I ZRϳFf`4 nZ!v8A/guPlbRMQ" ݉ y-{A U)cy"ԛt5Y٬3}{:QzP+t GI~Ƅ68q}Pz6wt/d4q^$LggT0 _Y&N9\5:p:]#o$YG'b‽ns ҠP895=m J Hd:\`&? 
e;Wt*]6S7?kߠ4BptCP 3J(gqJ'+9BT^O]1s++~uvEêfi'Ə `|(xd8QIcdv@@8R%(*lIQQ-B 'pvG 8't0(V 88X $L#qoGrը맀ti:yoR2Rp=yLh1wHh|qiH'7"5~ ʹ~+H*NypA%٫Ey!EA6qP$Rq/|֞ dLIQX m ĤB VA5֚˯RaSDg9r_($޹K($7nP] ?NH0ǂ\\BFs{MyQ˾4"&[ Y~L_5WX1rm7Pq@GBzA 5@AULTCbo9u"G<^ocl_*{O`k ۸gj" %3#u:Rg#!0Fޕ~!Ӷ*hŖ_=^˚O+\z}塀4k_ûݛ_b endstream endobj 2304 0 obj << /Length 771 /Filter /FlateDecode >> stream xVn0+P R$EAKD;jKtb%DiN=ofgF` 40MX RFaf F F$zΧ+,cb"xnf{l1Qa(7"{Y{Da#cV9WY)~f9|޴;bkYW4پ3xEE%F:ʏV"oQjtz@l@Y\jd,D}`:rUNcM10IB\;R3 A2ħS~2 ^Xv[h>=ǩK9ĩK`Pb/p;tU`Kju+*56a.2.ʢ68c]9˨e~T1uYeSYPQVXsހMtm>0 HNa 5;B=X`}c5"l@7 ȃ8 .;V;o1U)cOM\ۜ1YYF]c)J4 &A{WwElyiy-q/"02' fn8Og`/D;E}=K]9pm/|/֊Xri5G,ƛWፁ ePPM)ʦ>42㥀9U[#)^kg7Ch胩Xs5B0$4B )}>&{y^&`u endstream endobj 2317 0 obj << /Length 1072 /Filter /FlateDecode >> stream xWێ6}߯@E"R ".R$iڢm6Hr(XQp8sx ~X-WA% qw "@HozϓxF=K HilVH,.-Bs&d|Y}>y"}g> @$PHMzScVmYAj6NS|iBYkzQk=fZokQ LU'H,gY\%Ux&⼔LQR{bxihLTOrm1 ܔ Qc@3'͹:hL5/Gb}wg -àO`/C$qgXU.#HbzՓhHܔRTJsH6 c.\NneBYe2o+)U@QתF@RPr,{@p?弨]E>TՎP ¸O$; U SU$Ym-[PH]~¯R$3s9S'|U}0!|U8NͶG#O͸.mH ia) 1jcYT6]h˳lYz,=a;+ Nti`:rQ]8 '1d c{Ҏ<' 5O1?{Rccp*ީ-׀ 1ao[k&2f:`N[/$q|]\Kb@` %E!&kS1;Y j@c)J`V\gX-Ӵ;=nrO23:[&QGFy`'[Ȇt<;ZG)|p/Ejкڰ #mq#ъ539Oon63_(}a(U𪺹oɁ<6_ Š$ީ)ˬ۳mOol7i{%7aDBHy8BB%&nmO5/\ߚpᅝ4 'Qq&}7Q`=`:w#Hnw endstream endobj 2325 0 obj << /Length 1027 /Filter /FlateDecode >> stream xڥWK6W9I@%%Q{H8@-PKeqL%ѣ w-)\,>f߼9zYݬid(<8cF0s6]wevn>ެtaDj,S$+l7pbBZ?p o'f%1`xϾP=JѻNoŚ̊ 7| ٛEg`ey0,/9ajcA$AahmrMh }_3Sݾ`^-,%hdu5TQS/Jf݀`2wܠg9 hc4CζZ}.?:pΓ&*BXSrbB0#R2 RrdL0" f(l֊ 3}]7zh1A!+LU bR6b Y+UR0=YjeJrQDFyY`)#i~ְg+H }3%dT=J]t*K'aT endstream endobj 2335 0 obj << /Length 973 /Filter /FlateDecode >> stream xڵVKs6W`|fL$3ӃDf<6ON=jT DȐ"2:`~v ߧI00] ƐL)қO74R %&z>4V !orHe Gȝa:6ZDbYBAY"0?^&K7axC'1F%0`3nL0,rtF$EQ+8,ui4"OrH؛  cCuP\ouS{eŁqLq(F;?3~XH ͇Kw:!_e qgt%`nxk63)PƄC5_EXh0-sr+-dS䭄]FZSzy@ ́\q1R@Ċ]!J̯?/=2vV; BLQ)e 7T|)U*"?yio uV}赗'6nd7)^Xv$EY;%quJeSmV@)z ?; R3Q-լK {vҼ+K-V6*~`yG 5Ln.o;MeoeJQPoC@a%O; >zw!'tUHqI`6u85l8BOb뫅V+|T@VK |r֨eƁޤOn *Ql =: SBAt6ݞc_r󌫓?|b4H}9=7cx>~Qb`DfP sEWp9 Hyc:Β iNFό7FFƝ^7]C[6k'/ſjg1͇c#:gXnbc-:/*?G endstream endobj 2345 0 obj << /Length 2020 /Filter /FlateDecode >> stream xYKs6WHD ٛm:@ņ>X3@<O@~mً0ޤnh?l|symbw#n7纔~Cd{͕̏un:H<8Mas|$z58Qv,pZ8H\YfmfƑ&l󙛆>-NbcvG=p}M}p) pιN#u枳Pq_=6nz$"BP箬eVUD(dvW:m`yZs#t;QPeH^44N]k#>G.UyUX2{Lx/YC^{љu>@UѼZ€Sb60/jꨇ&a;AKI&l[^mퟷs|[>_koH0:0;SJ] >[%.s(Oطj=m(ѠScUrUC y[\[EtY<+C`ۗ:c4@/awv7.v|Dæ_1˞]z$yJ^v1|*{2nTd{J> stream xڝ˒6Pʢ˩Ǔrzfwk+FpH<~vhbuߞrمWE\*]]VRXtIX]mWDR~vdZVEUZ$l\)m' gziEQm$%wp/ŷ3DgTgi\j*.I_wm Cg;4KnxOw3Jߥhy{+2Tg$DZc׸mޤY{-#lTTچ0Tmc3{þ v6q7Kc`.7ݯDkhhi5楝; ;l۵J:gI`0(m]kWi7j-xU":_ݱ)' *+ vsT/Rn_Jw]]{v yn#ٜR"EXj&W7ت~权 +?PBܡI#~IOtcǻlൌ &]M"_Iz.2Pވm@,%;ݰo%n3t1BU8u3`E \G PnmmQٚlMߗfH Yxՠsz b|ۓ~2}g\I,I ;gK 0&[TԱI@ P*9esݮ\U&\ݹ 'σ"xnWl92:mA{em5[^% $a d⊝W"Hf&=.65 Ǣm!Jũt1ES(OmgAY]׻R{Nz3}~8aX)!zǂ"kLټ{:ВAݕ~<8ɜi({oyNɳUHP09)(u+/qzыo._-ܰ(bx?#a8sR EL)Y,5)z%tM |stQZuعsR/aapeagZvp]ޝ@budZ A \бù 88 Pl:T,ۂT:N׌,dE 9:H@I@N.P:J8)RyNq;.IG%I=(],GR$q* f)pwys"Rj..ob,4!6C|H܁%=nyWK3|t1,8i.l¾q?΂$얏hV^̪Z ‡,erlPԧ=H D5j+ ("akOӥiv;1f#5~"LR[Ȟ(J!P )p٩Pw0\Ḋ uRTP(N> '- C:i^[XZ+* D_͕˾mp},pH!mx? %(Dh6JJ &{Ս/^3RƓj5_5_>gHNa:# ʮRbңSY8&ќI=,FQjsWc`u,~钉ߣ3UN`MghFPERE֧O&Xed*ȅF(ͩ!"8=υro˷o 01kS7 ^piYK<$u3TT 6i_w_;*+_v&Pwe)]1ufp{Kg+ipőCCAudQPՍ9%`UHP KD|X'g+.SOc˝%>+gh Hc TỽDw5") _ 7f$D.<{c)iLl:t eo1)ktuAFP&&8jn&-Yl`i_bLS#v^zIJ@Yu^ğd/nsY5Rzj@F/iG*J:=p)$i| Tz_GV#2,u1i`vn CSXX~+Csk݆,p]wOCO2˞<7Gsc {8b? 
endstream endobj 2367 0 obj << /Length 972 /Filter /FlateDecode >> stream xڭVn8}W&E]E @7/i%&f[):/ERdF'Q̜pGdc4#|!HÈ`W cÖZʦ\oV0yK%A ; _ *^픩?:kVeF~ | S!ZEZS輮.MzewH`Ufhů /.3;ޭ70 $H0~hP(-oPa945X:2_F-dEوuWMF0tcrւ'-s@yhxֵh]*KAV94E.w~VفqKLjNU<ކ={{;39^T*Uu<Ѫ%5u@Ck]fށmJ%hqG3;Uö_\/'&XDo2 D#CIh ճ〨un&_vLɨ+/Ea;Fx4}_̂p  7#B:)-_CYS \O|cf7]3LgF ⧜UGWrMYQaYp&@S\ڝ)~b6[̏`Ʋom7tⱧԆ-(ω 8oȶ\u7dmfa"0taDFL%ER'<#@D-Tij'U#-3jYN/S+Ϝ׊Z !mh5)%}Ӎ AU \2P endstream endobj 2262 0 obj << /Type /ObjStm /N 100 /First 975 /Length 1955 /Filter /FlateDecode >> stream xY[Oc7~W}=3Th%v)-Rw}B u}q6! >D9gֹ_r2(+7>+ JxX2T $AZJW$u&U/"Jb"< DSJ΀za)S h$eu9wZ-FL>XG &2`\\Xi)8"LxΫWSXl5֘L`5&H.SXgf$?{b%͏qlNMiN͝O}|]w7@o4TA;Qfx;:LK}~{=לj^3PxAfk Cp;d;sOl%io?wyTasH&۫<6wÓ5:s`{WO//>oV+g{R͕[ όKAv6ZA lT|q1[ӺXXYX"HY@̯= \-#ݬM*[+*Ӳ>sdiVrfl6o|a8ZJ[q.KB4w_]"V,+N>q:(Aq B&gDĈ"-Fm6tnk7:"Ӧ ߍzCN5v{ڂ3YtbkQoGqw0ZtC+/Tz.eŢW=baDgE}r#8bzG;Yo:GߖZ}Mm/%.޵J\:֊\ p,Y=Q'8w)5rgzlxsSj55v n5Ge}97vXT޽x5:Wo|xi;띏_ue:k\q `@pt4z7=AVP>73E963#V} U=yNY/N`NAs@E]V)lQUwzۻ?I _"To˳;O24:|Ȭw;[uiwz&p%ul{v^)yn~qm-5lWO&_?$ͳ2 endstream endobj 2381 0 obj << /Length 579 /Filter /FlateDecode >> stream xVOo0SD* )LZ6M1) C}98T83I5Mԝl{ʁλћx4p0G8u gNt>ē/iD( VB-(ʊ!4UJ-uw_F~_XEjZ ^):+K Ke={\)VWiꤌ\fr#*hyҋf I`EVZ` ϿȖ E)og&j0}!C3$x>`H>?ך7^LJoKJq?$@]\_i6 Xx C2.`;CꞰg߃?HjRMkdmO^ )p3TeXIm?꼩Kӛ6+}K( $ۨC.cMc t"tPEUx0}B&aCy`{Coʸ'⵭o){HR?l[hm}J/ÝNGhFNrpY6uĶB(a`FIcr ni(Q9hu,or endstream endobj 2386 0 obj << /Length 2264 /Filter /FlateDecode >> stream xYK6ϯ[crݤ,KXlU.G"Uܞ:H$AQ||$ew$^ |ߋtFѴnzf0fͫ+Мs.&Ӊaǖv<˞9jXagAc,G6QG7" h>Lܶ4cuq!˜F,yoFsmLH1PJiJ~v?xDʵȋHy7 " ػq}QKQ;Z!CDA䀦h@yg^QojOSSJpSY.R4z1^f՜`OZRڴe{Z)Cu #x('Ms,yvWhVɷ[ p,k쟑m09}~hiQ,Fk'DiΒa&T!? }QqD Ԏ$& T\wVPg!jId8{oUב^' 'u.$n`J/eOͽ8?P޳轮n, {gO_;,9^1 ښ|8ǩXFZ.BH쵨.zq %J /mÝ8Ev J =cSGCdtޖ @,kYx6wUiNpůQ HcCK~jJT 3ӛ.Rz$zW= @@Cl\qV/A^<5̜Ptq%B,{QY540hzRBG 0E)-J#\PB iFud ZECY( ^Ί&blo}t1֌eIvA?EjTⲪxe,i**koIBZXYd2 7p#fp2ERI6 Vgu:aˀ + 8S]9 (.87)MU&&{(p2RHΥ(hJpυ$n1ȡm9Hׅ9 8hXeڴ I9X׋k$$@q`,qT#1% >il>q(FkyJ08@\Xgz'so[>HSiɵKYÛ5ixQP ^nˆ\HiX^ANe$vYj)Q0ۆ81](7Ш89e$Ri[ȢtK50EV}PpR,dW#{bJİ0',D bK}y)yGd0Es>sNeʼn"CLy BZλz[̿Keab٪ev+t0yUY̚S6`3H" YEU3O4{Ҷnh&ŨL!VP+5CyiyIC+! z YʡY6<ʢqE "/@I X1cqdCj^ RQ1aT#:M0O2ax~A##vBq %x=<*Wq51^VSi`h4ݗ-w1H4Xy˳X]G#6~GuI2ݘ7)(H_hR!@}qceK"_ӛCWt{:~^a{I2ouu%l܀oDMeq=U /U-z؂/7hBgܞ_moQ#9i w oֲ9Ԓ*MQ&OqW4‡rzO8'"'VF|Cq}WI1^VՏ[Ro >s80h$55>u$ʑ,|\ ri^iX^:cyطZXQh|m~}ڇ]]Q`Ū qz褩6.L]*y(oo} mZElCjэS֍,zTHCҚ= y}܊2 TޢM FHDTEwtޠ`e>齣 endstream endobj 2391 0 obj << /Length 875 /Filter /FlateDecode >> stream xVMo0﯈@"qF" uA M6(,qqJ6dɞ2$Y ƈ%iR&U| N,e,I@'a[WT1Kif WW*X*#C{XTmUۀ5x{ZIl97Ni} UDyxmOT8)l `1";+_69~dW&WG%R`ڏ shY]ku]r|u0uPPvi{lTgj]̪sSL72W1YxQn}+٢ x7mCF3Rq wpưhN ΚãioZHV^Vv-^_\rʔ D9 9 shCL,oSi0[ NW'}63! 
RB^'PĒFY\:ЙYɽffgf%KyWm̌;3&,&KQ~iޗ l%a7ļa#͓pЖZk<ͥ vS}x*U*̶i5 45+iwZNy:U΄{3!d?C!A C@P%|q`]-ux2G5";}> stream xVKo0@̛͡CiڭZEQl{i{Y 4kzɉgfi~8pcжF!ئƄU=6o\lhpˍ1)r\lܡ #n#䥳3m{}gRH#Dm+6[28:*|(o8Ɍ))_dA?2U e6&LNsql\-`x0h[wqzV-Iyh1MIBh%8$5 * dlw1uiCSL'[Ħ$AJeGOyzGM]; <`AzzF7)ЛY [U{ewhbԙ͇^.h)6TX]]6&{Z궣?}NH D.2y5F;&dn#oKr|o;!ϚwaqĝQbR[s/}I#$X}U.C~Km}^$ɢi؅JŃ~t5 N^]sWy endstream endobj 2410 0 obj << /Length 1532 /Filter /FlateDecode >> stream xڥXmo6_!b5#(m؇ЭXK[EdK#eV\C̗sQ{o'/o&W b,nV}ƒГM}R?}j!do)~ ̢yQɣ^9Z93 5e%[ltE8b쯰)ɳ&+ٜ jUyUVn0ӫX*WUY3&3Ze1FEY*?3GTRNK&HAd-M2P96zY!}7d#Ot'ubSڡ$_y(3NlbPS`BO+ԲgtW/B""mr ~}.F&r\/EX>BFUM%,ywF=2 Eڙ}vBv &+8~iW7aiYjJ#W鈢<"/,O_q#447ɌpN)9U[&9a|jжBHeY(g{=[ERUnwIeQ4[c ]]o&s4 $э(g]YV,366 )Aq%t l)Z *{<O fq8R͠| kכ-AUN;6V\zϾk,]Ii;r RV_pp޵EV$_O%"rPOWJRwj\]pLK|LCXp|/r}2(~(G7 BlD6>+KRag~E0qvTMC%Gӕ,d->6'sˈ8;Cpv9d-=3VqTzJ5aiǜ]K, w*Mx$j,_[8Xx N މP6։\(g5Z'.4di هB;>>S)t1z!z+AsVJh/Iq\`^m"QG?-'Dl[hƊd |}뜽B}xjO -βXv{_Y+1+M-vhkW4;c3 {[| U~cUy]BzA7X|it 3i=ͅËm\C+n򶆷•õ +C c9p3'|о.DG)Rg LN:@Ep^n'^ 8ҭ#u|/'=pqt 0&%\oW 72(Bt7okY퀥޽]'i{ tBA87r tkU/lwL9B/;PC%@_5F endstream endobj 2421 0 obj << /Length 807 /Filter /FlateDecode >> stream xVKO@W¡T/ګT< !.*c/!wu \񼿙%h:#C +$"JB"R,B]u- I޾}?~K+=lB*,R{g!0(ण?❨}P{kyxE\av\BcqNJRgDܜ.зif0d/E;*EH'F < ¾oPpR\wKA5gs<2>eEY Q )牞F@c[v'y*%Lmd^0C%&7 |A~dvԑә!M仇O3(bnJgm.lA<_=ZKj3]̳i].Zwb>>XDAT؊"({7IŸyq=ؔluљ' kGwv:h[#*' ܗXpĺ&(Gh '81Z?֦^]tpR,p)'ik_?k \?ڰLz/ɳGZ2n2I=yMf$MS/3;;8ln".lŸ|HF3a5},", 5gqZďE<%lU RCoP‹I>'[B'P֨jpW̪G@I><~ sR"vw]:#P CSm[øbh)AD2-1 r-&tbR/BoS}q}^ቸ8av= endstream endobj 2435 0 obj << /Length 765 /Filter /FlateDecode >> stream xڽU[O0~ﯰxj%b8J{E ĴR&m\ N@;P@bS!h:}^8l-V!R-zL),]e<;Aщ(Bˊ'e5n!ΗBL,%Y" ?:8k8`N*x|-:U1z1|E%\AJQ|>Aox[wAR="zݲwb p<;%NmQً!!u1X&p\=9>4C!"S@ڟPaP[T(C߀4N/6'qIkbD5802Ko;A Td X1!m`7c_vз2e8p=_[,T_>jG߃Ôv:~lh)GuXj_z]r顣x'=I%TEYl [yjm ,Ų_c\nBzOɀ:f}|vDUuWOO6,&;+o}Ol)Ox 3 2>FTz‹Mlj\UY4PVCʂgrt #hh[9Q2kҽҍrKhEA*slo>J; 㛞.J0.dzˣEڱ]̯MDqKXZ6_gQ H܋ endstream endobj 2449 0 obj << /Length 1241 /Filter /FlateDecode >> stream xڥWY6~Z+VA@KGCuy:!g3S7, DA-8o߉Mb|t-29yE֩H3l~vÁ#'IEprA)J3K c`AeiRko2xvFY"QqpDF4g+ޔѡ cA6zqV7%^~ ˸>rcCIu#K]oeYUmzޛ@V1i&k&rQ]wXj?B1 \@FF³<8@9"p1s^\Fu=b cfsv3;8F>h?8+"GjB$ K iI9'=̂9Rsr'z9^+7)+r(劮 )s.379*X~DuTr8I̅iI;/C| h!u,]'UlWsO#r69#O 4e 16FoBVt# Fy1A!+IU%O$vW:9RJyJ:EDErAOV @-k8"(u^`i2]٫ uT7. XI7"^.\ڪ5gm:q@W,;'x!86 Am,k~S8)H,((Wm2 +n>Y #].[^+mWJmWiKk@#l0 UF7tZkwIĥ\LPyp[ty*n}twp\ԇK1D8^k9̾ͨ'}|H`8Qh+p%5ֳ?Oօ(w&OGl8 G;oJ6'QM'N#@XLݩ\؄Fk7^ť8tAjΓԆi4 endstream endobj 2462 0 obj << /Length 1123 /Filter /FlateDecode >> stream xWKo8Wb3mR;=2s!H7~-y)[5|3q {zf,gQ Op-(R@Bro>uT "X2J\$ۢk+%{g {+!N"DqOG۞{_9y!@Y;oa YS++yk*%@>U<),* 8a Z Q% S&DGqޮXSH*oRd5BL@~Dۺx>Y}"#MWUofqu'&XWBCqXsL#6(0o{'.mմ(i' !Y 1MTJww!qFGxg]Uteeɝ+rC[+jvɞTvCE-FHZ8>K~we+th+(fq"$,mP.|{Y i΅Z(SUJx' xՕvKh>Ys^[djD*J~q1cW2$ 6RA7 2xՍ`^X붨鸞T=MQ@ ͗Ѐko_"BfZbrGG]-g3ḍ,Eb+[o3ok4k/RUFDɕ,ym. 
0|38%hd*>ʌKA$ŞzH&qgΥ\MSwClكn2ᇳ8wUd_wAɾȈO *ajHNpS}Ln6߬jw.W S s[;Aםv >c?nاrQ25ppd>wWt&Ov)mAW{x&չshV6FZL*%w+:/f 1ʃU,8eBLkg>w,g} <3薼`*Iš_(j endstream endobj 2474 0 obj << /Length 957 /Filter /FlateDecode >> stream xڵVKs6Wpę'=6V{q|HHbK @%ڙdt]|b:N M#0J(`MPLJ~u L/~ S_COӈPFlkv~C%_+dfSUJZ]5guS'Ml =S١W P扵Oϧk0&a;jn)nlO`DiTy GScьne+ٳzv_]AJ+ftMպ f[#ȩk[}I|m2OI m ZKlq xlD:~SH3xV~ۇyTGFԄؽkZS~j?گ}S ULۇ!S*C endstream endobj 2378 0 obj << /Type /ObjStm /N 100 /First 974 /Length 1590 /Filter /FlateDecode >> stream xYKo7W^( N4h`A7W oh;5% I܏p( .Qfjj_u  (Q0HIC[!\ \׶"NC.[,F5Z] Qk'N` J["#"c6M*4IB=GU-s%PD&oxͦKwpGpvK۶#x o2>o:؏g'ҝGn|}YOC7Bn\X4>[\7<}qg *5 ؄yeFljZ[kz87ƣ& ^t |6;>DѪcqTQg \7M6n~H2-O7qYl9lKaUzA ~ "3pϚGOtOj\`F)f2srzz2;TXcBRxq/TVt~}̼rP@4r :V_+ D*>TZW zӁ=,`A݃)mٴ,z#Jq(:O.q}C U JK " %DǢiW#s'YODe33]1f U-L՜)_}涘_?tKnqy\mNi\(̻fxX=ԇkw/4p'Vvv`Fع'JJ1}aCI TϸĒ/:;\,V .e{}O!BmLDrt񾛿,'%q ~%d{Z}h*0T"%yogq/8$J陶̼Y9=.WwYշ 1oųv R*G$ܻ0>> stream xڵVKs6Wp|"g""=ni'uD%d` 8xʄD!v}|nq^\FGIp⭷QIH#^ƫh#%n[V.J h F}Wb-ԇscֹ6RoѸ1_a J#8ֲÿ,ިmnQ`CGEtw(y3Z0x+G'Zp'3l) d(#VtW2Ԡ9vr]Viظ)@bU]/Yru"17*$XRḮs(/Qp^ZM_feD"F϶m]N6 @e+=j;ʂ_z4֊ٖѮo$ P?5-\Ԛ0^HQ酚*^dX2Ӻ2Uc3'!&9@Cab#fϳv0 >a]O6.Ą̔?P6,TVn]I샹~=^}6 ?Zj+`wƗgz!8ى|`!'8Mw83O\xhکQة QWNbɐ,ZFc'$MW#)/Ƴ6oTLzXkm@nv(rPq(T5ҊCD,uK~-_pٜٓ-0pHޟq.{n$ػxDH{de endstream endobj 2498 0 obj << /Length 737 /Filter /FlateDecode >> stream xTMs0WhzJf*ْ?NghS.(#X2i(ڎJS8p,}o}λs|FC80[IBa`.rd|Hk&3w7;?>#Q/0c_c1 %Utۅ^6}:MҦ{|G*D;O@n8:׼Q¦G;`GCԤ`BOt.Y񚛥\%gvӺjY\VfʅY$U]\ߌh:UVև.Y֭#EI7EI0߂KR檅If^4)CJ /؛  OW,_QtWt̫N<iJ3燃PD< uxQ-Q_sp|~ # HBBikZQTqZݑ$<)U( )諄jA!.=Itx{Lv9k}Fѹt(e _0~R@R&o-s-#s`Y%@5Mb]#\kHԁu2xϷV ܕIvK40Al䍐1F*"Gfp |i#Hٻ"˫Ҋ}_S(bE̮nNBZO %* endstream endobj 2509 0 obj << /Length 958 /Filter /FlateDecode >> stream xڽWMs6WHD0E:qx==@BL,V}A)Jqx4h{o kk|:L[. ZzZbr5\K f/ojݢ`XL^:7v,|(_+x]ԂO/;/\$|LL /c&@b5(XgBs(X&Ȏ/w>@~2 Bʖ `Ȇ6Ɋ'EPj%y dzTZ1N_N(k/ t\#q' Zș?֧ECi8`n '1v%7-4^R AhdP  5˻)iv8 hC#P664GgA7 ? "mѮo"s|@i)Tm^h[O;W#g#҆lv#cBNJZyqeDU|?˔wdZz7P2;j[m(7: /Pv\BWr YrLYvx}Z\;^~M{5lczq^i\$uCO'm\pviK鶍c̽[G [MJݝvM=zy{2~zn0(C~p0hpv 8 yߋCLyrUrw`:If:Skdv#W~q_/o4 d endstream endobj 2524 0 obj << /Length 1212 /Filter /FlateDecode >> stream xWo6_!"J>m7g{HUURJ;~ȑ\9S= C:'wdzm 0i9:O#"@moYĸ|'h:y[W/>{aK֍|+", ƀmlM'.8/3;ֳ!=4LZ޼91=ڰ8ADMCͨ) i{(Lސ=D~|c1|#ח}W%/:,lj>jCt_R,4`yZ$b⇌ jFku8GƯݬGAVy*ԶlTQkZTd ӟ5KZm' I.pN=/IX}UT1i;?!9O i\GJU N.9M)-4/eb$cAȌKPTOЭ<dC=W[EM8:Y^6 g"Yn VHSb6le}LtuJ9=y7'5׏&1.g*DغIQq%0":gxefې *Z#N2h oj~8L^A&L޼[2\%9gvr\4piҢ$ႂr‹ Ȕ;( sFomdc;v. 1],G_GvlG=wq>Ɠd e藣'< a2\7\i|v(׺C휘Ԟ L Nٝ*GSb9F0T_4i!j+, Š\Z`;nӹ&*JIS.3 ڪ5>hJi3}nv,xKxly}۴/x',р= d5/KM2;?zP|͙ĬBYVCǢU~=yeF%qÐ>,jJDk[1|bv@պ~ h5E~o\dp 6i=ܚ [uNZվkd$^!ꛂzs^ۿ{ZkΔ Bf{mG^cB.d; endstream endobj 2532 0 obj << /Length 1222 /Filter /FlateDecode >> stream xڕVKs6Wp䃤|ܱ]qX> DB>t.D*X±6c6u9ywVB ڢC_ٝF= E.~ZEvزiBhۜ}uȍ~}y+yՈ<;Rל?BiQ 0V<\h`;hbBcv8;Y;c:$ gL>,XO?衷_l\ 6A=.{ E]B*jAcVxȲ"de6zzd_&A~џ+LWk\1on oX{n5_e@Żf>q;Nփ 0e:L}4`[awV \ 7?|ʙ+=O|=h#LwǙ mw0{j<xcGV5Cz /=h:\@@[0y˦XG{iHk@ lRx)y1mpaB`>'XUӴjJu0<{]ٱɰƅY ?]pVvՂ8se^'2ڶvSF036(SË? 
endstream endobj 2543 0 obj << /Length 1234 /Filter /FlateDecode >> stream xڥWKFWP X7Du$b0Dj_x\g{nlckϫN`6m#$ X'n5'Ѧ 88>!4nun)¦ďLFG#KފY/\3%uRx̚Qk&ּܚ/KۤmhJUӚ^B}\HA/}T`)R`ف kg&0N2ܦUg װ&CV$z,-)wϩ3`qp$|O'B)hQL<<b@%'rLkvLfq\3]#v◺ES :#(-IMT s Rхև?޿}8Y鎚\++D}6UhXɌ=Vkcre b$D"&^u2X[Sm9_=gH s>w ԣ[)+}g2מk RP׆ׇ#kf YؑCg@9E< s>/BD:5z$`.2$ۖ:.Q(MKxN+eQS\>Uj[ݔ#˵Izeia5+f͞r>cִjʕ@B#{>۶aҠrP$(5 $8ǜ;> stream xڥWo6_!/P)׷lp}HOv%{f:*s%C3䠍%bZtu ]ϫg=*ڏ>-S 裷]†#|D7T}'&5>q{~u|(.Te0S+@߼1ܽ󳏫^wN=]C`EH]Y'Z^N#I endstream endobj 2563 0 obj << /Length 997 /Filter /FlateDecode >> stream xڵVQo6~ ҇I@̐Emq"(͋g $˓lٯQdIv\gȐC5"f|t5%RX &QBb)4y7y繫J!AL,o޸cgoW@1ViXǾlA`nuy1b(+(uiw3ή2ckO g⍩${AXs}1XPa۲i7ȒMU4l}'#ˡ)u7]vilPXmŤB߼ g蜠:nU< 9d cM6uklfE9I@*퟾6CMv߳& A@ 2 endstream endobj 2484 0 obj << /Type /ObjStm /N 100 /First 967 /Length 1714 /Filter /FlateDecode >> stream xY]S[7}cP]J CL3<8cY!/u}Zhu:ZIL&C>i!xφ hB D(I E48<9Ed -u 8{CHDwMBG{(T >ĢD_S \sAz%|Bx򨆌B.8b"Xs.$I1̨Oڪר3*j.Y`RtFU 쵭(lX ۧVa>C ޡT}" IѢEoCp&N@R'\B2* ^QkO Dԯň uF`8Ƅ.J3I`@-6CrQzK YkME11dDuJXg"ƨc1qF*)3Nsq5; c5?4X%Vu;hFZrŪ{~uo`uͅ,c: L6&h81[[م [bEdĕy%8DE5l>G'92͇]n&殯۫>- 'k^?M'Y|NoF7],lRct5+s\AE܆FX>%q#g`!wD;n,gs~H򍈮5 ?uis<;솆qLC,%fhw՛u_ T?Ph!uu aHzM6ۥw~ٿZPz~|/JG/V}L]07@6=>%E"Wy\pj$1I/M;BV*a - wD{tD3vcW4m|,[ .״-5P2X)+?9I=k![oВ.X/hwO%WYm )f`]鈖,]sG4v$="%^ҙjh\cY-r/$ endstream endobj 2579 0 obj << /Length 910 /Filter /FlateDecode >> stream xڕVr8WP^UFxLSIR=HP"Hn+8u*,lc$tCk[cB/&p  <#imY+<c3hlzXl]%'OWoĖCbqw$qQ=}:Aq5.QnMȖe~(|p>m2qp!7f- NJ9 .U`e Ÿ>V:{xQCtIspe,t_1vYvxv"28ɨHLpsִԞ5ØȆ]6fIΜA0m8ٮ̊hҰ{UԬrߍL M~Jg6:6Ox`=d} sWt YůO#%x]X-(~VZ|VwKb^Ckˇ>FʺYi J(0 8:+!)yXn&KHPaǯ&m{ TA=b8!K1975u C?,SF!xׂ< fe39&.qƸOQaz%27+Dߚ4: 9[;oz]QQC2lAUS^b\<:ؼy7`X]K4<^^Ӫ ~wS? Bq1'~_*v 'RPtY^`bMY o' ]8,)wLTtc6 endstream endobj 2590 0 obj << /Length 1971 /Filter /FlateDecode >> stream xڭYm6_aKd fDQ(æzm پ+-Ӷ.dҾwCʒZ`%E3 g??_y:Jf ݬgY9E6Y}}(vߛ,2hy o7y}-;,DTijEEy[/"Aǟ}ɢ͊{l,xDlhc7?Vӑ<#Vf}oT Ȉ=mr, WX:kpu?>)3uT'Io˓ry0N8׷+NA{s3}5{}g%\Ix/ZɦPz %Gh_7p\+ؠe[62{DMM.zK#$Fc2%[M%Ľ \Qw?yNJx,t 1QR,ڭ:mȒYR:Zyyh???lrp1+ϙ inϖ}8I(l+7Ǒ;S\VW^ݮaZ#u!@S7&(V*KִU\p`"J!S MYQ'-7 {0[)lv"f>4T>Ė/>U^v+eSOұހ!ZlX7(Bv3d?F|(>S,*Bjk`LX[H1dYA,(J4TRi#dzW>֭OÄEDIK07.M A |S#-UMA;R8Y$>"r$>EZp!S¥ ٭9DRM]']]+'Oa}kSٗ':JZK(&SSGÀe߯>ED" 7PB1S.kٸm>~+'c9k:_.U{ܩmpe3Id彭OX5B^ᱪ|y{GW&t2.{cWcȉ.5s,N#Hxwu}'c5ܺ$i сx *S(v*s4~.b*h³^GN]Cgb$ t9K`ٳGeũ5=H}^t8dLv'ڻ\cJ{Tt;ݺS.` Hl>%xVh"A|`6d&z?^>*eGFpwWGV]4wի9F)㮤e \ߪ~O,2WKS[].]CS0:> .S%{" r%{>*Ѕ%ze%ebqŊbևkѭ")@cK|s6FB:˄F ,倫^JJ TO-OMD`3.8^ލ+qz<ɉ#.m'&LX&6=.wzPcߝr/^_3!dn]SFV|r{FH_pp8t6Mt!+wnE%WCE'$@l'&{5bK[|(zx\L'g'ş4 endstream endobj 2595 0 obj << /Length 802 /Filter /FlateDecode >> stream xڝV[o0~WXHŵsŚieZUMJšM0 ٱI>vޏ:p$Q0B0c`  za@A5O4S6d\싐ZV+ٯE~KpGŒ9; 8g`W XojR9y}?Bwv}ryD{Z Y,mPc4 [ Z,(AN W|[?3Vb["ڠ32*lώ x؏` w+*ڕ$ ^׃ً݂qV1!!`[G7: K^EIN!LP]`J (>ZRn0G`JNn,RV i)'|V4zhw![!j-ųWeP͗GjL|n2Ђ3>tp<+3n`Ƃ}qIF+tVN =7Y3B[q2dG.fB1Gq 36OxOKlªFw`Ԙ9a,eٸhF+j!x b.KmYh nc TQ[>TOb1.HVd/YЁGi8-L9BSfqËXXI1iUci!-S&HTQTOm7Q!\'VT+^m>\d#:0kv>; fN/z endstream endobj 2602 0 obj << /Length 1963 /Filter /FlateDecode >> stream xڭYO۶Sx1#dm3ICiiIȒ? HYr"AAgۙ?ٷg/nU4Yp̸3sxJgy?6t]QW? u2YլBgRnBD@覞svl[9Rz9[qjW ՍB/mպ/׎-Y@ 6Ms_s$Uoڊ@MHti>jr X45;qbo MIYHƥt4~p0#7z+?c[MmVR;mpï`+ :2%a+Q@hn_RZRjU]´}Vɮ*:M8du. g8-|*% BtKbe17-7m(%T?7Y%  MŠH^nIRjw(YJ,\%'P\Byk]x}9}ǭps 9=Ʊb!'ug|~rύ&\W]:$W8rqKқ [uW%Qlta3{cwb`':fiJӵNZdK{ύcH)C>F !jX[Y"vY;K6#)e:G!(.[Ĭ|_ˊ+{v+n쀥mzo)nͶiSR ]MyjWi˸ik;F5ݝ{n)1ꢹi\BnElLR sDmL,C|])_y7Ec:Id*ԡϷp,quHm]dtHЗljDrT"h&u^RЃ]lKcQhI)S81]܁țg˘^O\7BI3ݹZ H$ 8!#}gfSTNÌ y۬W49hMI+d8HeW5ːw+]lr0?Or{(l Mem ,G?{gN_`a3}\W87Jf_uՌl_Y.jƅqo knP!}fjT evc e54DXud{ !ob\u|]h/ ?]/NxؗiEԛNvljģF&OˡpCt]ԥG6t3C :+oAlLN{ogJ#_0pMa|=gIm67. 
+ y|0gF endstream endobj 2611 0 obj << /Length 1543 /Filter /FlateDecode >> stream xڵX[6~_ٗ&3W(l 8JcBYc;$ }b;:W̯݄i,q0_c8HA1˂20!4~~t%Q%(NbPhV\\s 15Jf4DQ[URTK7JY.{&1REYDFLg4“\R^ՔFRrkW./ZKN+YߟWkQnG( 񹒻3;?7HzWJ]JpQNLVv66)!d"r/DNvGx)VS{Y8q^ Jas^1 }+pyn {)9FZgD ")Rf= fě\r=x,Diy9P>8C;Of^'XwefaL, NAeppnMPʢصXh0Gpfԕ%Ƣzubex=4E4n-sBkS , RXτ1kskŔLpsM21wuC,̠eQr\?}iH b,sPn<W|BU77V⸟^+<_ՈBpTW+>޿p'1$? U MG`6=djUa\UN5x*| 7sL0,˲x4a(7#QݟᩒA|Ga*]`dy~o X{Cՠ`Kt0}du1Rكe]0".鿙p X{x%\\'8nCnEuA~6h-Q .#mc>V14PF];1@vH;AA7},1ivX!AoLVœWcԍS(.7nƨ ]tur%ZUC]vl!fߜ&6&qu@p*=6Jo@[QA A_Xu]ȑ7Bk2jFWAY)] c:[b߁8DIԻLj4QJTB (=a`{EԡE{.7"W #Δ!<)3^}%|| QƠW:4 (I`]wWxwׁ0s,efǑt|TlbǔF)CKeKл]Ύ͠dny!\; p|+JBǘ3}}g\Wbfn#<`2YS2nAcJcȅ+vy_>a30xt1%ɐF#oЀn$pk%5@uL> z|/ 4Bo䲧!7v]')@}|~mvd*{/5b?> stream xڵXK6W(#=I7hld9m-)pHY*0` 3<`[7~{&+0]m<XsF\{?׭\)עˏw<¨HYVY '= I0{xa-zĔ"IwM4~jjKǦ#@vXЧi`R[;V5G@HNC(3º$j6RK.XQmXeQ^lw<'˳ޑϰ#)֠wZgtn62<"gN ،>{ն]V+;{% k m4izwtQefzMfڇ"PZOoԸcH*dLܿ$\6Lۮ /i! Th+Pqta()s.}"zmkj9gH(qObkagIxxQxU~jhAcSHi(JX.Yl>Q( PSL`YcuH$߭X rqF0  Q2 BA"Z oK Ɯ |KnjK2u2{c-"C;%Fnyl4 ,zy#~YCsJ>CH*Lo{H!=+aJ\QBÔ"]kRP1) זYs8kF4[CKG4NMkP*A9Si[ua &vaC9hA3ulǺn3pEE/^xf_@֕niCckٞ+4FT>q&#(ުHzn5XFzAql y8hpf#= -2A .@4IǹĿՕ+@P55qv?Y7ӺOTU Ej3$n2O1>j5pX N5W )$æhG,A^?8:[՛Cg#&ڕW ( g{PӃEQoۮ+Ƶq  Ԡ zW ;dG6+q8 ?'gD}y4T~15 <+K(+W#TE$fE+e:"g%w<-FA &PF\[[AmEЭamLϰϦUɵ\ sOsʥ_lϠf W uۉ}Rjk r#_LT%t"heV%>Sk4c=Uω R?-ЦL旋RuHAۙev8$p!V}x[k@%"W1hdRb~UU/F;VF8;~{/6 endstream endobj 2632 0 obj << /Length 2048 /Filter /FlateDecode >> stream xڥXK۸ϯP^k]N6)TrX"ק IxF׍M"\]/ V"]vǶd`Xu\ /jf-WqȃceQ\e9R2iTGR\O, F"Y6@wݏoY]$A2\4f)^߾0&d,ObdǎDY&%wʥ,<,Gu?"kNt]"kښ`= 'H @Y,yr`-/KZ~ҥEHdƉ'!z yMy=q"!$'h +_+r6Ƹ>dc sbdц6lA+,i:U@ 4{]F9g1C:0Q-jvtqƶYc }6\'te=j pO={N"r13߁^=[sVf9#RP\t[%".y|;>VzWJb~#< xx4C`}1]Ҵzk5(zWBOm;X?H÷*d_2gsƖS)WJ8aYQs":ETAP[4VnVyN|N5ctP-$ Y{16PVyxx1"QJ9?=9 ?~(=H p> -S&D4_H?ZK("03,eE=g)d1E}T[PU{y~a EGۨc_Q9N*2"d>^fA׻JNII:uGϏPy#kIA\_嫭y.C}=dc}лᇥ~TNӿ"aߠNs[]-I<=WiG"wg}ͫ\}\b܌J[>c9lu%ޅG۱lrj .Rt4u'u IE;}H%p6qqrnst7y+Vp#U4=Y Ie|A& hcDm6g) endstream endobj 2642 0 obj << /Length 951 /Filter /FlateDecode >> stream xU[o6~҇@Őu A6^``$֪G1f&`H&;V~ϓ,(PFiXc4Ai\* |FJ_n%gwsEڑD2`|gA)Ua<28ӫY`TC!G*BϧTi%paޠu/E5j cx4 rJ8igp6%&!*]"b#hKfX0]b\[lr1{Bd %(Ϣl'w8ȃ6m ޛfaTOLL@@JI@$=(G'%N9˓5Aהi֣O~g^坄s,~hR;HR SYbFSXGBsAFP5#S;C$^Ň|59 ˾lu멋C)~i冗'#c(lvXU%±ۡ |А$昧P #*lŽ윲< ,3F1;E7yk币ʆIGa%7*EI66OLvH7|- ޞ>"Y~o.6#$ ^m X*Aę7۫+=DCKnю׫+D+1=TB K9$k7 G)Q/*.NɷofzG`\rPyP`H|Cq"GK07 endstream endobj 2652 0 obj << /Length 1570 /Filter /FlateDecode >> stream xڵXQ6 ~_նl.Ї+6pMaPl%ؙd7)ʎNDR$ŏv3(e,Kdg WI[dTvmZe[>}qgdMYWG* cLw^ٵzIȼK(55돘7vn8pcDzpJZc4;F?nZժayRmj-i3DϑA ^UvQVU\ cqHrDkesxei؁+}3z1$e4gL;\ "YtV4pU84M#{^/u>:ZUrb6Yi8‹]9@dAzǕ7piHw`uƽ;\ABzcOJCce>ݗV ƍUѤHH02' ApIiQ\"DQZOWx: S'┍){ ΢qX@͡7xXo a 78mKg /,rߕ{tSqU# 5pr7q)/7AO#&CicbG,^h lXxZOlZU=rqh tXP 5REG|Kk ixp7/w;fsf0Oܚ\ Yg%|68#&˽j47-Ѓ naT_Mp 0 }J[d^neq'M]% ,.ρo5Tees(I]vLZviayT\|`$h\#K&6 0Z[3`t`()ӽ8?ܒ^%5MN(,I7Cv\vG:o֟i4 LxAS(@{PJ >g\JoHC+NR)^ZشXng Mk6I>tKU$~4x~ {BX%j0caR(bmM,=4aDΥSH֍> stream xr6DH7=II'Ci0 IL)RHw]ҤLv. 
pw]xn//X:XN9WM%ܝ|+n˫//tDd1K ["pe0x>B@2W5_e^A, bka7,ygY!K zd/2U%ܦ BB7>.&2DSMLZCta{'KyTawY}d H|( l;E7GAHHGDGgG8ĪOŸq@ 76(Ƃ/nzQwQ`d>Ko80K}>h'm3F4uamam endstream endobj 2670 0 obj << /Length 1020 /Filter /FlateDecode >> stream xڵWMo6Ẅ>{6MZ{`$R!.IuERdwS(||383}?>lWQ(OHlwCD$H1F ̓m|?o?>RJdEk%ӵfaJНcl<ȆZ;;oyz `sB ۛ&(ψ-?_Oʏm=X}JMs~~0`"cZ+QݞYLdf.Qq&B{$y/mgr{gYC~xEBHɯdt*d/BL3^g\]#ͩ Oē7-W7Ku;Քj'd˚t =l7D(A ͳ˰ "hU Vfgv1CD84CaNNdpO\/62 @O1C/ی-]xȲ+ 4K;f(=W'~*+EkGژo֏i^g"Ə&g{k&[Vw͋2kceL}; pFonhYfA@;U5BIB4$?|Q|?&)E8("黨EK?dDհV;l-/kQП,Dr踴˰jѕڳ2 7.eI=HQ@k/h\}{bzVdw}yi+WE> Ԣ endstream endobj 2576 0 obj << /Type /ObjStm /N 100 /First 979 /Length 1997 /Filter /FlateDecode >> stream xZo7~_KrC`p.]9w>?&ՕIFP^IVJ^}r!7΄!r1!蛌:A0!Ɇ 2El(mJ&M1Mʢ10H`SW.L2+`bA)aU3r,b| (_J*TTIʫSFs:»HGlYGX EqP() $DQڢO'Vfk&%2(*Y8+oqP:7: ;UL<6YE(]`6`$xb~j/]9au g(U.*D&V1*gTi [&AC& &8(<&(]1Ee`Uբd -Ya ƌ+]2P"$ЙHrès\E#%9bxn C+$U=QUFE&qՙ(6`.bƨBIR5yRu90+U"<,V/y Fl] a率#[$d,B-JƝ.f1j+3ܙ?TF@jNBMQb0+Vr-vHhz" D !wn0r%mqfmɁhgХi?'}f(?+L|JH> stream xڥVK6W(5WM]$Xi@˴VUwlzmכHoy {;{O~YMQe([m=1 i%8̼[6L|[TJ!_W4= %YJ Chx}ld6< Fh ;GtQPb_;}f(%tPtr: "십Y%{!6椨Q4eg^6F}&q@b͛0J)J`x ɉW(+BI0&FI"~=8߮7QUԒZM:+(i4xSVf V.}'n;W{>&LFZ(Yy%?ٕBm%!cͺJSɖ&o_FT_<+J±,j[Ȏ gYV,KT8б)925&, %yvۢKY6L2 o? #cAYyy5{|UzOiQ(/䏋S7xi UB<( Ӌ 84~,2; 'ط1|zs`U0HG9 !DN 7`Db.of ˡd8tҁ'F a,MOsyyu=ty4EVʐlŧ{kEߠ{V6{\> j sρ:v}GJ'0qޛ`-yUkpaf Vqi=x2@?0| 1<V\2-QU"fN$^~M WQV) l?f!EM"C+T>n.>3sy}@}kզR T#'٨Q ZYEv*s&JL\wkj,8T\éU}T݈g5Ι 1*<5VG6jVifnw:S ?FJ endstream endobj 2695 0 obj << /Length 1098 /Filter /FlateDecode >> stream xڵVKs6Wpr 5c =qԉ'qc%4$Tw-Rf`qrS/#Yj1JIc/aa|uusjP*Eq[1g43꼼]1XR=8$WAx(KU{bHxһ`k1FI̙GI|w-E,ⶬ(eF6O|z~RUjZ -^Mꊲىrn]ٛdzῂz n_TZ= wF:kTYI uUrWtи41rp6,!AG|׮Щ{xxb\Rթ^V)7ǗJj:|;dåiG އ4]YmǕ).Nܲn\1_66ӳ~SWK F~݈\{W5VdI߶\(&7H-_M2xjIbo0$jO_ xpSĹ4> 鵮h!Ofuh ry,E9ͧ~!'kl8$q|!g?_hPr%XjTI4 ɟs2:0Su?\(!Jã+- %4(UhmxЀ*,NHD2|vacPuT&g=^ P˻RS#"^ջ a-m@mm\Qk[T~Y KF WN^FIg|(LHZ$5#>p9ΦFQenc Su19Z]QS* 4]L$o "s)lXRO)\6}b f"2YGV endstream endobj 2706 0 obj << /Length 1119 /Filter /FlateDecode >> stream xڥW[o8~@TRBHVCCWSU®Vje&NN%*hbsA ~?8Ln0XlB؁ uJGz,.oHɋ{E`R;D!} H_16=xK _}/o=qcb}&Hؓ` EWHꈑuING B֕1>7)9NuqCErlF4qI NF0ÐF6r:}|%lsbuϤlP̎;"FEaǝn]㲱V;S;BiH[$\h{\}+No@R7jqg im> f/vqF~[]8Tfc̩ޠlE挷YQxwXmrv2N6qEAN%*9Sh׸&::}5k_25+a]>ra-7Gó\d٨Wn& B Z@KJ}boAy&+\lﵘ$EtrSQ蚰%6;GPN=)EӪ{근mnsRVL⹡էnb[;#mGQwOYp8 L;??a_^.`-ؐdE ҡtrrK1񮉪*cՕ-o%T(af,0'\Y_7z#peW+N~T+X&XE-&cŋ+nDm !G19fi nyE -]HW!\[Fv@܅Z|NRj J(gS4{wC0N֧OٙW&h_{ *}^nD M'fT*axp_YFs0ߏ(֫{P,6ɝVlhE}mȞ 9H{LM;2%EMCV:6XP2F̨8cjw7[B endstream endobj 2718 0 obj << /Length 1204 /Filter /FlateDecode >> stream xڵW[o6~$#C,bKwi..qwx%GsmC"!sYk˳g7Ab(Ț,yc"Z$roox J%I I[}hkU+Tfg(Pc++fz6o-Jk+E +q@`[QZB0P`+ cr4#74#s\?[6߳qduھϝ6;PO*JXfMVM˫rdμq)"^o!U(Z[d-ys69i3* VTbK`?X2?޽ Z piڽ88z B֠.q4uq񣌚̀`&~ȋUS~2ӕW&M űs<}$%QD]jFcfS KõJԌ]]KCɮ٣Q:SA竕{w 4 YUlh˵.@1YBS5(VMbܸOkԊ.&4kVMU Ib0V P5t%Ӌ)(CB5K$[3ZX^~X-mD5\Q&3J_ZazD~ YCC_f?Q̨d$H!Oά$M_Ԁ/±`d) #!]2k f7}zO $RΝYh\eyZ`T~j &֥3mJwt]0;kjY &D8? 
IZK"OᘪOufLkMU.% ؠy؟ZBJx( tϫ C4Dq3,.5ǀpJ[rXA]_9k[sioݰLƗH:f[V#rU t>r0![߮DPh{mx6CVсhHc=),L+*Nnc>ɼbA1;wixT@Ϲ;F=R߀+8<#ԣnLivO _UJFƏɥ\ y Ȼ{c+t2`%g`<g endstream endobj 2730 0 obj << /Length 984 /Filter /FlateDecode >> stream xڝVMs6Wptf, ůIOVJ{H2( `w2PXbw]`/6WoPycb/!am VQlA"TUm}f,FIscHD>[ eh~fV\ H-I,l9mbę# mNj"hwT/X~J;fE%kqvw =*}VmJhɇ>3XHζN%""K1o p33s+MQ/;jrmq.@k Y1$aR鼺h&C+@(&_V5bIԒB[&%kcVv׊-)8.`IVJ`~i8}* &\Qga9EsXLA5E4<2 QOG7Ot$-?Y`Sxz ۿ%ڷ$4w`xWʆ8+_J\1bzBo&Sxޭ[x&J㋯sD<yZF <8 dM9йwhڨ/2.OnNz3#&54?ŒdE}>B<Bh>̽췳 ͐P2!lOUy׆̊.d^tY 1lkrvYL^ WpT3xenގ|۟db_lv$N8.z <> {bp:^eN9CJ<TN,=Ȫr?n4W-> stream xڵX[s6~ϯ# qQf!nɤMcGd_ߣ IL;; B;[l/k,(-V{ H1FQ@lAk:{OPTEYK:*3wq5l1F/6 5V|0ۯ77J%sDf̐Y8ӲlqT OS~\rQ%2.kn?M.zb]##K5E߽<p K9fZP?G`B/1lhG/g4AKT~nfǬuŠi$Zħj[v]OU~9$#FA*ָ-P 9 L#k>9偧EUU["5G ˙y͎ب6Uڬ($*]y.X k>Ia ]} $ki 8>"FD "[Q N0#,b:QjzB^?46O1K@oT+q6|ZT;9zRt?Fʂ#zP}.JuIR1$r3*,7=sD%9+h&jv&p52̓ǞSu2TLBGJH䈮im֖棪=fs`x*U $k~USx.Զ<BAH K݅BrΡ@) J|Ѓ(`jy.`G@"g۵TSR]z7ImjZٴL-tW_ WҒx (:MO USCQ7db 6H5?t]Nn7㻂.U\s>p.%]Q_lͧ: endstream endobj 2751 0 obj << /Length 988 /Filter /FlateDecode >> stream xWr6+x, C'NMJ55&9x|IXbV9_䈴f9t :k:~].oЉIY=8R" #UܺK~}}5[qyE{"H`c"6B3֧?j=N`¦Fn/|0(L1h$ILCgbGAva}\~":y:FdYDA !aɒ hű3Vme"Zi*Y.drMapQ'+5gֲQ(RHFA%6orm*i<}fcg^`d0 e5~}qԲ8oR<{Ib˶ dydĖ);€P!ҏT:Uz>%j#3_UBYZY FF\n/:P r+3j{V{}Nv3C !x,rm>f[Y? нqD deⅱ*M-Vf Æ&"')fwIuA9 w>mD]cZ1BoDwho*]|KA z=áw_'GHwU':ۚlȓ* e 4t0_LqaM|i[O.4LdWj %8OE[w‹ Eat^ݍyy?\Uram5wUp+̀^Jۤq {V\jGRmArvxGe^Wӹ0['#*`Y*CW:\7F;Pz0iŷ*Oɜ9?÷pֽ&/S]?سX<[ {մĨI4?9zς9f pWxzQ endstream endobj 2769 0 obj << /Length 1096 /Filter /FlateDecode >> stream xڭW[6~ϯHcs1dhdV!A"@7$C*6>|Rb qXmFƐXm=4XA )xMqZ3>CFjL![$!Hl#(Gźxqk{pUV]h#H< BEo\, I;0 mGVYL^cVy{3,SԏȿY'C,gr#!6a{30b[s]G{bqJGq~z;W][(*"2_|wz0ʝKyYgMi%=a]}~kqyx?ziJ}8 ~3*P,SMBulw}ahObNbzӂF9y&"*Tbjc@&ڦ­^;Y bۖWRe9c-R)#ۘK$I 2[l~|Gp2AO(%6N^Rܥuyo[[92\wл)hƑD mPr)W42'mGN_[ZRYD_/Ĩ<} AGVN_K{:фoRӘ)l%7:h#&oW˗B\" eV Pk򪏨omfD[kSSЯJ\ՄC1M[@RjPX@γĸE\Ik]p2JYs%DkH7G;ѵOm^\k9> stream xYKsGW1{AQT(B8(I.ȿ#VFpݻMOO{)'$[\eCsuTD*2u6(^G 7)4GI.QRR#D jpYJ]WqK`\m`DHBv|5Dc+`ܤMlh@ u$ 6DGJmVgn[89LL`\LpU<U=VAP[_V T,gؑ酴d"mQPX^r|b^ϣ_!9T|@*VibvS8HA`mOa {nXvCt,u ϖqv'nS7~}\u7?ƹ||9VTڝM-> y$W~&K xE>ݽvu; gKE>-U#{hĭ7mDSFroqrݓ>nϿߎƏn /??>`NqIM 4sAB saxpwG;_Nֿ,7E";]o x>5Aܻdx|x2YOdOgjrZ[%2*PģuoEn-̟_-'O6&oiK4R@OVeMfz]ed?3KVT&6-I|Mv}X'zKFڵ[}1lF.wO.&d0B6'[=L*lB.MH҇ք8ЂCuܱ">|kLyƾ9t$\B38@OWU| Fٻn:['ewz>]q54\;XA@4ј MR}޹mKu;S{<&(9rMmK"3f%TooS&|AƠo#/WiG3a9y̟}TG9j> stream xڝWn6}WRtC,m-@hG,#^dɡ6v p̜ᘠ "QhFEBJqcH]2+zeVD#<pǠP SA[ 1wtϊ{,Eݼ$].ɱ(ÁP&Gȣ`TҤI&>y[nW6w𭬂%8glfOO'XllWV?Uj9^au•fQ?="`sE$@ʢqa [f1wBa ڥs_Ja<г"V_'FQ0E%A@g#j,ɴ3F/[e)a3 *n@S E6^UٮɆ!kϩ.OY^b _d#'븹dQU]F%J|,X-{-)߈ʼRIS\.GG]i呚=g >r"n3 y6D?ژ {=]לҕ6#3 ]YP\4ؕ-Yalz'Ui ,F)7h.B!Zk`+&֛˜m G!4b@.RU"zhggRHS_L5ݨbzA=90]ǩ9ū9Uϼ_C-]8hȗ}2ĔteAJ&McaV2?xJO.S FVe5:aѫO3Ҥ1T;H(;+Y93Cem 8Ё[1Tt$ka̳a'E( ᛪ7  Sk;^wfuؖ{[ kKҬܪߚ,3Ǽ~T%TKu-+GH 5\3#M,fPuu'Ky^#]Ud# m4yQ endstream endobj 2795 0 obj << /Length 937 /Filter /FlateDecode >> stream xV[o6~҇@ŐK>li2l0mvOB RS> sRTًͰ@}Msu.Kz}GN aD(4fx _B|h W'=5{l=UUIvO)Z}N/'ZG(b @-^H!K!μy.?(&O\󪵝4y\ ޶EWTK k)n.؂oek 80BYB ze1A")z\[A 'G},sg9RmU~qX B/t4EILF rQݞmGyU33u&ZQhK>?@v;P8⫇ Hc])ډV;ROz>m4Nfɬ]3unLjT}ThCVrw,f23?yUׇW 5hR_@ĺnp)+ Vf&ctk=ps l.ZS;e9i9wȄ/UY{"_EdS2!4n8G r^i{.7fU;v*.g(Q3vLpǾxa++wYaw_7%m]*}"1bFJP϶j^~; y5q eFL_j ium"r>RO`܎'Y&+(|QoQA)!^E:o_̮γC  endstream endobj 2806 0 obj << /Length 967 /Filter /FlateDecode >> stream xVK66HKeæ] ^hȢAJg(Rh ] (Cij8o!J{ܽ_=.ҙ<2o]xc'7#eqw_Q$%[mdd>9$BwZ{\CSa? 
<)E@ MMH%)8@irN݊]Am3v?3k{;/ǥw>./D3#:D$ںYuFhֵ+^oL:;Qavt(SIobϣjBq8D1@k%mƸ| 7s$SJMڕMTYSJTӃFopDiz &R0% (CY_AO2 /̆Zi^Lg20Jbi#װ⟵r * I0&7ڇ;G&(ouk>tv4R7X4Bg95T -%0~F;02L^7Vl[^!=!G\h?BmRw49L$c`@U>2[lNi¬ y@ 4~7 4-ΊVC8UYY!~ *ܞ]fLVTWS05=3ޜuoL@^BrC4Vm>4uԜHeo5YtB\`[f=qJq~ZSeK^y\MnWQ!4=݄ڻPIk܉5KV 'Өݛ]B)JɔuP ZĜ/7{g@tSM+r*o 8Atq~(ч;ג endstream endobj 2812 0 obj << /Length 1146 /Filter /FlateDecode >> stream xVMs6Wprh"tzpc3Im9=9$$H- ,Lݷ8?~Ɠ8w Ra ũQJҨpf٥q}}OI T4 \^}[~g7'ѡ?c3،:c"wE&Z?8?5Ӕ1I/Aϵs;{HZiA5Ľhrv+нKB͒%O Ov6bKfge/%oTe7ǡ @8s]U7tae{:T^$hRKA<8yN;$[|':Wm% ݷI6eU^t޹b{/6E= f24K6:tѣeЫչGx3b )(=$~™%剱9{n7U,!eSs endstream endobj 2824 0 obj << /Length 1385 /Filter /FlateDecode >> stream xڥWYo6~_!4^ZѠmc) i$0hFWEG}JdŵQH\!X{˱ޮ~ޮOJIyY?uI6>g5]ϫJNoǧA2Rӈi FJh?;q6^ >*,2jbb0Hx?<;7^9/Y%@\ޫ5gdQ`d~Õ!ώHB`Ln6G*jkaSCǖbGf8,6u ƍxI%Cgnpk5~_5n ٱz|]CO%TRfuѕ4jf#1-}~пԕlB(NvL3POvM-.CC[о9? l/ˤb'w\Q J%m ÁM!2*}BLaاʧvŽ^* `$v3KtWPq7k*:#Cۉ(Bd奉W%Bڧ>JLjgZ}5dыT0fJce+5˓׽+nyQz%5vۯHۇnJmFEWH;̨8ֵBjL<Kâi-pA8qy(tQ M"o:%>;]%у3_Yhtiלݦ 0X>/'[w*%Nh:gJB(8pkVǑBI3A?Kq㨂G{@AL9*Fy:0e#vπ-թ*2IVh fA|aJ8쵠 [. 5r@ UgxQp!~ \ r4N-> stream xڭWێ6}WCeIUdMQ(-ѶZttfÛ,^g~ЈM:ily$NȋC0F~91!(S>w޺$Yބ@I@JWA+Ŝ`5ٜGoqp`a^ ƹ%¶osyH,HD䴥(/)qij*j~/(v3[Zo{;9 0JYVvل"A^)JiŴYVrj8DnQѭ9Zj^(NS Aij[;Cf,18AaX͞-G0DaI]s؈~*'ówd:xaheML楢m]Lpd0Y@>~ UnS/`ϐ",_nD=aLbM='!ZV?C7^W^@50$i{O{q- nm0&]zk=RC[]Pg^ >,ZP `N̈ϳ~aNV޾N8yP@ҹd̨y<ցQ@xVFd݃:@);Oh]hu'\jN o&1xaփ~f>ER@4 endstream endobj 2847 0 obj << /Length 734 /Filter /FlateDecode >> stream xڽUn0+$Iqt!]R ( 4V{IGDE- t ߛ>!|Igc&< !H(Ɛ98>6"nf)w#OEz8N(I8D%6AFiާVKd\mqUH"JԺMB&\0b3cbq$>IGB:B<o;1K)D<u#g]}J[j^N@'#&҉t8D5mNC̋0> stream xWK6W>@%V (Zt lr`eV!^Qd}٢VIl,y|q83&hz?{]])p1Zo%aJqdhAw WQ2 ӄ!-`y]t^ YRu[騏WMGSΆ of`a,{ԥY[z!Y- o6r>PR.8ھ FŞ7L0e]댢Km"󭊈u#gfKóc >f= NeARmLWm2]ɚ IwHiby/'~Mo9dP?1Gt}ȕs3a^zdžN 'X6> stream xڭWYo6~_!i2+A i\<VdžlC)Kk%€yh8ǙaկՓ( 2,v CE$g>_zOD:9³$Y 0ZΆ-OR{lxzT'd)v4\[\UκKyldk2WGd8mu7~Vf< YWRQ}mYtO7[N1_qY68˛ AHd G5`leH & ^G$tVrEVݡ{ʖ2E 5^]Y P{-.7kn|Lfe 'dg\$H;%ֿ<,5C v(:8xzR.KǸrT1OɫӚa|F!g 3B#w6[jPwB$fYr(Ed$—˼ 4 NH$?N ZK'#+tp1/s@,U91ux-)(9wzVUaoueFNzJRVC jad?hu*LHro!$BQn(6^3cKkiȬovHn6bfp9&cvu\'Of[{\64|ȟ#orFilLSf$l:$a\M^8 N=; Q Q-\N]D ڭ-,:r"Xi0(#7gipmE@DpW;\N9@b8aݘ˜RV A,׆nl%/{qzJ"[\OPӂJ7޾DgFi`Tff2\rCK|,Y Gi4.Zᖭ}97lkӟPZCob4FK$8u5l+̕#b!3ś;p<#A53CXlXq-N;KIګ{USS>}g/{֑>Qhי?>^rz zQ,4S7_^DXi15ԑԉ#HGڭ*HQ'pDį 5ޝ9]Fw2Lg endstream endobj 2780 0 obj << /Type /ObjStm /N 100 /First 972 /Length 1620 /Filter /FlateDecode >> stream xYn[7+l6p$pҺ^(%C{J)j([ ҅!9Pg(%1?&Ss0t"{|1KqшDlbIAJY7.dR08t ,PXA%)%Rҡl|8 `ڳ<(8;).)@I]T+D/P9G1R< ]Qi͌`P@A ;I 0)*cU P7͋Y\ I0P%c@p%P 3&dPbWމ@J RM*1sB G@TB623t2UZRaQ)r*z,WUVIST,1[=,WBUW\uYXYVȸzXLVYT#&!cz ]+\Xg$R"‹L!QjRԄJhsub,J WƒCӽ8A9 1y<4d:#ӝ@ ,e'IO{2L+{/)ذ{3^to^lqngs7xнĹ^CE]?r|u9w BͰZ}/Ǔ/ű0N/ד݋jYwGqB;;5Oӳ5[;|s gU=KR>]?2܆l%%V4lPD X YWZ%9Xj[|g]iAt$ vǭIS=zW`4QjD IL hԬ8Н6h.>9zs-2%fR[6܊Zo%E vHzd\}Ynj:Wi"jކFJ`*l+$jk,nϝx7xc,9u(C@)# (ŴzѕwokA67bav-h259ɥ`d0hִ$F4Rkz^1ΫPNxOb%-p$}̵b(Yh'f}kU҃`a>[.-qE{,`Fbс: ْ[^t#s1YZlkPGYFN[+ ",;=;ގ6|^> zC뻆>ᷡҺ7D:hV4R{䋕7)SO͙Ps&ɍ~_3*1L"CEH÷sǣ>"Gmж~cw;Ϯ?n8tj(܉ endstream endobj 2883 0 obj << /Length 945 /Filter /FlateDecode >> stream xWM6W!6PsIGn6@鮓K-QZRn__R$mI:VC $̛y3Cjfquc/I& ! 
V7c<: II?$؛P鋛FhIXC2vB_I]>38r4Ej,*[@`v U$GTl2;=xժᄄLGm!W +?42sjfM3ͪxb[DtE5~nO  ¶RT <]P@Y 1m9bBbc;:hW&1 L7Ŭiw,%^&\;m=x4|f}_6Q/KAlz(Ǫ&~觻宆<ǔsG\u[7\J6>R=Ϳ:+4Qbnpb}`P9iCRL G$=!dV[F( ؃ Ll&%?;{MҒm3"p=d.(Ϛ5"rf7PJumd(ͦyӵ|׋]}>ʄV8*؅ԈA@P=fltálrM4+.qтڝ }r^J-Gyİ+˥{ˤicE2 sot2nu[CUQ]Tہ^_BJ22O EUinkI&3s #)? +gz\Ljciy*jmd2^ԷCk06%FՄjhO< zf&.<Ђ*tK>Pr%E:ﴏ8`"$+t2&-Zq Ċ_eYSS=zT%bOEvr1A;\8!8i'cI7_2. ϰͥJ>u6%a ~0^wȂ)=nT\hW3A.%y}_vZ3O/YT,uݸ# {]cPܸ4[aY,XֵC[-UCS J]jYZ^"x(5&D /E(w S߅hdgtԈ7OpJ5aU/ʷ34m_߸H^D՝rzu>IҐI }Hֱ&/b|,5W1 y2dj•:!Yھt\iCg\,A2 MJ1zRKPG21\@!,nvb;b=v7;rhWQ7^ĵW췝"cG[v8I6. ^=g8HrVo&%u6Cse|70W,dЫAgfI=ȥ#.1mm*"tȬ{2$q\y􉴣ERk<}mg<ݣkppi;m82Ugv &ߖOBQ Et3=~!/r[?{#<[l KC1%l 9lW7U捺=H5`2W?'t 'M'B`H`d;.F᛻Q)ө=XeVrIrHrX>NʓvvL5sݓexӸDL&4G^Yt??)Apq sm)IdK.z`]HOg _ MDAxFԐgoՍGr<4`'̳7b\n#tَme`Ȓ{p}0tKg 4o/Dvt#->nY4[I}Wgâ?xG˥{/9XrbRR 8|#Q&TBy%Uft|ډo!k+tf=I`GF)]jV[ec; `0l %ŸhYJkʹxȩͮa?v̆ }B-LHtzXw64B94R̼/ARΦl@ub#E)G]Xd-&7&΢MfƝ B-P],WvK8Ο=^>4&D_ hj~>Ѩ 5W6Fmp*쯥:ٓ_7%6익5|O*]:sI|H.1V4De- u{7 bdfs\J Jp k̔ԕBVPkKw,R aoP y^T髤>9+Z 0='.Mi PBW߳0~pU^:-Mx\G͢BĻ.*$fO|AaBK+Zd6?UeF8].qZڜGODM7ivgakF4u浃ύ 6Te2bڊ1T\tyK5p"UTT^cXM)-T{Cs@?ߛ!̌60C*jF/nKԻJs|d3&X9YGZ`Pd^CEXjK1,Qp\- ,Y95*a2bw2wcILF$۟{a+tt[Hp*ۆǐSB%HeR<Ҭ{0$qrGX+scF# ~GRz9U6ll-:B?kL:[>CLJ~F۾JOnx΀Ȋ&"qB]Z?F1W݂P0&E}Яzs 8 _閦({zgE8a>ܚf{cbU@ z\] 1'#ءf'#™: ,1sP"7Tæ[QūFu>&`;)T(ֆX t'/v=Bp}x6DgO:Qeɵɦ4>26pW*ɤACpBc}\ɺO1i[|79mXXvT-w@@q5iRc{qB!X2/l;/g4x&+XIjqw3?pȇRtp׋f{ȏ/-,X%gTnw/yjq?{/9@]b!SM+9ߋ 8f 6g(\g X.xO/dPs!4Z11WMGMS}^u#O-I`&OzGvԓS)$D&qK!JBvZՀSEmBBneO MbQeсO/$]nv򤵞2y!H{KNx:-.&k]]E_ټuu,'+D8 hFdh.6^~0#PiCU: F(,U&dVC}*=z4Y`c[%|} ͪ榿YxWaM82!Dw/ ,2s#zUzb_8&{'^ čS_!#s19Ny[7/.P>BdtCfHW,c?*i.u P!{v #tf}[ AL4rB@Xh%2R ǽJgOmWU,Og=rktˮ˅NZ]ק,Q[DM0$u`b6~櫲!Jz*;9r7U`SS>mRw%(ކw 1(1gҟj{eZMOrgf=-w}2H8!R.tsz?J-Ty⫣B1( &9eR̍#\\!vTg@KHvsDsG &iU=3>oUYdl/?S=X/]xTm9[;ETd,زa=R}Q16hᡈ2:9ԢG jps&Ⱦrg;.VOlesWE[,_% V!fWyMc=`'#JsMI]a|ĽnvSPY%蟖KuF8թ̧5:ƹ2IwEJɐ)%;@qݥi%u3<= ~>NaҤa}#K,6 {< ܵ^dokæ6w҉[N' bE܏bZk)3DJNUܾMr,,zo6=v`X{ ߁r>CL͋:))b"lv E7v!LD!@0,n aGu]F?zmr%ܹiELyRm)W&? 
QȼNO}MCQo|X`IgdAX{OAd*^!(z A96 >  ;(f·P5aU,Z}d=s׹ CgۏY9Zi?PDɦCE, 5@( nMɥQ0;'-,q_rbag4YSnqKꥩ~tbd #vq6gESjODh58[,\hv@|KX~ksVCܐz@ ݀" lv=}B<)]jk#uO>۶Z]r;\85l?gkIU* U'yʁ(1͍69{JrPyZU`Hp75=-BOl}~.tDa&~}pmR- !V`^Z=sƈ.(>z0J~1ה-[Q!1p.NF9dPҜ]y溱q yBc)_lYRݿܣ[ LI^ghe'V!yr>D+E(.]Cv|2Vf+9AoTDz?-??VU8GJUo6;XҢ4-[2_|.& XR awa#a,7x\3V'32oS!NqݣmD.Z;jd.6*ZZv|`۠,pҶ;_ )/gIZgjhF;C\Qj(⬾ügBa/)wk  SGX9q/ ,,rZ-0_o"*: thc8=i&) ^%XN洙(L'"vH$!_v!=J|RxVi`O(5 ɃC e"'kASF>k}ns69rM5fħ{_iBhXp7vCD+teBs*'qم*X?bݡ)M*ʹ6>01'dZwkP@Kϵ"0ljl#[[n%ŠtS%Hivci PG>dEŎa"_|-ATNc/ؾn dLHPNnvi{&qQOz͓+8Ru VfYbK=h6Vݨf~A313C.FXSG>|tV˜F">7ehrc؂7L.%\֙ԎVeBr{Ż[=yPד?k8UcLyoZܳ-@@-ݶǑz7b |mHhD?#p`/;-c,f>yLB߱>?c0ĨUCiW @ŲxwvFר.f͇pMF>/Z63Fp[Lф2d OVY_"FL|Fu)9TskReY%K[Q=s1."ёSaжW9k41nuO >C\1Q2xl.~;H,ONVÔ{-L.9JO@b'UϠ@vmqnBY4wg{m9L31o"O-;HU8drHtrTuHv$r +(C.?iǬ뾿9l3:Pfz[ 盛cЫ' e;#8h}ky<ƥ"0;16l'u';#lv0_&{Ki)^ǒ ̫& lˋ( ٚpu"9ϝ)7#|j nXT^kIN*M2u!X,/^n2l^"۴=  qJ|~Md+cSD[hs)\7 )j%Ü9'!фY+?$$B[4kԿլõet:%ɚ2]QCB+Umt2Q߰j^t]U)yƵu#00Qs>]A'<0 [ml8~xt'})nEѤ#7gi HU Ndp ňq Sq>u3,ZYs kQSY^µ@$N.R(L9g+kV`j7 (žyYGp'=%z1LD]9ɒzz w-!JF7oQlEpVaU\ sBO-%B&o2``^GL"I`/pLRL#=c*!k~O 7v*m[~D%VzEmdQx>ol21Mj:|Mҏ]ٷ^(7Vv*Wwr!7}l=~o=%"BM4$o ,m <.Kؖف$sT)#>萺g{O J޴ 3,hM9*шB#=VY Ao7!̶uw4Xw$aOLfewWuګ+6a3dE>5PaLKu|Zwojl6ɠ DtHy*9TBG(E>)Ty13E"pŁ)򋴰'aVd}5Ѷ 8T{˘.GK  u^!k Ùиn$Pv<gkk=D=w6`-`EiŪE_.8ΘQ"W u|emKA ̉o.B);Ք`yu:#4]Ԗ,4~e_Cr4eޠ]I] `6%J3*REb"V<:CK^;S(Pw K;T`ɋοOţi:+ W0ǒ#y/ڄg͛hY_| ]JT鎊ގ:6pR'ACB:gR43yvC qN#xkd1GwJٔo]PTͬ%lIo7;w h VT տpƅ}͐Av*,Pf:}u&#.uy ƌiwx-GXGb6׏s&~v"3/pP%g'őIA66WbE c(뺦?thb)xcg#ͪG=B}px8O䨂2-mw)9Wcg ?G%#O!|>8ov83?l=~?hN٫m<IWXX}}1Ɯؕ(HKCs4mE>GUzl=e;!RʰCRES4;Q)$WZa,$amM&ѩUp.),9׬&)tu4M^oIGpI""2'!4Xm t5eC?^~GϨ껣y?ђӊ*|AU]Nd6q!bQ 1%;ZTS\Ws/2K>xYO|" HT!) ]1p)\>:rDBƄ$y51|u-3ŏu FS D@g uw˒fN V"{qZdE)߷k-̭4\~eXoz)7$CT5u!&scTy·wJ0;hB%Y s{AN(țfY_ꢤF$3o9[~hl40R2*otV$#xS.(icFLc׀R$˜nM|C*'K#er+_lVG!&4TmLv1ԣh)JrRd l9Ѯ qؾ#e}8op+ljǃ_{c!mڒ^51`|TAVhWUA}J\[㉮E _1k:1rtG䷇8ݧ4#h\>1~!kKϋ6Es@o=1y֙# ec mlG(|hgkzQo;᭳q6GON"}P8lĮ;ҚDh~~D_N|[E {(L.s*Q1MaJ4YW3?_R!A \H/Vj*[rlZf?}Kvq7ou:C8}ҙ y-Kd4%F5ZX ;/4koI7\ ZwhYYD1M997 Uʽ/.ݲ;Bi\Swy%Y$l Mr(A&Cm[,g ~ATOC|CYEXNOҾ0!Tt.Y|,̵u4)Z@[燀_qh14 dӓ?n"yIGI䵭.[L53v0X8;{&+h(c[pHۂ&W?$LC5`_'Fmm?s8Ry+$33XOee%"ŊW5fY7G*|GnTgF!K[6"-%ǩwV>V)6IySH3)5$b Hw@p[GR AeeFb!VXN&dh!F~*8e|2}dt2sGۅkou3̓۬ y" 8Qqu({be ]rHo)M%`x=&3!6Y9˧d|Gj's`eh uR׆[T =2?u >Sw$]6UAc ̕MFl_:>. tag|ԼʃWi+\,2:=6 %Mַ-ZFwǒ)2k!LK9pChQY_WzDlتpRJ(86A񶰎l4 }*WښS_Ye\K4o'#tiib@=7v#VcM0'2bc:gr~*t@m72Xܶrz#qV: ̄̆*h@0S9 aȺs QEm pۚ -<~Brbtnߒ13Ug}s#= ՛2n5Y If\9Kt۳XM=# [m]rer|)oukБj8cě|~-sާ7UM5;IuQKI/L7oYQE6tA<&#|_| k1X*.5@MtmyN (4萔Xo(c2bCĈ%sCOv?o$yn<(F)X=gEK2|0K*+[>|¤~5;jafփo(cBhxOenKiUy$mA~C6aX o3-(zX5FiNց4j)I#f@~c:r~- {Eh7*>)._VHTm@w[{2.)g\Fμ*0Q x:KsĘa2hcULlpl = Kl`YewR޿&nBښ-(t'fxԴDӃ1 (YmD}%J*ɝIamcgE.@Aqxty1_}@W֯gf1Ϳ\KHK2xf}I`ӃK]n`SX\~޲"ϹCx~bmQ-cbSVkw ttKUݼآKƹeR\g,ZE)t渡z2#8N-P쌀^챕p rDBha=a~*;=GKL #}_BHJ8i2Q `t f"/@DW<9u's4}?hupVVp1O:S * c[ɃM(LCFoeE U(HCp,viۮ2;~.ªyexVn殭6 .*%iǷBUBۭS!C76Mx;3dXʾ:Q$nZ*TIt!T$VQל09KZ-EzFêM4@(+Yw,gaFT^jDBnYCw.R4eIO q@1uhn 2Q'ۢLh-\# 4cyMQ߈pmm4c`=fX,4;x*V_??#YTeXP[\w޼*Q+4/FB mj*M(P^q;&9 :2/i- <8]@ l· {i#W[aB"Zh^c]Yy^|P4А9zܦv_3q:)#k9/`NQ0oOgcp=t>OY!ozBRF@^?ӲIϲ4AUq03a-? 
YTS H+S82eҠٙܤ泾ߺh4v u\}t{On^/̲LO"(8䏞'0.no)0}p3S\cدC2RWn2AQrExt)0:*IHri#1Մt۲`[tڣ&&i!ʛjˬi=s%ט#E6&,.eln|2bʿazA8mfE{jm50psKkrq0kD7[Z[<LF@oAUA5o"u\x ||yq'x,Iyo˪p+?pHU-Oý/3 L?=*,dinW)fnY1yXݵ8K"}a?,p2{ݔdJ\.Rx[Sj(&,A_EE- 7p6'oےu_]cMM[D~G`n ;6x#Ob4ݑ=~_|v)RBĺ;s; r|Zr7GM;[ULUG2Ÿ63Iѷ91wVs^u1S2-`(|˃ea+tV~P'rIǠb; DN-O$.τ9a lJ5&}$/];4&cG.NPX :`_G]]p1DxYڿ.gKm <̇DT&Pj9V*=!vr)X[ilOAY:{kKߋ\b[P@(1Մ6՝Q],s;MiaÜ *Ȁjt#yT)nF 6ol=}kIh#;Krˆ-X&- ) H7+:1[^)flec G=tg}vo ncji J78on[gE6:W24;Z83 3l&r_I{wS݊>mե'!/xnA[vc't{H=˺76c_$oR'=AE7e&:(fX}E@oOxwbN5ylFSpKjƚw,A vTagG44"]W,ei| XH^Y *;.¤܅1 <;+ۅ!GQo!*`n SYbO460Q X_޵Q0*fLlI32"cAj#gzCpA։"z$ x|VBcLئ!3.Gkl3 /'%H.ttDD Kt,My{P͎e Qg A {2^4dIf9%_]RnU:ZiB_`prt~R#~!J9zdjmxeY1TvsZkW_#K6c˖!*I*@~{k hOX F:Y|eϦ݀"JΞ;3]bOA]kz@1w^/eEcp#Y>L{T#h naD"-%!>d`[8{srHN8]ĥC>q1wY g="m-Ԗ^N5KZ.@Ysƾd#w8QI{ &[}nɗpt ot?CQn<0wOI'E9^6l$qHr=r E ;<•s u30ܴ'eU3.$ӢupԆgDلFn+iu6!- 2ӎ "-v5(?X=d KT\W5S\1Ux> endstream endobj 4929 0 obj << /Length1 1386 /Length2 6039 /Length3 0 /Length 6990 /Filter /FlateDecode >> stream xڍxTSۺ5Ҥ#H7 & wAjHB$t^7J](]zQA^x}kk͹d30T`($F$[ PT!4c$f04 T,PD@4HBRQU C!ahN% III(<0 8ܰ!`0H#K {{{ B('y^7 0a^0(=hB$g8/1 C!H(0Ðul$W?ѿ#p #H/ Fa^`8n PW2 cBh8׌¿`Y UA4ɯT0v}+{GBt6Ex´T`&ۜ`PJ\\ =| ¿ ~;31`pG 0@tsEps#Ik9ƞ`0( 7 iݷ43(@PJ@ 1@?X-# W}e?#^?s顰̅xMtk;_YWwGo?_v#| `UjPs_ՅAn€jPB:a-+Vp /e77 3@0( |XA\w4]0YW AAMDL`|~ ,Da!쌁GɯؙhW98rLV{[0 B2?Ȅ8UbP欁gՈ" zX]tQeg: MqDmLПg'Dl* XG.d44Zxzl.˞#wN+-n"7Z^w D8N$Ytfom%7k2SiCu&'NwiW`O4(4zgGl)ð {x1)QMmX㸅ȣc7RՙݵwۍF=UsRպ\RfAd'dPYcBA{hۊQK,Uw ^4mu gxš? D?|p{jn+Aݥң"ę7Ej:"v"7[Q$[>S 7;<Qdnef&NJ[DVҡ5r=gUw8(BJ3{9Πsuwo!!|_mTEQkWM%i݈{1:O;̴LVAOE;747LE?!һ$}MaR4͕zWd'~ 3C?~ՖSv[&-Nn䃼@jie5{左[F׽Ts UIȧFr):]JZY4%P!M?WșhϏ$ءaSzGQ4cQ˚]WV?X[t8 4"Se =y<#0lZp\7.E{:pU"U^hzzIǶHaITX>oxYPb'yq)F~Oi7&lT?ˮge(l~90qV9]\|>\*Zdxv]W}[?+gM)e Pjo}q}G.Aj`{ƴ5=G3WC*IDzZ3+W- u˳m7fHqw0LgJ+hR7RI[<]6C3WILggdgltyͱJR%5j0[0r'm>8i(s>{meǏlp|in|;ԙvgn]I0S? !0j)n-R}E:/!#G㨛U9:o۴?5f>b?^\sNMܥb=!ڌ8wnc\6΂'2,Uϼr`}Ʀk^%]q[9NJ [x;N&"- 5z.6B<{5B޾K~'\}BЄeG4lz}]g$-!JXo*T2.?`gl`)V !d~oѣnW?wݑH ]@ O7}oz]y)1X R|[727r4UE]zaEi-U'U7yYhc-b0kx'8tx.Dѳkx%{@! f njuɁby蕋Iv|Ho J8 3$%ͽl˾&wIbpa[rfR cG(]S6!bs~P^Ξ}<ѐ&A$㰓[v²s&>'+Su oR!Oωm") gK[A!ţըC~moC| [P輱:Rǯ.n"cd67wK6Ù_'Sp|,F|a.2))9 \++ĺ| ,"bBnUhME3ƢQ/~;XT悔 MqwQ,;[П!%7QM9J0XHtvdK.8JpS\dYiہQļ J)N|[!=͚QbY%F~=Q?cґF՛^gl᦭*Ҫd_-Ei;·'Mc]L]ecgz z 6R kSHXܕj^TQ J̐e4>c V/cbje`rbqؙaΌ O`kn_EkV2BDKW i7Y͎rK%ȑ/ɷkhԵW{|Czn,)v_-vwı{ e yѼ5OR d;, ]kA\8]vn>&אY8Ca"r7q֚啢s;<5 Ll@.Or%Ռǣ==+䂓6sS/n2~ }URڈV0fo0pj22fm˨@.g^pdt,Pb쎆DY0g+*mռ?sngS~)nFXN`fLe鳨N}t2m `^uyu'cS]0 `%O)Ĕ J(RK0)a䫌  "MO-5Y@+횃-aF $O8fh1*N>niȩ.38Ep:Z=g\P_kn+:Xh߄oqʑxXv:#-"]SY 4{r#}1E(BuY0ՊcyOB4/rky8H»rCo 27n'EPf^X|;8Ԃ&Q`YKFY4@F3nfyXܤE)b /c=u1r5|!*x]m:1LJukgsC:!a\ ݅xVfO^z3z:G/NT+t kNQg7ʯ62OWNm7w|PlU((?=$F_d2R^_EU\UE"||wp_*IA؅ӊ)AĨq\ݱD?jTI?"+!r S ;/B،1ПKfv#{POlduk"'r OP5KֺAyY9XbiD*NQz)hrM3Sv{COEW=U#sSc/$.gK!Aj Cb%\cV 1B&m.T 2@"fUR_B>kqQy'E w؋,%t=/齗AA]ޣߑRFɓfab<Șp[Ci$q6qnyQ 7(%CYFXfr9bR3ȓPW@яPHVrJU͋7p,lk_*Oh}'yIk|N-LKR}şua sjR8Ė8w_noUmNf S`{*js,W|ƩI)i"flvX=5S]j}1w,oPN5b* ]*"KzKM%)։u.MCI.LDb#P3pAk˪kSE]u.z_|>M`qX>u"9=zڳaz s}%p^5`,hoN~Jxd~;B jwgTFCVclSd,iRоTsIXa-s*:EG-t>ğJX"[ss=d_SK hǧ'y~{j2K` ÍexlTI&yʞZԁ~᪸ nUmV}BWQ9MD`Ͼqn /ο`i$TעKr3ݬk-=mxA] Hb`#b\ ^y)Dgw06|bNmP`f&2E%{ E{S0d3)Fy!Pש݆mO/O&h@*-.>͍$lmKPYg5PCk-Ǧ *\Z&_&FLX?o-X=8~8 .+"=`Yδߜ7W@Ce+37q㼮Tw;?Fz0| /|;ܘ:o) Ds =K-a鴨\gWE > stream xuSyQa"AXHx\dDg"B+1+|&WY#]AĆ#t rt&TA>Z4s:¢gBvP#X4L,SB ]3i̜!>@͝[q?,fδ6Ptw'alPXp+c62@gH4Lx`Ѹp;џb B;E`B !@5|SGa5 V ku^(o>H0fn_T06x)"o1WB;Blľ  îWALd3Ep?5wO-47˝dq\xӽsiiWsYw! 
10uL 2)5,fμ87 `px.1"`P @7C0sN0aB0 Q̯4xf.=eςAp+P/AIg'ϐc0nYXm,Zn+t^fD6r)m`9o9L{c" j湥i0=gCT~Ф5EkcϝWFWO;T&#񺓛Qz|%1͏(u#%[҅S.x^Ѡ[ꨂJvU}E*&6޼d(۴dzt̬]ӣ뫻5S^ّX}Dkm60dx0t~zli^Kɚv󶞆{k'֩#%ILf=?x$6wjVurhu(237k<]iu4Mтָ'" ^&?S^PZo#fn=q-ޞ'IS 6Ɖg'v5+:+E-%F#/7삯O$1w_H\W8PAݓҨ@BT9>2hZJ?U7[qf*L&\꺪#oXl-Aih\Fѹw)}ʭDءx5{b 2+: M%w:~uxe[ؤ=j*/ާ z:V]q[e"Y)sa@&YDtd[~Lwp[:eMY1uX|ƹڪ~9qluL,a$+o[{$mr>[4|x~p7>Qi\XZT< 0\8e@<2}llDUޭ\Q=D-)p#1ve9k|U\3)J)}AؾގWuЉ<گ4kli3[}!FW7=81&A[%E R9etI犓%?Hd)g֍{}:drވ>~s@ҞhReQ? {#nq69WxKKԇn7r겜p=*VmI.xu$ #c|?M>ՙe:Y`{Yt2C eͺiۍ{6i8U捞5 K֭^]%+ ڍ#VE\~E"Pk~%lLs+ęyoj UVHF`iͶ8QO 6kKZ$M sSC] ąhv~B1Ja:`:>LcKRa-4&w([nR(UK}5*a㧬'R4>o R:`4V̷(2語rnxjo \s͓T҅ اPPhy`#qRãvEjA fR[SiNuC%eNy՝թsG9޷h{cdE>!Gm,)hi|-M7Q21dՈDZêhEm 쩒\h endstream endobj 4933 0 obj << /Length1 1626 /Length2 14383 /Length3 0 /Length 15227 /Filter /FlateDecode >> stream xڭct&tl'tl۶m۶:m'g杵Ϝ/sZwWdD t&vF@1;[g:&zFn ,g%#v:[ي:@ `%{8Z;(UԩhhhK Z",l_?@k;{W@e lZX rJq9U8hh Pp10XmTS;G?9t_a@wc?&Z=`0s4u_zSw}%Ssrv2vw|UU7NgsCj;Y|v_&v./W/O-# W2{Gpq5/G5+W_} =m/ hmJU-,?"ikj`b\?;C`4es* c[oL-bm-gh1#ch 3?bhca6OHm;hagIh`ll05׿&@Gk [)?l*Vo?Q/ b2ʪ4/G%pV?Z3_?i^tL:VP_K5tvph_53gm mM6)18:~@;v}Θ'2=+ù+odJD{ |$ľIп֮/=|.y#)>LkTu U!6y'q ^Bƅz͒7-vF)E%HNGg*Bt'{D_8.f _OC#ýwG4q0d׆'Θh:x>*OB"ոFl͞7XqN7]ܯ KXTjWO-\~! Ey+'6ڜШIzms\-XۘEiWigKﲬ+)*.Yxθes ;ّ'V]<~d.hн*Wx3 4 J^,zW0zsu%<چx-LDpxb$ tTS!ߞ69 s;Mal{֗l>~j>$xgrOU17fES1اUOd>4=*m_3)o )o09ڗ= sy<(CngjJTh~k_IpWs']4FiR3evp|}Pk=QgaFx G -u3 ?eIZi0h͛~9\#:0l27^b$xNዐU!d]/;ݩ0M]P~})tW2+'[\!Q ^|WyQ3KLGyze}OT;iDrrT(J(Gi}0tNF=+'J2jĬ :㱁4ÐRs[` {-\Qt7(GumYٸN_O?|4޼>$x'[Xj|rW|q7@M<4TG*791ZXg EWO0W֍lp/WOWBĽb:>ëo0$Zy43ey*U7ɍk6 xbק8nvAI,q:6A%_g ԞM R#jVlΈSu﬈K~ Z3dL8{sW'NM~ةЧ/Iɦ-"HXh4<pnȌ,&;G;*xtin]JXq7%(7h.c 6Cܪ_OI3m}<Ro!zWud9ű܎ 7oNܑ[LJǠBf1' 0R! CI੩NȴGj [0Fl 4 u[b-RFH˽f̚;>~r6|eϭ%Fn"@Q'G݄7m\?ߤxa4K@/1t2B FCA, y7>ɞzm ,CUrSd'G'vQOH9ǘm`EJ}R=H8WǍVMDF`%Cmk4:KUY1Xu=Y>,xTX\l 3eA TJJQ,~o^Fɂ?QpmUZ&TO+ %>̎ȍ{xw-;Ӫ{I#I1t%<Mmm6by>]#Hgg0rV '܂x.l|Dr7e`2 tJ31 ~>Z7KQO5-DK-}}ݡA̶5t92ɓG-MO[vڂ -"D[*ߵȌ]rbVi bQ)%N_/".е|z=d[o4׆WK:lXWY"^6''ѳ/X%g)s[\(A\/'^k԰{!1L^q~'P&z<쵉')=gg M*44GR<>ZL칬/tbT杯y*v]ʞej f&fhNZ@qW,3h\G‡Lݎ X7}PU*abAxn΅N> 'AmXDao"%N{ iGhBo)CYߪEF`J9Qsۛ0} _hT=>ے#9|Iz0ZkܕRn R(C#E)e.iv1bH*w^f:ˢne5C$$\ߋ( `lύBh7>9$Vè ^5vqsĞX3EZd8g,U;sd!{BK}\PS!le8:n*KZvto9.N;`ܵUbi4<wUJ0J!!P;uf2{yXVYM>⍆貫@$1('4)g-kS<ydj̍%=ߘġnPWFpJ0!q0}[.zA>Zi(b. tMn` HeكPmԝ5[2RoGjz'uB!HΖ2 1$ɤq N͌wMCl>7Ix$ c;VQr?[Y&w?b'- #k7yn*~hxk!8ՊTJ8}g(&^XqmO3Teo]?FGRjaтA= ^@o) ?\`tA;Z‹W_&g4 o۲;B2C^bp#/'r~S6㑇ʦ-!Ea#FVjaN><~ gkL틫|p'37ehfgf8C@YvŘd)*+:&)O0LBT+®KJ1AQ-8S?DWCX4,2?Y Wӄ4Q@AI?Fb':LDxJj`v饆CztÒ8!,jiWٖl'֛ PFVǥOT[Us`&ޘiȖߢ=KMϞuVeЉ>n;GQpR",ةk\yjЍAt05p"{^KcY&eD}~<: 79g{^3+!#QȍwQ`wHd]`ɍFZnIa0р~8W420ZN;q 9Mq5/TmI`Cɫ̯T38S D\fJ0̊MF7<*tKiI'P?>P%\*0{;?~^r.{='n8-Y$ݨʊLNIa3 _3&BV\ae;{p~))=(_B4,T4ARuz1?w1%%xEo?N ͵`B5m6FͰIl$ﻋy Yq,7Fܐz9@m{Ivd^}ԓ`f EMR99`_9"I61|ZMenGDGI+F]:L"qDn'omݚB8\g&M?x͕78SvF5~\t]ᔡ{h(s6{RTy+I͂\*I0h #I-h髨26S0J侉4t˥ۙ_ ujf$]Pxڈ%Oh4lS1F]x=j+w&>?nWͩrJ,z?E Mp`ۃ\st ytdSJ8\' bO+[/Ӫ\!bŢB[e `v;7ﶩn8a<cY*b!2 UvYdlS7>Aʼ)Cǘf \6Ds_U.U ,zg*J7]$i n0t>f-ipXN32||{}vQn;8 f _~z rG(D3L;[̅a#bpLg":9)z"w=y5{$dPS VK{ڶ+lDݭB!?xvY L}y_(5{#:Yw?l!z&Gd %~j bG m$3Q?N}<[TWL*= OQK$o% '}KPoD|A9JȂ@q@Q=\su0aQ:.$֩!`iceYN|\`ZsS%Gf> Ow]r68!)XKO{LH)|R}{X^bP#|H \4[˓Z:sH?Ň]1' F=N?x7ԄAN\{s3#e^{ ,=RPc? *rXJ[+B Jt?S_0{'j?bP ;AqMDʠy6? 
:t'J(=Ҹ%K|o8J<7qߊlM/bkLd+/oq?o1wC ][t"`!g<`p̕X_!,S _ uD޸x8;P~`^Š :ǮX WIAo> B&G}F'j rdۊۓ*V8+y= d>b mi owz%oDn !J[Nܑ}uEz`-Vm:&\ʓK@~)mYq1i]*66Ռ~$B{G L2VEsck/_@i}*Ms; ) .a&U.8 >]bV#ܑzl*Hz!BnQ,EN8% $ާ60U#Et\b䉌(}OW"(AO%KЂ7ww[Jw'9T-c$/UYyi*9?kn^Nzn4" `<kaL{%计AN.c Ei9ʷJoMfVbfΠJ-ݺ3FmwHݥ8KxX'Tl:VWN^'I{w#5LآJ %28R#oF@ʅ8"bB7@pl,",]-r/k\ېh~GP41U#OЯ^MG+>A+e @ S;p70q6OkU*"o]"] L@=BV>ĸQ۴MARqc-H4QL~U\33P'Y&d}dtiKZjv7J)6w>' .2mLE#V2뮵 AA3psL\' 9m@z(ֵCÍn%ifԊKwBdppS aV%>KENr8@H;p&+w=H/4 t\әo}[70#X'+G?WWYp^㡳F0~bMUJ\01_*_@lasSs<$RDM:N_2{sPzX0֜[w krr~ rp4*XZ7E,ZzX;pHjfk&SXP7 y[E .SG@j߶>CCe(߅Mo*/'=Be9thCҌev9.)C4ԕ݋Ku-W$4Acp: |,GuNϳlb~dTJA3;KB z ~ @sR>CDۍMM5{J5/ջٴШ|a~k+U؇w?5TXS*Ck)Y(**PY=D&  vP('mp-t_"WUnKeH 4tSӐ[ve{6_I ZCt6|W㵈(Nza P6ܒ9q@ މHn[. `m_Iq#ne{8c6:qlqbɅ,]-uLCqFAl5=5IAuKW^S3@ZX GqE B{\" ~q}Pb{ l(sSU/8N4k-e5*Hprފ_9e]=@uFZN=0%Rc\&a,܍C$2.H5|a蓠 k0>Xo&Q^ -`qœgP,'!:~l:-Q>StZЏFЍ{B*Ae4*Cp2s~t!IE^D=O`FD7@k\4Ԭ3:"i,MMXM(Jf`f0;NM׎|&Hemg`QSE,]xpTܬЊuV$ ZjA\^'P 5"txpn9$GezF/Hæo6[cʣuh`1(97։֩d:rcbC,Qni+g Qtbd5rBxȝhk}l:c1-Nd`44*F^jĎCmT J1!p hG7Vd?5 / /"M؏!>] wMD]a5=65+\sژԏƍ|MJX} ];]N5GcUc%PsruaQ{%c(T6cn)Obe؄[\~ }ψsr!n1P+~!: _dTȣIxzVZFx- I+dA6?O@(|8.%IC_tRXħ!Q:%Ii͆!ulù֭HRĎQI`7\^wo#]ͿBkBr~EcXl~$E[ߞz}ȜwZ?ݸd]fifvUn .$x rY1rltMor"Gi_:PЀԢ8}Ap*%Ei-2oJ)@yS>H*$wD;$G_1M2atOK"?|wx)FU\ NuN_#>Xxjzk*IG}Lx1]f%h2F[ӥ{3}4޷3\`xUD1[یPsuU>M?xv yvaCH'QXYH6{'}N rN#ډ./XLC@'<x{ѣH]Iv v<ג^)bQR&j5'uT;ylhdyD '%شL_i9+賠Ȳ=ZSJ4q9 TJRDvKwh)\Z+%ƥ XKXZ\td(S䦠RjI =?01 Vyp M?'~ƣ1Z"ϓ ?zA..ѥ~br `#WTd"#Es&,,`/r M|c[/b;1;HMŚEs%eF9{Cӄ|a&dfâ-| H;xhv ne~DojhP)xlqXK)Ms;eK-M`|aQe0.Y4v,6%-Bް>omC+CǴUUe izdB#rcLjTe N-jZȀF/v} TZzrTw_tP8Sz gSIrfd!k~&,(*FQuޖx-\oL(/ġ^\ %ݺž롵}AE<&xb0& 6 ИIk ДAg4Y4B>*f*g7?aNb[U6 ~V=P'70]3 Hfȃ^ h Xuw)EhiyՃuj$iQ5P['f b}3W%#iCfa8 j?D}:4 ~Gd!g\-lm, _'ό%5ܬqnd?Y2׀]l>Ny2 ONH}U_j>Fc]r~^/=FFi9AwدeZ\d&>Efa{wO)@/J܃&.pdɪ(!MD ,b2YQ^#4=a^a1BQɮ/`/ЇUcEC4jɀ {1ټH"<>nZ).9c?}~mrZM]߯Nx[,bB[%Uj^;֙(Ow=UӕV5o'$-G_Ut])iM-/jo~ n0+/}6XX50Ҧ@uRF0W&'QQFwQ}6C@J^jjҖн_%&Fu :w$ +4{YɁlkR{($Zvnk  I`\@9]"D|PdF@'s$`1Z߶1,f5_æ U{̶s\6k{%efн;z!kC<Tmau iQ uvN)8LI} RTkdƋVqB3t uBW^q``t\8 dbXoq 3!!1pU\lp+ +vav:׎k%$<a B(PU.h)8Ң< 9=uqJ~ &ܵ"9UDZ8㲧IS#^̒ 2-aT%IJIJ> stream xڬeP&;wiwwwwwh[nC/sLoUϲkɵUJfv&@ ;[gzf&IF[^hgGu;lŌ<M@ h `a0sss#PD=A*uMjZZ0H'-W /u*pA@$JRA :[\LA9) H 0sX[ٚi͉/d4 1쁎6 ' [Sk 7WAv=l)99;:*INgKcr;v=L]i_0 ['3\&@o` @ cG3k_L?/[{+_^ڜoNS翹-@슴z35 vofv39ߔ;Hoz/=Wh kkc {dm @ϡ6vۀ=OQ[r`"lk&+AN w`nlwfҫۚAkzf&bS~ft~FQIyu5́EpV[F ع9,\߂Y|7)#o&u?~)q[S;VGO?fSGǿ$?=4Edgl\7<); 9b_֤V\_k/=|.yש h5Teu!Ʒ.Nڃ@F23hE-(&Ie7.VG'j2BLG{_Ӵ8nf0ӳoIOc#C?n hs)xq|SNI=L?_]9ei֢%yYr!!`θ`%cZ2u Ee)N#Q1h^>[X=oI BtwcҶ 9~}U`ÇHh +ۃV&{G&aL-Gq}bC7]=yAhhYtΣiX[\^ V!ߗ-EeѤnN7Xo S$5G$n2/Q {zA8r{wءzt1CF &Z-FoNysA Dʎ: ?*o{ޮYjzxhϕ7eدϸd[|5%LPQn!psuW('V,SFLH+BY<(zvZvtƜڽ"{/ӱPm@Ƅipk&&j+( ~INʢ#J)uԤ4]Ÿg*]PG#W S{/npmdx?g/h4Ѿd؝'~r:Itf2h!<]nޗRE, 4ӒbF_ 2H8p0KJr9LOI?T]}C!{&ZŽmv<\MiMfvulCqq$^tzj$0B:$I)%vV~ Z9xg* ;\w]QJ8qbG:Q#d3B?GTTsm'%žT7٭.!:,YdyibnNL!~fU) ݌%ڹP6f+*2K{K0NQ5ZYQ\x $!Ob%&C@8)ob@^[H盄Ld~颾];<ЗE< U =%Z)D>Q,ҚSMK?lxk swR s:uw{& >޸oU[K$rϛCIhRēNH9M9pMB X$H3R ;P5h1W-CڸS,Q_-0=H5FRrR, i vsjxȘv^]IfG~9Ԕ]k[lJr&,`u݇G8 'mHOĖO{lm~្VT0R;uh;]ŭ*7t\ضv $}o.fW^FW\KJXYG/^gK >ÁF90}@ɪ&&i(29CuGH|)QkN5Juo>CY%SgU^wj*\P;dչ)-ɷUo!j/G+$gjm-s-J"=Y[GLQ8~S N̎&H&P|~ɽ 6~S;UsSsIxxNS|+@_M8+[L+TPHOD"gplNw=3S"a ./7,bvW|V _4A۳j^)&":hir&bI@sE<52S g$^/U'3_%'HNhM_דw&(_ 7~ܣ鿈Gϗ'lݝ:E܍1/a_EJQ㴅6q#^.mgjG"[W 0h$ }V nb;8?6D炿E"gͫ|)N]>qQ3ƈ*npD%B=²"tb73{{ʐwWB[k$_\}[yJ" xX6MWQLo\GgNC)]+0DD-4Uagި)6UWr 3߾0${;a8X4ڊZ` 5k6gBu/( Ͳ{m G|J*l*}ךqaP#<†ʱmh\c]<1b&"i|N݆"+z'+])J0JI4Z9&#<; OoZ\B(MUu<3{iJ JWh:pLYiE J׿7&XIKo-&G"/1e.Z;tXZFА7ؿ gﶞj{Շ96Feg.d)#fy" 0}Lx/Nོۭ$^W"ᲲC]`ވ)Wh5NjRfAv `]/pFsgjᐊdѣHy՛vqc] 
K捦54L5]i.@3ģxjs+n2@{&Zo0Ti%G |Z\$),wlCX?\am'%'Ɓ y)/G/ş8n,:{["o?jX_߇ȴ&12O22#'/WcvJGjhC\GNwPMӝNcȜTINh%}$P^$$z֙1nVnsgI覘 w'^O5N򏀦TZ]=3o5{+N/wՊ= x.+.3+Tv 1yTdBRWx}lci57= =DG[ rKtXc`%OhizS3D})E/+&>o屃*EPV5YVxICJHn6F猞_\ɭDZvxDQlZ:^z)% O RϏው@A3QswQ(J3'k)]_[4&Y. i$i3{ÌwgYӈ\WabmISmjJ<=l*I~v혤YVJӓzF*`@x7 nu_v3pmwSZM{7OO{}WcO6jeDzl<&A!= 2;XV3lcѯm:YeuԪڨHˬ+ [vͼps30-`:^R3iF&.YCBeK\QF ' >KE=C_mKY'V[+̛ GپÍzx@w555c`OΈ1_5MpiSLPuNJtZ|O'tduLOߓ(_aFsC$IU q43(OANkrn N6Z:Q(Xu7Oh,O*o="F -qiv&$cpmq[7e^{ݝuu@;g74= i5ǍĭyIW6"_ Ʒ8{O,-P(½#A@uEHiwwPЌJj E$o/Kb{[^}6 Qj ap90,l4w{n z3XPcAN/($B(]T7>~Nk=yI0Ezb{Ʀ!;juB䇾+^إ:ν WƣG WC~{'r 2ǗJVN3OZVzb|a3 փ5:5\}PCAUwVj/,㫚y?tN7wdRvmZJG *YQ_İ>B29JDuz_D0ɦQH\M+r.f۱Jnƽb!'C8FSU6E2p>92[@5p]<0T}HzB7u抜 (02u>ѽ/̝M4>Ds1#2/yshH:)6WdkᯩiL&| nhdi+gT/`享J`'0ل{Oa%*p*67r~nG蘤~^rXuy E }oBN6/}TDnW iP'3l\kc8A7tF~a+r(^DA^Rq70?Qɢ'ec2} B)1.>c]h4QT\J3|gMsi+lL'} RcP:H tQyӟ_#of"oZ=Yꗱ+ubv blINuT 2MGH|9zI b; Fb0pEڱө_^hzgFǤ j)ec ȅS%kaW=DÒHva2l,j_s$X[־k22;u=Ȼad^nO]72Icsv Ւ1@&aŔߌe ^4雤yT'4*܉c73]M=8TZHxG$:wdo6K^ 񓄁˝4ˌ B7| +JrI OtTBycG'|o 0T RBլX_"kdސNu8 :Acܟ .5 j1j(ff)\H5%c!l#pT;f߿.'.^ ݫ^1eCB(,= GJs J: a&[59O$%1xi7x4A54ZP"'gr<,brp1uVc5nnk e)uò _c$/_y{zݏSr)~ KE%P[˅FG&HϭA[FIJZQ;^" f !f}r0譟9pĶ U:&'H,9s.tN uֽk-8\'|%rmTdOBM(jŸ@$ItXPDjM ޟA?lWbz]T`j"[dU)rl/j?ئ'D ҃8z"|B_wy ,a= 11":y_jx O2p$0oK[*rQ@ɟȽ|#V$V 2jNɉC8|R۳f;sxѷ!UcS%&{y ęI_57K|L`-&O("?EYDE9`F8h}bjS{Q2#nܢ*J.ゔRD3R{j:FqL>sٻLC]!δ)}98Lоy5rHOqL6u%]p Bb= PC0>òD+G S^> g[~^ouJ]|aO7.¬Do)\^CKMSx](De}S.yMemQ{sͰI1oߨE{lmj}핽A-(أS%S4x+m6h}eR kdsPPi KjYu nP%qbS p61śB07kn;!Ɠ%&q+ʊeV1u#gJMb* w6rWzu A+}whDX0:L?4'K:Ա,I/ܖ(p袖/6|fv̆oa|i"Y/l|T==j-eZ7ti<l %9elh8&S@x Wc ޿K)=ʰ& x seGrt^:+v8LޚʣL5UKC|YTy6;f NlR9ڸ@S5K\DK)bVW, 9~-~Y&yjߘ(8-K PNN'3rQ~"N]5f kNK0uϾ%ȭ/65#\gQ)½&ÂPCq(Z3qYrZJIov'{k!cwǻ[Rƻnݗ~:"S.ҁQp $OS%] $@A+{+"5Hb/yyk^z{~1_/o<;EmXmj_;6f\wA.?s5cNK;?sƘ"<`3>֪A_ŰK>؅PK<\^uA!mSiq|\g`KIJ C0Li' @1qk'5顾>VE^RcPҢ9wk;~7,ϵSS갚7T% 59ۙ>/VVt6S@Q\8YӍ4[*iN{Pjm ޑrDžM#ip3qj^yi-'̰1bqMZL A T׃Odf0H DC׎DpUD{R@-n %SZ#߷GPR^v {L%&I @Ӽ2ɩqM0I#$RѴwetMxϺRt4մ(%8wQPp럶iQT}cry+aڡT m\Ô4e)Ԯ |{ۻ8/< vHd vpDeh6((c*@gXJ\Ө<LjRYi>YT`&-6ϡ V{O-hqI |h$<;pg>ng"uXMqJ7~OGliGGϟh)DraOj2;RÔT}S#&)1c7riF ETI~֘Wy#Rr:גB"/Zw|yQNo~cd30`*@qT_\pS^:ZBZ1k/6-bU^p ၑ4ѣEcqBAPșN+O%ů9;6*Yin'=H@FQf˘VU>iiOǟ+eRuQ S+{%Uhj&#w44Ƕ h0.lIw Wǝ!r1bRq_ĜiW_4Ild _uXP3n\ӇGhzR"D7ٶ#bOʙN03n~č?%^VU2X]9E]qzJy&|NhR<BjCmrΥ>~j^L2n)5P+!Ԇ t6_S!1@G3_wE`v΀4-:zp }II*I{S΋#/8 -E6y7 }T 'U Y=Jĉg)j0}?Ž fTyi-Hhu`Tf-r6kh}Kwsr5ui&31qU:b&.HR v42ĉ@{yKI*1͟4/Bn^ ŭfͬg gωc@AI\{txсZ.[PAKﲯ?#(0eGybŶzs=_Bhwuz$^ \3ǜZ/ġ1Y}/(Պf4fG9kxxʈ'[( t}u$3*5XX01FZ"x?H]vr0򔕢=X$tGCwo[$mJtdRW67v%(G cݫoSq<b1׃0sxz k d_+R*ԮRN)C* o+qpBs*tW蔅9YF;Bh{i*D|K_uA; kv49e"B\_#AYQANkPegTPԺeU !a!N{L7`VI6>n3a`$=G)uCxx 9A$EdݶkPv p("6 @(bg܋QJ7 _ͨcS;:3:/XQ[KAD&Bu64NS0,,)jLQ|5E$ F΄ z;-~_YE 6AF##@}-Ds3ZEesdA7|z.+M!#'-} lg1ܣ ;`.l&s:} tpR19*)SyVީ0N"]z-ۆ$E%E`תt!@yDod1+t8T,&p78o͘܍~NrƧPW7MwélY*#ڵH3ivr,$F<4]ɟl>)$Ld%}7Q=;>!ciOV7P_+u}Kw+9* ŭL渤.vXXUZH`* iC-b7݅wNRXfSW}Wٵv 8. jPyQ9'56n*'#Xlá !$%oX7ǚ [)޽MP4Ya*iR:{{Q$l܋D#3L4j6۶$Yi0x7, 1>e&/9~!ELO϶Bͥ{ c0&c@Rk1(>t?*HPai3 EsObJNg4 ʮ^ȶO{5w`Vݩ۹=`)V<\#(N.#lފ'oLQNwWeXkPoFoifZXM;r:SEyՔK+-a7*gdE#AV! 1tY \St RG4 u%0/[%Nwo,-@G| HI1RET[C[Ǝ͓Ea]?^CϿK%{YXnH"5X>EW+n@@j{Y|`Z̀}]! ƍ 0o-2*瘧FD%ƙ6g(YK׹Cќx-ЍtѪ3N<2B9P%9[0ׄ |9g\~;+!ឤYw/tX-/,^-}U" &%5!| ,cyGqCL }2U+P1ٜ8<3]^I.<ƴo;I:tvzWR&P^qtFc@_'6ԝ.kern Ln?f'9|Cz㇣vo5{'K5T|xy_ξAzԆy>R+Hd).(2[7wLZI.lf[o8쑅Bog ə9w(c{E_MN2$jbTs&ŷWGV'bkS6u{Q.Zx(.%}$|i;pEuy>D0, m_.VK# xV' UciC;ݎ:br-Bɥsxk9GI#=F)F }U%EMS@r7y=? - |c]+eqe|Do+9y9,0Ǐ\dVB8 ]73S=(qM2nrLsrȉ-Iszj3 YHc_92<&MB`]U ~`j£>ny\nODY1hDVX<8O.kHь6edF-yJd>zmoO'5i.4Oj1#DI'kQ)SA/HD3ӷMVeX˱1WﱮMZ>~iLYb:^Y5Is>ڜتS aBd*~NAFPOs;>۷u,+_E>Ode%KטiHgUp? 
rk=R˭mQ4^CK_CĞ$.3+d-LwQXUuT/ѥ^GAG gc?teYLǩӍLP<'8,9HH,oc~6$(񖒯M\Tph/g4h! n?rX 'Ex[q] m6ziL0j/W;$48NfUy:N'"MZs+xXPx4Ҧ=nr۝vr A'H?G'la>%75@!KB{d "+dZJA_py) |G[)Wm?Jy=Pb']BrR7Cx0\Jrپ4|xق>:e͌9z' |qG))&^^\lӅ|QVQ#5n0P{6.bn弜4pu vdC$iDҶܖe_X?LVDaff m^Dk1!M F[MlP馗D㔂K# 4o"ͻT(И]pw^8Cm o Hw}몖le [/9 >!iCi)9Bj z<@ۚmj+KcmmG8Ucׄ\_LhGa#Uu@ p$j=3:"Ϯc7"6I-&"4|#"+׾<J<_:yg}\|{>CNO彶iݪYbQ%7h4+g,F|0=2!zij 9Yې:= jjMo[$ `[,WT#rU߰>͢UZAC \[bgxfő8]I w bs6"q~͵[4Snq#ڜY/XkKօ"c4pi:ީLczdIӒyCZ鐠.~ SN4o/E|ed\DFE G5GcG8zZAnߗܰ;J\(F1v0f>潻?j);c=Q\:Y]񰗭m/WUƱ20ˆGȂ>kn'kTXuon 3,0o/7*;߲wI*֐U_|@|L:_x* >C\;mr$v6yPzIb/˼3G}p1azPrV3u>_VdwV9MB \Jc7sEP=c5Ơ0ƽ-Og*eWdz.uZMj2IT>b'Rd9VbL&[͝$٠BL5ٗ{k!lx$f]Ll0̷ss!2Y'x0Y ;+M^o+KS Mi/tLizKy-"~JLrRz}i bDI ?z#1(X!O @| 4UO_ǔ<;Mzg eZl*h]E.yL֜^3KuAH^ȶxpu_^bRێv~yV^E2\@șJ~ uχs14nT'RL+7Va{aec>vX˝VI> *ԕ$gM1#?_/H,$:nS΁rc-dִl=E6/)DM KX|'g)(uxV 8LjcM(r$XZ+mTd҆<җX1@nIt-,^fU_]& 仍{(n|\lG vW.UB25?@]œノ}8m`ɪI'>BZ'7W6;|MX+ekZp@1 h#dPcdK}A3[i,elo4EŅ!ykn ڃNً1?HfE߷a<05.c@V_GsPX? ~Pt$9FJ0O$JݚuB\ 58 HMT8/G1dL4K̪TiL\DL?_ G[%FS7;'cË P`JG' 01G׍qXWkck(cu"ˌ!$~-om77籴:q^]ZݟL&9$3ڧ3?+c;$5r5S<$~Gޠ@;3V2ZF˪u6ߩtzܰ| y8 Ga&FH[FDžG+@p#c0S qI)*Ce:l!tKUD1mUQGbd#{,vJJ[[[©6ha0*pE@D3?V/_Ȯ[ww4(D2 ^q Y5$U}t3jZ]ZhfaSPd t++r~nۘc?Z햆QOޠS*IpsR06>r-A$ӧ2C Ac_al5.cVZ'Sr]3nz iaZ}K = 4Oɼ% $TDr)O EFs"{$r(>ASPcm:FhçԄ#5ҚGUoT7qQF&hG+Boձdס=-1{G䨂&Ƅg,.t>4 $ w>|ow9hZrr];^d…J{f5egL D8mfD˔ V4#[ne83rld/zM3^l᜽E7/ g2N4-Z[ܵqr2d`!#٧AvŎdͲR-Q5e猋>KਁZ) e5ۈwpݷE)AD$ޒ?2S)(0jA*x_#fAU>qZ2p\ZOhg`°- yN/]cy_mv&x_vi* ^ \D,J+, 4K\8+#e*m37 T~sbMOZ(M8=+4M+,܀D‡`DUD+*$H*"Ҁs@9f-V+iR|y/Wko1l 1BTd>̐nZe7~hn㷛: V[IB2^X;+24KX}Kj)ϯ)ФP?iY t5՘^&_O4Dr\_׃$=9,ۣ>ۧW"5ӎqxrpk4"*f~A3 ^ʚY9c[yurH#+8& ݷEM cҠBid'|PF:n> stream xڭcx]%vvl۶m۶͊mfIVvŨu?k5Ęk^7 +=#7@ENށKN mg!#q63rr5r5hDL&...2+RMY?-\{O@Q ji05((jIK(%ffFE7c[+`\迹\FG3o73O3 Z +{[7;+!Ggv7+;t4r'7 p0i`OI¾iQW#+{?V.F^߱f@ p60r65sq;Y'_9Xٚ014qmaeϬHٛ;m7usDP}'ad`o053awp SO[oM_w71|,EckYzm  r5nŷ4t\\\ĭ_9\js5ꐓ/HV pմu:8@Fk\/r̟2Wd-BX|{T>ޟbl'-2e{P^ͣZ?*d YS/>GZZhubNuwqsӲSxO׬ji6q# ( guMn,h1;x㵽MH7j͂Nr2h_w儂Zt^PVT`LR]LYwc_&ui`JҸ6O9Jfd<24}$՚#MlvHvAa.HO!hLE,B@6M$Q{ZbTh+`Se,м}%A`WOI tEڳ{Za yoXX@_*!1~- ׂɆvGW毅k/wR ,fgKrcTp92njlC/Y]oK l~4ȔO_5,>k0$R4ҍ%6*wy׬)~/@(I%GzvVmH򞡪)s`NeVc`)|nşwaAgd窽{\4ˋ Y0_3nKD=G#bzaq+K-sj6dֲ-p~(BfV}F^{HgA#'EvL !1bt:|޿,7_I_Xt3kl-ZLj:t` y(]tKad}m%&1Z8bU㣤ߊu$F3df[i^h^p:m_ >̿8ޯ}=ֹ_uhU-(6nգk@E ZĔWZӬKǔn~?}Z֞<7l~hSѸ(RYHw48Wt}rzA`ˍ]s\u=_i6ܺ\Ct*8iPR!RZm>D6pHZOԱVe:_]E^h !] (vMiAF?x9wqcښѷn3:"721 ^;%1>7У֭-:ѷq"*Da; C@KM|kťfda:B&gthBkeq|Y.nZ҈#ѭcij\|pßu=~OFc"@qQL *T6f:~+A] Q'łsŵv*"ϊiK3ݔu~@|ncqZ!T?W M룧+(;Uyt+]堜^/;; V~-Г;}n\ᦁ?ql^đ7k%U0E}46X|[Mbh,K,(Az%tSȸ0uo= MjRP1,lwpk T*֎ ܌gyl#('cN$نUvn|ѐ_"%~Fy_ C(K:]pIpQ uތ:#?9B[]S蒒ď vZFeIWvu>)cni<w $tmhT&jD]76s;H6Agb*=_VҜ}Rs]zގa\3Є$݅$Bgu 31!/2^! orFڌ{tOȼt| V9u)}/dT(wsS/?*|i?%r!|ap5,{ffh;Njo}$61ңf k!;vWGOuuujEػ5h2̂9No'4m^=Yk\O-9P5ޢ%N%ڲ=%s1b=Td@[x)C؏{=(wf4P@e<@\?\IS<3i ׺e+ 1wZܦPZKTfϲSLmOL)S14gj8zCh+}QK|f'"/pb5J]r n>gj6`PNwp> 8yz[VW#;b3O&;RQ1aAD_KKz91Ls}(ܼ9.n==AQGTe鑦LqSIQ}fܭwO="ʎΊòːŸ"e*s:?$A[TB?#H| 5H[oVވsz)pkΤCjQyg/M*+;KGX}<;rK&MF˥XmOuhL}>B{#ޚTkiHl\ /kxbwRG[cPHEUG^:a@ϪkͶӰ :mpٺQħ8i)Dt&研ǐh|gaEYn6n.Fذx 'b Xz&oUB6D>)D1HH(՜+1_7 #^ߎ(~ͬtB@2P6Q;C#Zm)̀J& ^c{xc(Zn7&x"=Cm*JWll,: i@R,?Xͣrs }Q bP%y'Ϛ Ɩq5Mی/!"^ݤЧ^Z`/led-T͟<uܫՊ)? NdWpB8PsD…fvr!CچJ jK2? t!r#XraN,nq/(oy:}!fdΡs39Fw9d\W@ #0P_ғݣ4">mHGyRMxYĴF=ޕwX. 
E퟈aqe QiӨ(>CWAr<;6.r(zs s6?W!?IO_)w΢44f6 I>5X6!y.*E#?~' S6y$·W ||9JMZto-FkeZUdt/B'OdajmJ83\G>3Cw;KU\*JSuD{60LP=\wC픐jskC+StAcJv֌GJ@*ƃ12_#[W8f`'TĹ\؝!8F P9y.JCYNCcWȻkaUN;_=7_yPx@|aL^XP?уbG =n&i?ft^1Jw买m6 V&e>=~89~Ea^V~ԗş5'ZiK*+Q#\C!9#?퐰 ge W*E " X- sFDؔvmOC%nllKYBIͷ/%zB즯Xd'r#Xڍ'v{c:7f% N+Y{oP$_DdW4b@$Ӵr#Ui•etk].^ǂ)/)EC wRWBq,7Fm{}G f[+:;(붋484.XO@##ܯdX#KKE|:"eZT @s@"q;%ZnAg׼/ ft֙ {%?R;KAX=SF|E0!ܧ[!OjueOrj8K4tms@29-0ѡ> 6ߒ kk=ӗ ƅ`CSՈXO$zoZjn҈e5fAb y*RI $}Nb50!5HYbãtdW=]Ng 1yc$.!wޏ3^NH|[u[r"Z=xWEd3kNJ;2,QfOJZ^6㝱pAŴQhxeMotoE'S.rEXiM1":jkGJNm;Dc`ٶl `,24qS3"k"|%q@5$;[d0js&XF|vbym\cn%'iF\6Na'&Dm[72:S76ԵUY!ņIB5)Qgpg6O>0(( ,*A4urn5{9o6A|Rk,8S(A٩PcyNygn*H.H]Gu?YLvɂxT/͍H(0g_Y_ Fu]?%8KU'/pƸ$ʗ'`e9b A\P m˴*yT< [ Md'?úsfn[<6p4nj,C:^XvW]%NZS 'mr]%~ -/:D-4VR,Su3^5 o s QA7d0~?|s E&&HehֲVGiN DlR*,X !7Mn־PrKb?'GS(HܻN`2-Z48LMV+|`Shػx`>D6*ry0 Q :P Z5_ּO6"2Yj&MlO:gqjP\**@x\bv)}Zisz|&XFX$# NM/X6mbJzz`8GU˚~=f }+m"bmXրd)*F&B>*WWYXBptƞMVKuqH\!M^ojx@Ddìt35 2 Z޸+(M 7Մ,gZaHzy< ;¦- NAN} SW=ꍘ`[gvY8 [tb2y ٛȘxwt`yp[ˡ8Պ_zT`ljcd>qAzNǧΆlrl/~B2[NcֶYQ\6g=/d)j՞R%E)oU;ڤlU|Ь`:u6auv[ '#=N1^3srpnLY DDzQЗhkY|i63j Q;\45.+Ń[2?~yo#s2誓"Qܬ܏ܾ`͟׫r/?BX]A1%Q~zO5ZrVal l+B"X h.N3gWg1@!e׀CKvȹni FWe^*ZNHoPg60 OϢmB,^'\ )%!29O/]xRJ!w&5D J7 ]SޣS|aA6/;710fVBu+U*!l 1T.Ay)3[xZSۍD xPνSnPQMM0]TmqY";=.P:Bug?lA F{,s"\}m3һjڗCW 7S;8g[2hTU|$[0 F$4\.䧇vh#s,2k~ƍ9vR '%8d`) _xݓRIq =-Y*!}fpQ uob6!GrHIWv7FGO+`-9wšjX$Nbd y;YG|p-o_M M-9f]{dfS֟N5͓M y Yھh7| CC/&\#'#GD/>>t }(?_LBJ:UQՌ~0!`(lS'E ╓=!$+ML;'jߛlS3cO6$zkLK;C!睇VCCu|zh/ǃۍR]WRȥ H](DFӅG?]&{ +nJ]sƵCs$A+;g< Dh'J>Z*F3 q?~sH KէNV˪hJ^_bjryN'MlGU7wVC<*LnK~Zm]2pѨHjP㫺vGٌ_s{X4jwK[L6vP?D)sT*\j~n[ ]-z$Y˄Tvz?A+_alO84gLu-ϟ~홻^G/6r%_6uh?Y[7px;t-$Ɂ14i2p%SS0?.2a$j SHt84F@ =-@q#_տS˜kLE غAW':|pEζSR(q7P..3 \-`{-::p=cPr3â>p.9P "@BDPCfߝW[ =YObTUIw@rqf"9TNX&W. vCg~+ϰu7ZpwpEkozm!*W3X˿o ʈR}`?` q,o> stream xڬct%l&';p;mtlVN:m۶mpy9sf?3kuU]Uu_uZHL+hdk`,fkDH36pvT唡U26utҷXa@ac}'s[}'c.@`mM͜J4i`HGsS.Vv6N)Nfs+c8B\N nlc g+sC1%`r :vƆÌ hv掎6N`nchlO&*_/oVdOnG0䯧?- Ku7q89`dhg7_2;hncMvOֽmj0wr42edonSsXEo`. foF6V#cXz9[)w* -_5{bVVr{w ecgBͭO[UO ;_hYXm6w3w36R0w24[Umm_03sCK`7dlc_{ۿ:WQ?l9+ 'w;c̤.kkP ٺBGGT~C^hy<Ȕa caO/ΩȔQ&5 SDih]#)A(l`ن$-QR=n&/8\xVUI(UJW'-W߽$yv:<&N HO9#·^rp)aުe:1աٛiV2$~fĿ7"=MĤ-.uq]ޓ7f#l{'=TX~*dRD1 dF#-n`a]*g"QB2|g&&BL>ֿ`73~_3rK ,ȉ3v[PA}?7>oYP+n.X.hAA?Ɇm}&I>TƠ\qI*D? XyU 69!t\wtL5p4}%!JgM:4P1{3U4ɰu@\q3u5TǛeEQNܠ?K<8̉J|o(R#3I~gpɁגpF!krm9PFQ{E"MzAl#t36Mg0~z:kR'2聉qյoD><c/M"Lxw? .}%>_.MsX)bAc\SJ'4{ѾWÒwc 5M[- 0$r۰7[.3F$ԁS0 )u%*d*{˵ޓtװS] FgZt)vGD+, h'y(IL7+ t^U-T&BM,%z$Ht$tu~Z~J/SE)H:frϗFH]zc\ʬL.ԂhhL>L~#nzSHi`($W\J?h]ȓAB$#H]Y(% yǛg;T 5ѦYMf F)1\N6Is'wɸΌ=1|{oJ X[?E%FjDXУޒע:.FQg=xjxk_ \P 35Zc@qwztve~cYZNӠ/C< ,M3>Ht]?Dn䁔^ @"A E\ϯatDUj4 ӪRb H~"c}kyןmD`2Y i6.?q(9Yݎ&JZ^9#(ῢ3BM4z&*wF_n-=E~!\V]|?ڻ$8s+؍m,+HduX1H"KQGu)uȱq( )=l`WysEֽmgOÍ nzJOc^Kg Z\ZUzxr Hf\ l O&&dew9*g T#|^auY"lZwR=׼^:CH.vΐ9;M@a'@>@l-?fdݕ_rr!'AF(<ʔia'tRfo7'r43|,ߡ(%rА菉A";тWMXܳTVϖ#,+᱒*_{LikߙR`e,>Kyd (JY ~Fw ~ՃhE*kCpzMm\ѫڲSdNá MbD D0Tvor@TL_ "bUsG`}Q^=">`jY-i%h>5WPQ \ :ew>`mx>!NU78nk'/n#h]ڸ|^ b:Y #[XM.=XFǖ,ʟ\ϔ ёun/f1kFව!Cy.لpI=5veۢxTmV:iؕ=8^ <-|%,|L>I# jϏta'XF /HJ'jtb$ k:\㷪uըa<6=qU?`)w/;!vXЁ[NLiRW~4iC2j9.GuTm-RwJaxOK$| p84o͖m nƒ˦"Ul/F/iJuA'1C&H<jTXxP_,uЉ4 [cIi#Z8 j tS̈`*YV"㻤}Q?\$0*@"Ja :Z@8wN%B*c:c_aWV$-`hyL3 -HK\2h54;le]?>u4/Ϣzo @uMGm$|jƐUk*))%V^ċh+"v{C:yqܙAmDg 6ׂ,-πQ ['Uw'ӶwSK3˰`Xl`cHca Yҡ[yDفޑ傕?0i  ̸F+׵њıߺVtDt_J;2>Pޱu!3QK,$XA!Q"^Cy !yc֜v́.TjJjzsS? 
x '>8n:& e8c w "<~D_R3`:C˥HFo/:JwceE;Dy*A*]Mq|ig4X' fO5c&CnE~"T(+܁B(6L\o?g)=PD;0};@J4VhS|_?(7 cpY80_i)\ƚ:ZsWJ8/UR>œalq?oTN< XcZ] i3RX/]Iw3AA\v GΧ@HcW?vVb'HzԹU8P(q>䝕Ja~n1"KvNC/s%ߡ iG:1< aTߴ^51_wJJ.w_\BEd nt"rv%nw'S֗ѐf`_c1( bwٯ q35A3SXͣa 5KkrŪ7t[ ;Թa9a╉"mW7 <)^FTށB,v5`ūJ+; (5EѽWM: G?W`L}]q|ȫp{_)]oZp%4&(|e6`lݒ Es♺&4 2n,+5ª`lE[J5;m,yۙzy3J3~ƭ!jf* c%f~g\~KO;8cR j"r15 DsXTe)11Ssdg1l>7CijM\P:E[n`ܶ<^U@%ӫA.׀ ̘[=EbO6o6Z*iW?ŸyN ۖ84b"3 \Iz~$ U#)C']߆O*6hأ9ec(A nCK y;aܪj-Ap_a1-ϟ-xYj/mp!eGKmU``3Eϓhjtfd nCv@gxZbw?*]^A SN-i〟IR%U!JV7o~dtg<4Gp1Bj/⩦]b,6jJ4_[^  +\H].ݳ#"#5#% qЄ'uLkln}4硽,ٙ.̛Zyvc˩ 6ȕϏB̀:3GfF. | 2Gtz~FF֠D,z]m3߬xc2{><^,`pؚV'?>b&nš]@øp=V}Avzj :g>OJW&GNՓrCcE LP7g lNTi^ݝlꌚzmiܤc]] ls+mWyoq~4XH`zt#4hqir˃YS7ft4˅ rTt>oss~89eeidnp/wIlxDV"de b_'jBp3 5)r-8Y.3F:chRx~e pJ|?;Ԟ ϡU*m[y+jwfמ4&ٞA?qdq`PIFMBDRi7"&魎7/ӰSHvhafTƓ0[A.a7U5SRUWMI^4:^QtN!"rkefTR%j u(%}Qeb i{61(uB.JQa[h5|i.~48'HQ^i>zӚՂ8G\óDʞr z>K>_<Ħr=G+p#z[@5 'ygԅb#M,nQBݤij cÇ8#@tδ<*#6ڰ[#b?"+ݺ^o4ZI&Q ^tg0^ 1:Y) XH^?jա73uLk\JL~ xfK k&eqg![siPa{E:dFL+E0j>-Pfu$qMxxqtKUg?;&$lNėA4|k%~|?-Z05W0P B t6V[Zjxx5I?ͻgТ `Ǜ^"(#_T@icX8:1M94l1Zpse`څ불:ঢ>{-ǽUzAXH^Ҏ6Y)u*0-me:^$d@3[k L€GVoTcX hڦsh&pތ)yKxv\IHT;"ghH})S QPibE:V8"7#/ 4Lvs%OznA5?NW&U; l*DAڼv=yEJ}gv*mv)?F;l'Y5@f~mټ)hmԆ績oET#|KUzvz3N[^bAk}?ZbU}9h+Z($rv:VPI.93hÊ'X|ElH[g)UwrƎT|.-1OS1Q kMJѺۊ q='KK'z>FvWԥ\k¥{ 'TMRDaڝWz*!C;9_ =.ՙ4VНӇ؇z? . OR&Her&9#JJGOm, 4@L"@~L 8 \pF&6/fuZ$l!͕Hl {yBU6'g&͏bY [ [*]~܊GOa~zDE +f5󮯀jogݷȥ?+SA/ xͦ:FӴ`bf?@r4 W4gQ֪/B;L3NL#f\X0mPVn2w9`kƶͽZn~MgMݥL0~όً7JzO= a%ffsU Lt~f@H?NYd[SJL=V^i!PNSZNCW)RݏKӾYV1Sy(+A|OXE;V"I7 r4U4kɳ-J*sQVŒyE[uYOB29i <\K ,9?}<^EN0pH|EؤQtwc]' q+p0t!)FhVNCFQ':x&P7c$V?hXtR|7>mtp<+|eդcF>S8K)MpuԴƖZ TM?Ay'.50,yHν'ڡQޢ o|>1-sR ~|CRo,T S_n(Y=/ zJc :2Ɉ$ tT;T կC|g~ f / c , Pjr_ +8g,P$ w6EVfe j|11XsIoxeݓkQn[y v<˯U2T1Qu#:&2\6|$u'-QZi_G3( @~-)ٰ&>OGX)a% +ړiJj(8I$19Z}n'#srkar OYXpy`ӳQAJVP=4m|ڎu0:dlSFşH-ڐAR|(Ij(Ӣq~%$3`QAyE Ƅym_FyN{V)8_+d$^a\X;7mDVӹ35Oů``d, BIV "KbFhiZiS:PZEyXl B!*\^RBy6gׇ<+e}_1`3t` B"re±0 iÄV%~_g@ rC 9NS;OI&La7u8һ,k-'VU5g^s焙睖pMۀ9-뭢#D\FC<Lm MUN=Zp=I5Q/F@it3fW0I y!SO!?}oNӫ[,UяBܴxyBj)WNHF_C{UZ.խ^Rw!q c:x#rHgIMs<[G Cd٤]}g,*OoT&˓79gVv'JaYH>ܬXfkT*wΠL}'/A89|=&o#8x(gېkګݝzh Rr!3Xk˙E_QP zӟ+$S]42[&Z2ޓ nBuz- M6ʄ݌9+ 췂Amvk3B/{66"_.mAEDr9j,PTS TYXH3+W^>`3> Jg#Ayp6|SrѫexD%/'@\-ςA5>;ð@Qy*C{1;1"{Imsuu1:6eϥ慫ȳ.GN4R: sҪʳ~Yg]۲Q ֵЄ"o;{NMq7|5@/vSUZ,Hj9@u{q=Mc0"N\9ˢq .I 3@(,aSglIFtrlr^%,M,VF| h -1| 7uGR=lknG haԜ 7$yok^xXa45Jө DaU$s [HuD>d2*Lߚd30B ygW7z[2$@fSri\.PjJ{#l+Ȱ8񆜡fqڋHw|i;J-7^xPS7_&yKs%!E4Y 6^M<et"(@%/y?p}3sN6+hљz#_,FG;!;%6 # G;IϟC?@Ή,> uk"629ضC$̅`HN!hQIϢ.un /[lxR_'?;}*H^gUC{ _ nCZi@ Ӌq*$ڤ,kku8oB=—ڌ0%-W.z^Y|;zR<2+aN(e n.R;+tPMdwvYyo&] ybho{>K=6޻wF@JY!ڐ2r=n,&c]Wr,Հ_9Iq0|>rUFWV@zvLLGoˇXD[N/쇂s6U6H1g]$+=&4=v)T xh;ezu֌HL燀vǯAӜIR9k1SEиk> 46^zo[^螁 rFe7ێk>Cwngf._Y hʃcYyKYhfp]مYH'ιZ7ܘ-Wm鼹822tCmYo/&>o dI( ֱ@J8:JtcͼS"Ǔ-*wT"$Mx%,\%̾Ewc1+ |V:mAHB c/cNȏf2HrkFIm^Y'F2jPqCV=.~3(:U:嫎e&>@5DMj0S3ٴ!3L^~yWGG22bd~'m!'̓wgجϽM3m:-xw>7^jMN1~Xb*h!׀tR.M:zʬ9/N0@|fwĨys U%TV(3QO9sIUx [>UK͜b? <:Fx&wAO9'mtۥFg>x8[NFzSx=n@sp(u=u eZŭ2k*ӘIFgVDHkصuP1.]tJac3'9[,snͥZxL_8L6BɺһĿr 3+,m=5پD0:f&&2ey-ȒR8s^jof!dzʖKXVe,_Lfgf nI4?sYtӳqhb5MwAVͣNy?VKE^턷SenM[/e&0CZd q 8%$WI_wW$u3Çi'ϙ" D86Deşr(L~-({![2 9!Qܯ{:cJ~Yj; endstream endobj 4941 0 obj << /Length 900 /Filter /FlateDecode >> stream xmUMo:W5?$R. d9M eCkmCp;;w~>|3E_?O]5߶w]Occ]=~?}Oyh9%?۹׬B|Ɯ>);vw%g43>\ 6 EJ78 1{~`W(-;]%=xe_,b+-O;q\L}UI--=BKE1p[! 
Mߊyu>.N5K)Wb٬8i[_uʕMzQ)V(Txޢjy!Z2P="Zd0\ÃGR\).2*Шa!U,H`+j.5Nα@VK-x%3%AYӀzΚ>kP#5m0Woþj.ZT$X/)n)#Wo(oRZ $Kp4Z-b\1ܰJ P"GXQi/8k^Zq:Zs9dB )sL-7xJ`aɽ)f$1 dъcCZC<73JgznHȰYɚTa,_-O87}KԴܗLloK+gJ.GZyVc48Wt]:P~`rZq.n1] S/Pu7Ue:?&?!d&1yHn5)yғBx#1ޞ]Go׏M?X endstream endobj 4942 0 obj << /Length 665 /Filter /FlateDecode >> stream xmTMk0WhFG*! miʲVZCcYy#9햅ļ{3񸟤e&Oo]&C]]Mq>zwt߉Ǯ)n.pCx?nڽVgx=itO"i [\l\WM}'ԭ̚t4pXeȉeU oq yM\-CnCW_Ey}wP dZz891euB)] W-\v\]~[S!8&+Zce"'2Ɍ5I@|"B2AQhSlLء28a}ɑFq5ҍnnbfǮCG= Wܢe$g;A,:sx l=NOTƘ$0_س/vЧQ%~Zx pX2]$^qnaK??q FqMyc0=) &l(mi,3|d &\c ]͹&ӈ9w{d-tx\ \cΜekqLJs?<@>qhx .׷8wl~1V<*m"mmDa endstream endobj 4943 0 obj << /Length 664 /Filter /FlateDecode >> stream xmTMo0WxvB+8l[jWHL7RI;onDo3ތ?n~<&Y$ŝK_IsE77E[^N\5sߖ;7|[lzmS_*7F?h3΃;mc-bB`ew\_7oK׽;(2Z.ETz}ܟ~o9V^MVK7-\f\S}[S!pcSs|TXo1/ȡ aeuC> stream xmTMo0WxvB+8l[+ML7RI;onDo3ތ?n~<&YվI|/ŋ;t硋nn\3<:Wj\=?-wn6pGۦ|Tnʽgxté7~qzxKlqrnX7UޞMjuSAxHiQ,'wͱ 1}hW7q{UEݥ-rG*F>NNL7u]tNhWS;wE )b,#TTHy=)9>*QKr7P:MȡQ^s$LD6aȑ*s.$S56`>ƄmÁ#TL 5kd}WXssc*zRh/#? bE$L|ږ8^y>eSQc̯bV̯cNa'_OAJ195kd3EH@8ܰ%~As*=F 0`{RLPh33Y$LƹǬ oqMsȼ tx\ \cΜ-eksL ?"@>qhx ׷=l~1֍>*]!MBa endstream endobj 4945 0 obj << /Length 665 /Filter /FlateDecode >> stream xmTn0C6U@"mTt@;olvR3ތm~<&YվI|+œ;t羋<]3;Wj|{}[ mmᆂMv{Kt=c_~B?zxoBS6wBJ)X7UaMuSxHiQV,4$O;nC-bD/OCnC_n^ѻs׽9X2Z.ET~{~ʶrn_~߼h!R,6ew*ؔb%k e+Kӄ$a"1x*s.$S56P>Ƅm„A Fs 5577vرϾ+uaя6R:!,əCxg+ѧy*JcL|*m:fvuiWUꧏɩ\g%<Ϛ"sÖ0_:3x0kjhyIYx0aCnOg3$cx0<<v5O#ܵu7A 6*sZ ZcΜ-ܠeYksL ?"@>qh|tngk;dGGM@c endstream endobj 4946 0 obj << /Length 665 /Filter /FlateDecode >> stream xmTn0C6U@"mTt@;olvR3ތm~<&YվI|+œ;t羋<]3;Wj|{}[ mmᆂMv{Kt=cߚ~B?zxoBS6wBJ)X7UaMuSxHiQV,4$O;nC-bD/OCnC_n^ѻs׽9X2Z.ET~{~ʶrn_~߼h!R,6ew*ؔb%k e+Kӄ$a"1x*s.$S56P>Ƅm„A Fs 5577vرϾ+uaя6R:!,əCxg+ѧy*JcL|*m:fvuiWUꧏɩ\g%<Ϛ"sÖ0_:3x0kjhyIYx0aCnOg3$cx0<<v5O#ܵu7A 6*sZ ZcΜ-ܠeYksL ?"@>qh|tngk;dGGMc endstream endobj 4947 0 obj << /Length 799 /Filter /FlateDecode >> stream xuUn@+HɁkc{!5&Q^ үル!zya/W7/~jyld}{9}N=C'u\W;oέO*k`~?''3Ɖt3\;WS]Q?SVk ]{9FSѤoG^ 32j$WC0h޼O~wC4Sy<&>U]Rn·ÛB~,{_=ڰfhm_}4zu|sH]Wb MLD!E!B FAC\dQQ(%T<#h^QqKƊL0cF F͌a._Q mPG9'+X38)+ι7\'~5:r%%Β뤧$1$܋a %aN*Atg&W̡`92/X[B|fAlI)dKdgI$[d$[H$[hv-|9~ddK%[w-t--d ~)BO)Rd dK|ɖNK)K)++Ζ]Rd]Oz͜|x8?<ᤥNO]?p@}_:h? endstream endobj 4948 0 obj << /Length 550 /Filter /FlateDecode >> stream xmSˎ0+$1$#8h@Bܰp`~ +8*=SJ]sCM&ESݮ`w z\ħmbo'ޚr028~}uHXz_z.XA_`1o"xR:bct\$7҈٘TmH@ ]W0ywznͩV+1r]oś}X 6g1ͭnm{!^ ' bނP48YhC`୤\UY=0ZĎiơ 7([4r;"A"e"qDgs"2dK$#В%#KDDNs5&]J[/G endstream endobj 4911 0 obj << /Type /ObjStm /N 100 /First 994 /Length 3437 /Filter /FlateDecode >> stream x[oFP}A<{n4fl]dɑ6_334%َdW4g{wIݘlMc=q jvΛr6=9l8J|QóQs|&@ {Fy-@=LOGZK &Z!%r@= .5nH#Ɨ]DA]HB5 Cc-uJ;[1H؎= Aә=;4b (m(aRuҏ돕s..nxv8F C}݈C#߻ٜ t˗3ոkO G-[ra/c(P%ABă6ZatAg$bh!IKTԭxq s>Y@" Mboȝ!~t3o[dۤL=Ag}5MtԖռ^aY;b+㞧b}spBfff2{YdLXlS3Aw|KC~gOXDӵ.-8v|"},xs >{NL^ɵՀչYlk,5eyF.`GobWt<\ WxH3\b3٣m}އvh4.JjGgAxܿG''g㖚-a~Ǻ?CH=VOSL}7[u;R}^CZQoGY p~2f'WH/hA XO>9R3u<R3E׍tpN^\ թ:2UEr~Ud

"Q&q@^E S.85"Gau=U\PD=&IXJD=`'o /[„0do^UҾdzK)f8$C+r{ߔ lE9tRUw]M1lɘQl.nGBc& 139E=M?my?v)[YEv^+״pMFy>hA:8Ts]hv]sζ_Vʼvxo@Xn6DaȞ+a4-^ Et{-1/ );|x6\h\#.'JeWi_lBZVS-yҗ+'G?#_P!g< >K|OjEhhJX}" 0 )&1} UI|'QN s ;sGCz :˫(_*59+)^cETC.V1aJ7}XOVժJC{~ ~>m9>z9aY^33_ʱ%S!?!)L)zztx[:Iy/z__[Sz 5Ф]R}^T 9(W^;9YoU*<$!Ep5!E)U*r De|aq31x5sjv?ڳ QJΓUu:Dm>7JiEݏn趻~zdzvCJwWźDss 13>U=\TL(st/0 j}ۃ<<@ __/1 b{_ψ rgGKP.W$9<^4yrWVP5>L 8e۩.g ٤kha7TKJ*  m4DemڮWニ :Ld a<"u˴.d6쀺@)xB`/*'Z'4\X`f͸m:Bʠ8K8GO5p4Q,&MA֚^i[]8bhaȓ )(\x_elv$K`GpEfUVٙ}_.iLP~ ?PfCr{gfdʫ qW7IU^8q?~'5[ )xn<F[>lن7bTlbJJ!SM>Ǭ qL>ȾOEY)f65n7rUYݤ4[igJ6w!C,T11zJ !!)"2Z~ڲާPv DŽ ؊,SY*g3)J6>KI}p')͸ưL#JW0qCx{}hm,c-)$$o*8  YLEv$R]Ͱ c}pF9.~p(18L 3^]ZcPVʘ~25g*V⻛ӻۅ[87+͇b8)Ltv?Y2A]n}߆A¡CJS e~v2[Gp,do襱zi˺ >G)|`}o,wF2CkWo.w_/1b9Wö?~~ GG tyRq({ %}|yU4"PZ>U#Tuu~٥iZ?gdp'Y'uHMON虞}! i.6Eh=CK]|ylirP;q[8Njo!T`܄y<_t ;UH۟ 7MEȈ4;UR"$P`31)Zf&2sRcȑEFK)2.HYJgL]g2Bu};>T񦏍1c%[}Qnv0)K@y@!k)!'1%:y 1eHgT< jWum8 k%bE͜ =Ąӡʻ1Y;8T;(>1ȧ|]J77k/A N1>1b5K*0 #xȸ,Lٛ Ǜa?UgB*nxʖR4ꡏuszav̼KB!aX."XrOa 2#ŧD-b_T| ΪaH(c='CT}hM)妶X@eXUӨ T;*TjS93pB$hQLbNI0E|g.gjYSB{ep2#(Qyd_wc^o01056q5l,vGZκ3(D hRVJao VݥG,Em'j>@)kN%w^{vTZ'ᓬy6DRJ$O,D9`K-#2rvQQ_Ka ?>ʩ#?'gGti9ק:ޗ;)h8:'$GnIrtE@k{{ǙO4>i2SEɃyv} ;I Jً 3[*%ﮁ5Z36L(>[m䒉@Ƙ]EYa ilp:JD4aO1#gô "u gt\uT4\l+r^OцQŒ?&r6,Zr$;19k KyBA|(p O$7L]Ʊ86c ] FsUv"ՆA2u,j'W͎m?R3lMWOSTTFkLFyYTJ@O)lWSڅƊxȖܔ$lR^Zjލ~@>"'(}tܮIOcc,}t J-?oh ăIR4y R*4Qcݐ2PNt*TzTzw7]+Xf?u)OFT0؇ X1nb/MYH h+VNenӢs%Ot WUa̖LH#kJlC9!J8REh 82h 5c) )T\jkFjB Ґ!w+ˇZ.*.gf7ŪP Lr3fv2.+OuI颱LT@C&w4֥k6lv3*dΒ*j"9#dP4dQcpQHZWw멮YR+cP,Y Y-ǩdև%,mc+ko~,(J0 |" qfH5\dâ>cM,П/VS M r M>,ӵ`qNl)FT*D0d2ҾUn717fkmݰwmPoi/?_6mk+6R2nb]NӴ0w5VF#D(up^7v9'8 BIDS4m&i;xn\O @7)YĖ?0u޿"$Ԟ?cbyÓOwo]}#x=l7nys/tW(<%c#<==z.'y4/v?&]^|KOl..Ͼ>WB O4ݢ^. |ӛ~uty\7n> AEs*9D#? #-"$˺MƬYs(1lLSjfԚ1!ȥQ.InOW*]7V& YURB' @d!A$4K(ʃTŽq=&S$ͪ8p@l(ƈk(A1 eaI9FDY RZQZd~k>ư>HCYV|ݧx(!Ҵ3q&*gֲʊijrw>즀by<_:&HҢR1C6,[ )%IdҘ\u/7C}W]>LHJUg[u/,ƄĖqHr*j%WuȏI A`HP>`njqZYtZG旻ICf%kmMKBʴ )x77I 4)V&x"q(5D9'i|=ϑSS`qBNy;o?cZغǛ?]-lWݰ)~k6Ew\`~ȚQ[Ov>:dN8%P>NQ8<5ꘌ=㡜=c ;߯_}sQJ rSl1ۏ!٫s8 Yk`}uM]}xw E(<ĴO7|Vg*z,cC߻H/O>;󼇶JR6r*;9JU?f |}؎bE4(TbJӡbgeg3L9"$!BY$ggkimB)2<[ H.V3S29vowd*iJtJhޠIDAT¨k3_QhG)e7~Q~t[k$SͼRU#u?^tP(ẟ|ȋڦuEj lv)hc кZn ݂}a՚>+k0D +k)lf Snbr ^| "-EU5)0#q7t*PbJH(lL!ǒSfQK,3Sdmc J ژuϖEme6۽m%!Ǧq(2y,!6dc^,VPiQVq?E%p8U5Ԕ'fC mQdfCSn͗ƃ8A P:?1&~x qfٸ"8aF\f|RΛDqQp(!B*c]!_u%˅S ^jM 3;3ݾ}~u$-*ǚ+lח7iH oCX2AՂXV34:o!e=LIX ƘrE(%|٤R3d2@PR,qrZ Lؑ[2Kr%G0u/ygڶ[T*z}u^}לiZ]vOout>~ kZ'H<+GhLQC'8~:Bx."~X2S)2ŢLۻMHX5}q?vsmE?]{{P4Vwn^<<~B"B "&!h!qz[[Yy!f_UM!Ci#7-s!ԗ]MBsJ]Oq$>>ldS_!ń2Nrp:6TǶv1Zt1T=ys4QtqNJ2vζ%@W71Tfwo Ūu ƈ2!Hcܴ׍]0;UU(d:ShIs8;0Rn:0N!Egb魯p('sG!㌖SEYgx=,# n:{Pr~_#^WcRVzwͅR|:}Qpėȗ0)('{GKgL8SDTB5c g[:w9A]JOw1:)nyOZ *F֚)V*R9g)zC^K>(S먫>Սn!(>pnji=ՅĄf PEa+)2*lPecYZB(dZ8%tV#6Q.F)܄1FbƂGlM#U`*.q*O7S]Y~1 l+E3C'˺54eݗp)StTĔ+;Y0Bhǻ]Ur1f ir]uW/68?C!(MES4 J!#aZ8\xf>ʚRieY%.3\,Zinf!)Rav*c'f3|C!}6(=NQ鋅*1Y~$%c׿.p30h]U^dQ ҰOTWv C P*mێC'S } 3Fwm&͓>=WQ 5|aOӓuE`͛S~Ri<͓K>|eq )&go̳<9Ǽivo`YwuwV?j߾E~ ~4{IVD^4x;iCO$kxQ^91h9FC3䔙)?}STa.$:#}t%wn*E2$8gdL01JGoo|k\0*M l+: Yc 1y/ EXDx7Ff1sGPrtVc %ta>hݐrHV_`LQ3M20?MCJ"~ـ4+(3+T4DRqB؏eBS;)yrN &ʁk[BQ I"kp;1y5v!63ϷUЙ)Ĝ!a%'a$.80!Rk)g9lq? i|wLx-r.REn'?eVz=53ӢQrRvK1F+X63WX26v2(pPӟ޶ I!3xqJ ca90e)栵׵HZt*Y >(1_Uf)R5SΙ}Z[T*0NZFf+ֽڮE>Q,TJi9NcR(Mݬ'B2@ʼHKU-N*$hh[+Dg,1grVk)}XR\giubpMGRAȥYPԫ7jm;{BcERHZgϘJn،cu)Z4L)iROYD$` j8xɜֈKp  1k~Kq:`؋g6Ntg/I@EG%L%RS5!( y衭pO]2 @0z|ĦV-jҨs,鬫Q/ Mݘe&7L)oCp-}n(d(I)LzP9F`E8!QPe!Z, o֣ avO^!`>ʍ}6xVq/y(I2>1z5C3²EVRiVb0-C+uH㳅SLBT$N)LgCX VeGblme$MV)c02-L1֡ q? 
}[ӡ ^ S)YPJ$&WWŗ.v Ɗi&T5$_|.UVth ,֨s6u|C٫`f|(.4 S,ɂo?ΛD連?~igo7~,mEǔM_eΫaBvLں- jgT^>h1Pr)l~ :l?RfKEVhfr,rzUa TE5\ŊT 85rDʙJ=$y,R@IfqLNOQ Kj0V_>j8ō{SX볳˕#R߼~o~5'LW/1 _c{id|qb‡sI%YQ|6 ?5OO JruL?C$V$>!yj| cG4C* P5C?LhŖh24֭D.*Rߎ1cVB)1-k2ػYUYƜFcO ..('jnTp+fec+W-kbT1 jDm\6~כkEra*6E<) A ,W g\>Zc)z?TҊ)r蔲?S0Q㢮HreLQ¨U(%TwTO9`(|"`]Nj{Çc[Jqm_U)e2,s_^$7QݧhH PPFd Z( 2!;X9vs=i`B3Hq(cEvf +Șϖڵ Ln?RZ/4)!k1\'tP`ϥPV2 !;C>V*D:S:mUo*h,3̜ LEMG*Mm妩 ӔjgI.Сn݃Ye_W]]J*2Vvx/%d+lxv׊!`r3":DZAS7Zm4aǼYC功f" L1(~Hp^ץ 4ߖַ΁R׭h15d̨4ɲAV@KL(e~J ^\R:Dr9sչ& ͙SzrՌ,NbB)u} F. 1vQ |LULnFB 1d](zo6 K} cѶ"S}t&u) StEv1vl?3&=,tF-4QO˶S6 [ۦnwkmfgx#umVmpa[M?}JNZYӫb S! V*$ߏ9K:E1tmۧi㘬W9JAY% ifPonRnpQ鱲{xZSG l^;^cu_b69ɑm%yD^`1O}D.N,'Ռ_xӁxǟS˿pgR\=|Ng{˿tZjp*U#S)'|3'OG1CC;rS?99lEJ J-_~\W1ve۴IWW)㡓 C0R5 F-|HȆzuF&g( zdi*T&EFVKʥR1cǩb@<0AUzaoSl& TnԬ3s.~/ iCɠ-ZX,LY*;+EdLvfhH91TF%FiC RcLGo v*BCSS|c.(frijެz_=X>4Nub xs 0)ڎ^nh4ff0KY8EeϷ}8;2ho:n 7JD4[&V qӒ3!KY+>}׸C*)8N. 5KUEJ)gJFZ p*S2V7W˫4?|Ⅹk67PpQo%q_1-낀OGi`.Ǹه2H"NڨI9EJvY NFKdkfLmUtTK#^ וR!Ile.GX8mi#m!|5OZ).I:BTLB;WP>ĒrW;cf4(%Qi%*Jtn|ZϿEAyI6>SZ]LCi?駫w"ri >ﵺe $?~ _d)~ #xZny8f`#$>DױB?n~z')-l<2O_32?8~W۫b6GӃ'O 7> }Z"'@'wn~@\Ƿq<<-l)y dE篺o7~WncCNOБwaCh+s&rflEJi qf@nv^W6}fڸ0JiM]WPJȐJ +wáK`lV:xwo갩a!yXq}t,SW50 f۩@vyXƂ4FbaAxل1CSߧ픍Fǂr^14R1 4:۲fi^h"c,*_]ũ 91Mc !۩V:#;$dcn"s9f~ gr2Bnfi rwWmsVKie$}\RS>Eeke$&4&L(1x볺E9nyR?;Ȧh|7%< U㧾5#lUHy,ⲹZ}/1ct9OAXd錳j3UY*XՠROc\&cAuaJjXd5 /v SՕ퇸| vQW%Иhte *4ԺUJ5 c"@cL3GOiۗե3V5||{磯 mûٲN%'.hP%g{Ȏj5!M1gMh8$]'wU}czyh[ʟ'hxOx# ;8DW6 >mU>\חǒQ~+%w()owjs>:u'Yq|ƪ8O4pÂ_׹>ޓcN:LrS<𗿼C[//ZWY[O|zǙ?q8{_~j{s[|sq:/drzcJ)SAІKߡ iO7Ooi3W<呫kD_Dj<.LX ygsE ݻ[;Ĥ78AmˑB˲< _k=YU$D H0 [O6ibl-jE5ʪbÙs##Fd TEd3}ַZB$m%IrV Q,3HĨlsC3;(Pi>T~LkP @ډ8Yũ2%~lƩ~V*NjiLяS0(b41_݀AS$ceLҞo:Kf35Ji霔2_4gzr;KTi*ƔY9 dT|)pShlV AbFBOU (CFb4J앁2(dl|pp;]4qUk%b´jo6kJ @ι8a$!4B(R)evJ"SBa6]dHEĈZbvJ&9 .щq;4k@ ]B@%4IJp<"%H?OK.0eMtmU%;,ǂ i1sC3Ct^Ջu!\7=^HB-V^^fmdZ=_WzS߯V?~X x0Kxde; Q|AFL8y8s:M$f/LE˰^~a7_EX[7gϖROe<\,>R~C4zdS(nb2_oۯO^^~_.N-ho|H|Ə-̬ѭ";SyNd3B HD> tN  `b)*ήV*emk;RC{1YP)Cmn3TR5[_#e&,kRrf)U=KUb)հۧkSU0R$h*"+Q.R̹Q@2 >jv F)FTg0FA8,YFRKRy>VJH hBpj#S,}|'LL?&OSL|+k3cvNvBH )/X&Lv!Yڨ@9ZG yLX_N;wC9#$TM)+[ƉaS(6.i% z޶ Eu~5šV rBEUН-ME 7q*|q%`mN)e~ay:"&ud2bLJ^uY:qht-V:\oreqqk)@#k}#g҈ۏ.JHϱU;Y|7jn*]Y5-x?faIF(*AaٸfcO3,}^)nM8dG!#Ȗ?~$$4d}nYbov>d X4xu ﾿p-h~v/ >GENj@19n;Wly0 wk^2͇(s!sZ"$$B%PmiΏѶæmmiJ S)j]R Xa3I, 4m*u 4A}h$8Rk sR(%pO4;bŒdIR#%6,s Ngςi?Y#IYTDQ6J4NBCJ5vuG^5:%0_$R kee%KNXhv" Rj,QjmSFΊ>JSaRa4Q.jJ$96em$ VBithj]^ ) Qef7O&)n5N,@En>UeJbf=t3ha|0 B)3`'e߅J/Dždk V^*觼0U?aoƦ1ZB1TW}|6NXDQrLi;o[b ى%UcKZ:ߕrP } ^ ͳٺSHS.k jA& 9 2pAhGK4!}bmZY0w]`Mg-1n~n<[UG$hk9O&@)Rʦ2"2m\]/cz!i+P"@wTJ !OD4q<ߊ 2!,-׽ٿy'J߾1zO_70}͛mU٧$Q89:c1̇ǒkx Y)w !쿿G*DIVӡ%k w[-?8yzs~˛?ӟNIŻưy(ɏBi Gxq ~0J.֍}ڜ諿~|fl}?/U.qZ5}^T~#@uژG>'wާ>'Mj5:JэCyu i~4(x^*"m- XGqzuR([dDK3cșQB 8m3\i60jv RkMAAd h 4 Z,5N* Lǫ:5&YRˉLr*kIJ"Ip$wr]!\Mil*Ȫf*Rz ++/3& $Ă7}ljbeUk3qܾ˩KI}|}%/\l8[ѯǘ\ `X6fyCz&jS/b Z^osʣ(4 %Dxh@X*0Ioœ]K,s(',ewSIV8TբV 7Tz^ojTvf7E#}Sb>`b@IUzcmT lݒY mZ2Kyy31~Q/wԴq'] $Q"}@ݟx??wQч% |tHK.ߩuΈ<]nw/\#H<_7rkE7MDOBG߷W. /ٟ9#u|áUW}Ϟ.1i߿?vۭrZ.G2ƛ+G94qi6 EH8aE wV9 Iu3Λmº.1@yD E,HT 8 y\]y{)~ 0!%R 5F1&Q~oog_׈8XIOcuXڊj!Qv[6R0 {Od!y>MV]&,Sh: >\Zh= 䅲b:!n7pB;W,$R ڗZc{{uNc*?$"S7a><+^~?}Y5-HZ?[8jwD>?BՙմX?*-r-?D{:QK'FϞ>Q]X) = >eӏ ջ?mWg|OkxG⏵"D}o_;u'?/Q!Mic|3%kcz%wv_M">&c ;x6EDxYv$J]ĻAFN1PkPDe D GLwW.T篆)_%rAFrHMPkCZQATTM%YgbERn*%KA}ݡ>FuSV@D,k}JLCӍ3爳Bá;„R(+!Q`OD9 v6v "_]3mrBk5RU>lv0cAZcB.>iF~潸PF(S7PYqIE Y8 )F9T!1ʍS"dC_m" -%"CA]B &=G̳ %EL!&&nBCIcT"J#wF-CqJZFʒT3hy닰>5EM-1B:n*ٚKCh XOVONUF1v~HS~n3OVL$}bl~mE[))CasrX701eZdtB_J[0{sJ%rIY0nn \Y\T ! 
>˧wL~'T^OG.}2y~wIAjsmoCztA 㲶?ݞ⧇Roxx?.R ޯ"HAxTN<cv|vt,ٔ7?۟3:W+~C~_s!ӟǧ_|,$_)jmIps3o,@V"+> Qփ ؓԷ[~p׿ٽU@)&(E,•sV\GK I C_kUY5Z-3MBIJSqa-ɍF/gv6Sj%&08!XRay` /։VBI)}~3Df"q36 p2V+ A >["*9qi*ITxL7l܌Vi<6FĄG٫3*c.3aR.}F Qf!=KFLp&ҡD|hKn8gAJkBZ 6cݞR4S~M-Pe# ŧ\;M@o◟ųEuOa?X9 B$.sSamaە R0L12qYDzLA‰uªʕwݫ.hӢ1P@!j$gZ*U1 ☝$k)r  F*%n\զ0xξgb-zRre\ZW 81 rv),UEsԇ];W]hqHg 9TڹZ 39EfYb=.Je{|l1;pFIbp5-7ÀE( EK)- ORSP()kUJ?TI]rZ?>¥1],]z< 1#R< 1#Vўb- 16z4~ҟ?{r'1y=OrMp{ tc{oίqGU/0*oV_P 0^L߄W_U=ӳJDHC ݔ$G{-\cMldyl;|\_y1ߗQ?Tao jCg΢lrU#rP8\!6u (eb qJX;ʙi4  ypXe.Q\8K]tAvN*,7RXE<ĸbze\%R+6Zayo.!e%(P9 ؍zB($@Nav.!; !A*UAkb"+GRؚCJsEgUɘSb;⫓vC jI4H"i)~+yu2C)O>V!~Zq(b(ie;NTZ#uĩ RM%+&.jR]j5̙(䬞4 ,O)& gs:/E#b$Dj*JɅ\u<1TW֚ /݉mfI[[4u~V3IRGi!8.;t[<Ƞ"?&pv§VBXx_"~Zm.^j'-X#}/rhCA)(X>(0O}Q ܯ<~QurJ!}7ݷ|# [9yʛoɫ')}w^f%?jٗg}TB%H5$>xf ;#o4w;īʏrhMࣈÏۀ)c~C<^w魊U9=UB"̃*  aRC.58}~]|餙R`I"*$E w/н8Jsi*YBΉP]HE%pJG$T`n.PB.b]+:V*xJ(gTc`< ]<[Vg yY->iRb3- iHQ:R1ecPƢ+MmO~;Bb~qM˅rdCR+}d />2%A<2>{((3=k#(Q.ͯu6 ;C^'"1חWg+:DSbG }BӞ/1|D. x읭j>ylj{q~Cljm?},j|>gOĬW7o|~/NV M۞ݺ_yfwwÿښ }~Y8JU,$d[u[?P &PE0 #,7]i?b?U[kTKHBx><~jwor󹑋90L_BT3Uaܟ_"8O- ! $IvF. 5K|݂gOrN~Q+QFs^/ @Z2Q~41źN=_PQW Yf%1HJWEߧɕJ !ijcwU|Zj`ek8PL3xhˡ_T㢲+WWBMSRqYjCoSZZJM\pYەqe"`AD\/ 㮴-ʋf âo.w)*iĭ;?:4 &.a#zhJ)9~L}b{ @+f1c\NiuZy@[F)-Z%[vu[xeP~ݥ񝮳xb࡚hw| Gy:>n{}Vv!Qz@;˳zflޏ(*2Kz0⮠q=Q c߼oOV&]q\T4̒~u}=Y}LCX/{wWW/~*:֧y_Wk2rI#H"r8(!3 ,k+Uj[~lFgҿzQCX9)+XSiyȽb|odzV^n5g:P(BD*!b dJ 3ncrXԠ%p.@E $$BB*b9p4FR <@ HT0ƂDZa%nRj7Le+3*R (/F"SiJ'Y@ iwuUXW+\J\j?ɋʉ)$,Ð5)uY*qg C<} qMuzQ QiSd:в}ueIk1;>*K%Y@K&JVfBdFL(Z3PmC 5!_bVṷڀ 8Se Iwuɗx׻iRDH`4Z+ѧuS it)SnJ rzYUT4"+bdy?=s'jCA.̽4:QN#H@SM1`>$n_˓Hś UUAbA(u֝m{$ o/s2/?qH!7?irtDwk)'DuDd4j(s?E|`)D)˽lS6.Gӎ\V%I ÛC|/N3>3?G\u"WpzRɂb]/QX# 1bg% NK!A)\(_}CzJSZv>klVfo̷3t9Ǻ^ؚCO!0ɒ tg;rEFѧ \{!drl$ZrֈXEjCe;:xXi+Mh!76J[ cb(sqV_,mn[& .+Y$S!@W g &+,Ż8K$N󅠳r71QM%i6sZNM!6FKޓBj)DZZ ccQ)$fQHP ꇨ*̦Fq(9#H9X`ecG18]6cVjDVkRPV>ͮWr!%hР,ÎVҢB]{p XJţJ1@]*\վKM8LCêX+;JSWiV^;FZq 4CY54c\T ovEQΑSSʚ]*'sf*0mǼwg k"FT !c#KA<EG?GGkl'Xtщ0hCQ0_4l?ӟ#Y`J^rw_E{+ PJA!63mb?ҜU=J2N]^v %gcW}Ϳ/F#rH{^7s\ p 8in:/i~Z=uAd\!y;\Lau+mlo x@# ݝ=R^5誸zCNE!'fF@%qYALBN,EpHI)+r&u-"a1ߦj]j!kEDo.)cf:;J1Fɶ,*R( *-!7VN5L,,5F&t~ٯjZsގʵM \m|S#+E 9R f rQX(j!X,>, Ȭ45WAjMO 1HL2:mT !϶XBM0)53UE!Uk4cU&n5$@[Sέ19\ HB!LIEK8xz\tonrYk%6X0I8J"dOL_2PIJ샧"1 .7SCW Za|>]kt8Y #5e1 ȱnNfo}+ 4m =p)ҞKRNITjiqlWJYvzN @V˾XH9 Y甐6* o嫥5D83Z0ճvinOu2 IbeB2IjfJ(EQ妯+J(ݘ Rڌ>hy33V8}IbcYӄ1lLzPr')v xGC0aãLѣ |C;x~2ǘ$yUisygU \#֋zw?'|*0uOm>AAY WN >p;^ %7⡧ۥ]߆G;{qZ5\X?~Uۼn7//wz?yj|@SFM⪪f-x_oO['2V˓EED6p((*9(4rC>8*q0Mus5m ͈ mjA,'Fȏ=Jz8qf:\ ou5c>y*D2O2T)3Y!!  =ggdL2?l-3!!"T>. 0XX)n]UI.[ J)}*An!qYHԇ  PJ.Tntd"%ef:mv!LƦ8 E3;# @smeBep 5 1&2\(a![7I9n3I%4j XH1RX,tI%ri$,%WB("AL%CLZfVwS٪iㄅzM:i|luR}5F7FkV|TWb9?jepڄAgiTh5I-3Yys.B"(.v_Fgp=Q KNHH4־ٮzbmbԧy6=wa ᬚJz(d Pצ)8, ѣpmMq]'+E[DٵJRnpםʆ$jQi!%lz.h&K(ddM ^6Ǥe\wߞt}I_F]=m6Y$rd23ɚ,9ۮO[Ǜ?O?NIW_oNBZcHv4̟FF{7V4;x<|FT߇fO@a& `wlmO\\[s<L_W"ΜSau)1m< g)wc4ٺ*XfGKD%IA18")ֵ46&Ι_T@5j<ξ2 0**)KR@B!Ԝ48deq`B͢*!wI5ApV8MFTTdRS~vo2fna\UAq@ܼ͢RNS"RF+HHj[izL.וGba!vm]!V4s;X9%UʹU܍jp ʹ kd}.BSRwj z_H,2oyJJc*"NFwٺ 9ɡFM0NkO2B\[gE[ %ծTK )lٗqN:iG`2t1QGZD^Kb>NR6ڒR5?f\59IAcEE/xի|a[. 
2 i,]ެ]UB6ycbժjfSFF.`拀լi{X0p] UFk$t)!Y; I𲪸1p7HP]鏯wkcWxҭ/řw9aP8p+e8f?qe0 Hf%zOhhʜ"VFOAl.*j|H~l{/03J& O*yG A2Y@C>bOg}=ևnӑO;?Ǎgtel=?g5;K9(z.Wv>>(3AtU^8r] qʶ\L2uz;黷u=Mv<@86;2J/SK?C,%+ui}zcO\{Q~(c!N yP @?4D5ǐFq̜ Y8s\\PAȒON?=uwxw߮t  ֚j#Ӑ94 "i>NLUeiQΗ\fcXbTPwc>Uj-#aswZoqIՎ ¬ !I.@hgd" Z9.5i!X2HV0*Q:$vDCg$ZyOCl+ceA(fV{cetC mFE)X(b6Z\rt߇.ǘ% DښB..u]:Mm6QcFbiZ`1bႜlɔdaT4ybשOuEs5138`GdS)[ RLzo+f[e05!N&A_4v(DEpa?__5gO20 ?_(v?6xmBȉJi7~j[3'65bX~sDiLI~62TdU70&&k1ΐ"¨ Er2ZֺyB~~{}хx6ƼR "?S?n^TOk\\wgjSگǎݸm]FmE tw UaԞ%(5i4ygM kl͹Yɳ\߫?LyZXi_c2OPcCP#ׂ)Ƨe+OӍ>|1QLDIK< wݐ=Z1A<_~:4@8Y?҅~Xwe[oHY3g[BK1ɻ]rF!T9T`F~/24]㪮Pqb%SicJko\;AD;*@/PJ"OcQS{%)&鯴֭!CBP˖D$-_aU?*! aD\jӓF2%r2c UMJ[Ð58~,$٦,Ӑ0+7~qqwum-ƓW6E-Fip#`yow#9v" D;ov]r.x/h-Yƴs|Scql>hYrF3S.4d*" Yj74 ӑc%l)R?.blSNn= z=ZyLkЀ>DĮR(HG5@eWMSU6qR)nfgEnV#9y[ӔI˩cue)L͇źc[ֳ  0ʻ}jJ@C?כiU]˫2ޜLuUz/BB Wa_^!^펋AL^cQȏq:;kCN+(VۛzBaJOK8oˤ48fe6*evż$Ôood %"R~|8l~%E>{h꓀xZH`>fʢTO i qJZ) ?i6ϨR@wl |zOޅOv US=)y?O߿HҊ_W_]>$Ә^/?noo_6/.ۻ<\\.W5(AFSW_w7qsV}mn+iAW#Qmn?x$i8P} 1_oXsUEG-]ibޟjĸtspvo6ö%2 i]7ycSk]T~3ĉh0LHrD@YJYr9)E SJps0V+aTYN׆&if\(ƚ+`*ZUÔ}1[jd2n* PhEO{5%)#Z[r X* gg;o*Re*[7MT$( zU( Aƽ;עJ(J6OqlfSAg5BX5Prb qcnpYwi,únXU2gJ ]%\٪+Ǭ]DaTY}yHm&(enAPR1 O!ɍ0ckeeuSS4ا 444&O,ds@X&FѮg?ʇ3<쯟$<%`>üOs\U^)UUdw7wo<>G^~qxPzsu"ήΔRozƻ?o6Wgu}No?/WB槪ʔrNJyQ S1;k15^t| znow?4볺*ImC ^1 ƂH)$,ԳAx,X@N1ƠyCC."jNpcupolg$ B1ferMLѭ^4s/+0Z&S8m %P`>3B(S"SLNk  ! Huf5FiרZ$ 8_A2'x=bC+KĄYGD ES,92*Ks! q]Rebd,6Zш6%s?lF.QQN'gDek1lVºR23, drM9OkJWvIx Ih6~iIɊq솬 )b|q:tlo}Xd7wx2rs7W 4SỦo*s2gI ghk'Yu߳CơfFWvu"IrZ#v[z$ûEh0[=񓟔D>~$,^]uw۫lwCۛv*ЇSR?q}DvxrOt>pϸ3Ih1u$o>{ٙT UJk߮U^t⛫]/?Mnqh+__~׶Ư~ﯿ:zqO8vG}VFM_ր,~^qRuZ{1USൠ6R 8w3\fC*tkk}K-5z(WΨ @ )&,HJ.E?0!Tfiꥉ )%psp֭?r]+o{>x .ؚ2CeuJDN \6.<k@9hA&p3XYVXA֍YR)5@LFOXf|5ZJD1FD.JûL<褙>ʔx]RRk*I{' l:ev3N djj.vR (V1 TJ AT29 #O{"teSM~?SIh6㴮T.t9*eʬH0'%ڀ7//7k j< GBqT%sԅx áF%r3-S ꃚI%=˄lݾʍR#نh08~qK*5z;ϜhqԜyH2RaL]Fvq6njI>_~9n V[qSߟGbLwMc\*r8tkhѐNê^ CV+9+Z/_\<'*̠j>52'1*nSgAR,,V&u`8vTKea~Ur޽ۑL? %.ÿJmS#&(=ǫj`)6=h>Gys7K gRjomߍ(jeM XΝD]_xS9C3*c -܋*2E-M9CA31Chfߵ* Q*h nJ%W26\UFV0Ӱ=o6f銄 kpYԆՌ,qQ-2ͩg!yE*ff31$THReUZbjA% E=[ 1YQ&BRy]aҔf Zq!!δx(C+%ŹJL$2RuF5N"2Id~NJᾟ?e8/c,aJaEE pC )+#1D&|ҍјg-E+^K]i.23Ʊ2i;('m@G촖¦Aa䚨KvئŒ84e2mQjuj QőckRffNT2/t"WjPRVVjJ)i>sV;?3 v=WnF\_Ot7u) )7Q1ǜTmrsq:Dnsc˯},fŵ 3 T7J'0K(6bBʡV*}%;@omJDA4mVT!wSkêHUUir1OzccV8K^T9=؜>~3gt+뗚\qڜҴM/SQe_0Jg5iy8)HHUqkvF2g,syՂK7<فg=2?fg(ebރ|2 ryZ5D$,co/z#r˽^D~.{nv0KW½shz@"^<Fs}&Ҷ05Qex͋.v Y28xQ,Hl E$SѪ̏ӼBcEIwr*-%oW~<,Y?DSUe?{6j[ʃ=?^uSH},UC}9_[DxQ(hb__ooWTZ;"P_CL[gs ?~>yLNE ]R'AAx!o'*\a|]}ٮd3@S $O >isyoyyi?4L~rwXLSF7M*__Զn՛-C=պ?u }DC?!!gPu~xw24޽u[_L[YC4xyV9 OHS*c49S2KA0EviUe]i*/.%K"E{ ٧iLdyr3mEJ!uT9+)LYV7,Ume&E^ߍ_oztWBzEG1hO)鬍m>]Vdfe4LF3*E5Z : F3L\@~̌4KIc}ڿFL%~wsT+V8T^OY 춮D3z*|<~HVEosLW4Z6}j }q^Q!%Hi E Һ09K(jmfY2N3cn뇷SRefIRPWժxCq1LvV3dw&8L\S=o:g(^הTnBd}s+c~kJk%qJcT 1~yҔ#/xunf1㐖I2ur`yc0딋s$A}zXyvLu1>Li-кN}Z7U8u;vQ`2\E C/k8ȅ ͱj#(2dջUܧK)'! 
ٰ3DS0V~[S[cy[v:}J?vΗ1D!.aI~֕1ʫڅ)ΪizEuKϯj\U4d50/ES*tUzklE\U' .5AZi>W<m8q47h|}՗/}>լj)e6 W/y 3|rwD<'yh GF@Blzz1FIPUgM>_/RUjoۯsƯ]himܱrfտ꫿t[uƽtuqwweW&(Nu1%QWNrmfMc,Ԋ#)yM+☜gåC+X9)4 XaIx Yc&\֐$0dN]\o4Mf6EYvS5νn,J"m#F@/m ĚU3V@:/>kPDY0J4VQd8Lzېt#2n<8UdQT"mqf2̿hR$.R0vfE xHGLkU|'H L(y1f]1gf6FVRA*dU*Gԭp)2{0e֮9ĠXYQexs5SzqhPyCj+ǍkCQͺ/^oO?=ŧ''\#|>}Z7D=9?P*6&g~WۍCþֺ[:WU=%8_V/'ƫE?0ffG7U(ws?^BϾP0>dE9 xDRV-D5̤db!敷:v1م)TJe3[ ?xdΠ*U aQ_MY!3~sAyĦ"XJSᥚˠD+8 lqUۻ.0]Ь+Ku:xfeF0g 9`lb !)ʮ\ )#B+8PX59 KF)PJP< eb5s"gކ!6ã5T S uT$1)5%ZaQk;3))X[_|ϫ>ba^o!Ufi.۩UM4>~\~~7TN+(2{Uvc 9]*I6MSn~F!軟5SQ0)ŧO?ɫ^xvZO'#?g4??\ FʟןU^о lt j@@[ iU9 qErΐVgk^:+v+gi(˻P1$ue,i9<ƀR, )*t ~(?7+K 8}bLPoCwc.mwGOh-A 9r"CWuZjl-o^Y]~ 3 "jʔ WvҶ1,IàRQhJxB,<Q]Wuh-aA *c\ /1ǢֵV]_QS~y 1z: xOKb^h2FJfz/mi4aWڼB6O4DF[gȕVV!L\5ơOq,k(R{l+JrYx^YJEC4iu9 P.jisp^;ՍpN!)rH:rn?Č2OR2@M!E9PZLNY4I72U@iKBR9 /QN7i=kw\RSO)v!jJ{er jv3Q̜8BUA.cF (A p|~ pQVkcNT8uC4 koHtyb]p1Q5Vn9/cSW~umK(*c$ ޚB ij\DD:5ǪZ%֪uksUE!TX$&v\y7 իF iwCQ ,rԝf~T"y ¢B/b ;\.-Sj>I}cPgOsloI5.^<ƧY9nȏW>>+ϧ'0CM?Oy}_]qrk"Wނˉw?W߽Kn ݱ&|{8x}]u3/7ks~hB:;NgAwipbmZl4'.HVP)KdbcmR22m t~**s[WLK~Cwq fr8TFqxD0Jm쪲3\D*Je23}pֺҜh*.z%*b%o;YKZ@c4$Jh׏RZq9$r4wzJAT9BdҎ8.Cj\4Z˜y):@ET37֕&-z=8i J TBLAH )1.DrץED|m"!qHd9OHV0i`ֈZC2&04[;c^r7k.)#cpH1YLST1JUX2TҼYĔoONץh?NqU1nv땀ocɶ; ʪ67OA$#A)@@GQ Ep{ۖKw̶]]=doWf+V숵VR4byWύHX6`Y )9!q/FsVjæuO$ JAi Jt42V^n S@֪\mLgaaJ -NSXC9%%.DH+NZ+bӘv7Wd  a<(q,8Jl`d)T#MڶmW4cv uJ%r IȪDR$51ZiBjA*r;E!"\zM<\繴*}\B F*I Q_wyʳv.|s7m}u[0'UtPZy=Z;SC?gS'HUGW2qSد_h~xKo?z}@ྫྷ%~~~ë7\[ Yh50}:Vǖď*|} WQ~)h[%nh Zxbs(ƼY?k3X6/6~MJ֙ \8lWOWݷWFvj4,]ם1 Ü{'yyX[C!%VX*an gW5JT¾i,cR b**_%cbtZn~?Zbw#l}2G]ǾO-??ޝ'ÛFڶc]ٶ!EZ Z.F'%ĥջۍۘVkϒcbdV}7dYQT0[NMS+}J, 8 ,OY껣ݼf)ecX6]Ccq B,M?_]ɮ5m5j@#%[TDR3\FRWk *'O5A=ke#qad Q\Z2劂U9B;յ]Ȩdx6SnWV63>cEjJb!mX5$B)ܧȉ J++$9/sB@U*uVZBAJMSƨ9$\t"6VW(`2' BaΜjk!hXic*Q@XI9ݨ&̚t*Xy*'h< JNhW[+OJBϭ3NDQ8SQט貗'Eʝjc-RDZtZĀ]b9ϭ3aNKA8y gB=ߎvkV%sJK e«NP$VJ>6"3BaJȹi|d,S(X¦sBOΩqeg-#)PYbNX 18j_5DX w5Tr&@":)oO2RӒ:< qwC[H r4ۦtcVX+ ڨX.hwMUow]oNg YɜsʋVsU$s4]]61-tN tK|Ή!hgSIW},^(m/۵+T%/u֧X"nj85MH C2p t^ɷoF`%@=b^a2c.'*ڟ G{ɿ>T<,k=OVAf#- OX_۵;Cqα_O' ͼki@oÌǣǧhG_/EU}B ~Zǵ׏똟G_y{7_ЊVJ>iFݏMx JCw7UvUQvaZo m54ZX(iSpn9@I%IW oc2Vs߸Ui2#KtJ)!1ha˵U^?+X-Vx39+ Da`Զc,nm+жԭ4$Af@0ܒ;UQijqz`XH &)"&l,SbJ;WH1:t?,.@&aZ~Nk!@M%j`Zkʈ ĽΑrYuFn5*$LXZb@@49MUZV8K]C9yK~Q sJDc:Myx(K_Rr~Z q Y{˜h4u4>,߇(]C4*բIw{ŊYghX IԂn\o”ȓ)[-\jv?^. R4R('J  )_6.AvBurέSʪBxyiy!0T̬N~ݻRFx7ḤXgz֭Zč6/cܸrn2qj<iqskͪTDTĦN)'y.SJ5^U5cJ)oU5;$.Tvn b+c bōhbgт+\ci Z92(Fi{?qݹR s,>4T7w;*4> ̹C1N)-sɗ0Vt۹XywV &ـM i62FkA\<=%;y҇9 | n_@(#SOx%z9[ѳ3 W{XWR KϿB~1i]K:@?V~\#_c`}/ʓz"/';`Eyu??xngv¡F+mwsa~{>wN)XdDҢp"ڶM[V0eIKpEV`,JJS`%}pjĘm9X~sx}WKXWWfJ!sMc.P^_~C.)S).7 &Fd8?_b;t1V*XEͅaEno(0/鐕AgQJ̕b0I`P*Zj>W~7oWgYp(ZE!V.gRlȨ^ƔNd꜔ sYKplJ8G六VF]pLFr-nMT1B'550Nz3ń Iu~%V^5tBVRJʩ7Y-gI9^#cQi7xAd4s˦7cw~~SպF_\Sই~ۤ,TdqY ڸxuaK(n6S hNJZZgB`#YFQ8Q"V0^C[;fo53XSV4!J>ui{!Y Ȉ)u=Ǽ DC̥w9i7&| 7ZH)gtN2^5kbl[mu)2KZȷG]5g2Uc.yi\pOӔsŇ6T%iۼ;xƱ 1iV0[ T&QH(=Ѹ%1(-Nͳ5`]w9aWy{==۬拓;[*D@T PC k4V6SfdYه7Yc8[<&VI6q?4wңZ>Y{>/|l&?/eֽ+t/6 m̛ v/7]5<=x@6'asExק/ӥ||H>X~oͶ]|Oo[Uބr,0~ӛ C"7ۡkvԢ~y6jud[uQ K%$^ !i*)YE{3r9ą2ڇfC-}7}ٳo~蜹3B=NXRn"_b;Uzv['XN>y.j,y1vFB7_57F31o& p5#e<7cs^AqKJ-!530of[f}k WD02.5$)J;WP/bZUDIֲ V;*8cr@Wj14. 
Seurat/tests/testdata/visium/spatial/tissue_positions_list.csv0000644000176200001440000055173214525500037024652 0ustar liggesusersACGCCTGACACGCGCT-1,0,0,0,1487,1480 TACCGATCCAACACTT-1,0,1,1,1607,1549 ATTAAAGCGGACGAGC-1,0,0,2,1487,1617 GATAAGGGACGATTAG-1,0,1,3,1607,1686 GTGCAAATCACCAATA-1,0,0,4,1487,1755 TGTTGGCTGGCGGAAG-1,0,1,5,1607,1824 GCATCCTCTCCTATTA-1,0,0,6,1487,1893 GCGAGGGACTGCTAGA-1,0,1,7,1607,1962 TGGTACCGGCACAGCC-1,0,0,8,1487,2030 GCGCGTTTAAATCGTA-1,0,1,9,1607,2099 TGCCTTGCCCTTACGG-1,0,0,10,1487,2168 GACGACTTTCCAAGAA-1,0,1,11,1607,2237 CCAGTGAGCTCCTTGT-1,0,0,12,1487,2306 ATACCCTGGCTCAAAT-1,0,1,13,1607,2375 GGGTTTCCGGCTTCCA-1,0,0,14,1487,2443 TAACCGTCCAGTTCAT-1,0,1,15,1607,2512 AAACAACGAATAGTTC-1,0,0,16,1487,2581 CAAGGGAGTGTATTTG-1,0,1,17,1607,2650 CCAAGCTTGATCTCCT-1,0,0,18,1487,2719 TTATTTCATCCCAAAC-1,0,1,19,1607,2788 GAGCGCTATGTCAGGC-1,0,0,20,1487,2856 TATGGCAGACTTTCGA-1,0,1,21,1607,2925 CTTCGTGCCCGCATCG-1,0,0,22,1487,2994 AAACGGGTTGGTATCC-1,0,1,23,1607,3063 TGCAAACCCACATCAA-1,0,0,24,1487,3132 GACGGGATGTCTTATG-1,0,1,25,1607,3200 GGCGAGCATCGAGGAC-1,0,0,26,1487,3269 CGCGTGCTATCAACGA-1,0,1,27,1607,3338 TGAAACCTCAACTCAC-1,0,0,28,1487,3407 CACATAAGGCGACCGT-1,0,1,29,1607,3476 TGACCCAACTCACATT-1,0,0,30,1487,3545 ATACGCCGATCTACCG-1,0,1,31,1607,3613 ACTTATCTGATCTATA-1,0,0,32,1487,3682 GTGTGAGCCGAGGTGC-1,0,1,33,1607,3751 GATGATTTGAAACTGG-1,0,0,34,1487,3820 GGGAACCACCTGTTTC-1,0,1,35,1607,3889 GTTCGTTGCGGACCAG-1,0,0,36,1487,3958 TGAGGTTGATCCCAAG-1,0,1,37,1607,4026 GATGCCACACTACAGC-1,0,0,38,1487,4095 AGGCAAAGAGGAATCA-1,0,1,39,1607,4164 AAGTAAGCTTCCAAAC-1,0,0,40,1487,4233 AACGTAGTCTACCCAT-1,0,1,41,1607,4302 GTTTGAGCGGTTATGT-1,0,0,42,1487,4371 GAAGCAAGGCAATGTT-1,0,1,43,1607,4439 TCACTCAGCGCATTAG-1,0,0,44,1487,4508 TACAATGAAACCAGCA-1,0,1,45,1607,4577 GTGCGCTTACAAATGA-1,0,0,46,1487,4646 GCACTCCCACAGTCCC-1,0,1,47,1607,4715 CGAAGACTGCCCGGGA-1,0,0,48,1487,4784 CAGGATCCGCCCGACC-1,0,1,49,1607,4852 CACGATTGGTCGTTAA-1,0,0,50,1487,4921 GGTTGTATCGTGAAAT-1,0,1,51,1607,4990 
TCTTATGGGTAGTACC-1,0,0,52,1487,5059 TACAAGCTGTTCACTG-1,0,1,53,1607,5128 GTATCTTGTTGCTCAC-1,0,0,54,1487,5197 ATACCAGGTGAGCGAT-1,0,1,55,1607,5265 CCTAAACAGGGTCCGT-1,0,0,56,1487,5334 ATGGTGCTCAAAGCCA-1,0,1,57,1607,5403 CAAATGCGGAGTGTTC-1,0,0,58,1487,5472 CGTGCCCGACATTTGT-1,0,1,59,1607,5541 GTATCTCCCTAACTGT-1,0,0,60,1487,5610 ATTTGCCTAGTTACGA-1,0,1,61,1607,5678 ACGTCCTAAACGAGAT-1,0,0,62,1487,5747 CTGGGATCGCCCAGAT-1,0,1,63,1607,5816 CTGCAAATGGGCTCCA-1,0,0,64,1487,5885 CATTATAACAGGGTCC-1,0,1,65,1607,5954 ACCTTTCCTTTAGAAG-1,0,0,66,1487,6022 ATAGATTTGCAGTCGG-1,0,1,67,1607,6091 CTCGGGCATCGTCGGG-1,0,0,68,1487,6160 GTGGCGGGCCGTAGCT-1,0,1,69,1607,6229 CAACAGTGCCAAACGG-1,0,0,70,1487,6298 TGCGGGTATTGGGATC-1,0,1,71,1607,6367 GTCTCGCCAACACGCC-1,0,0,72,1487,6435 CTGGGCGGCCAAATGT-1,0,1,73,1607,6504 TAAAGGAGAAACTAGT-1,0,0,74,1487,6573 TCCCACGGAGGGAGCT-1,0,1,75,1607,6642 AGCTTCAATACTTTGA-1,0,0,76,1487,6711 TTCCACATTTCTCGTC-1,0,1,77,1607,6780 ACAAACCGACAAGGCG-1,0,0,78,1487,6848 AGACGGGATTGGTATA-1,0,1,79,1607,6917 AACCTAAAGCCGTCCG-1,0,0,80,1487,6986 TACAAATTGCGGAGGT-1,0,1,81,1607,7055 CCCGCTAGAGGGTTAA-1,0,0,82,1487,7124 CATTGCAAAGCATAAT-1,0,1,83,1607,7193 TGTACGCTATCAGCTT-1,0,0,84,1487,7261 TTCTTCGCAATAGAGC-1,0,1,85,1607,7330 TGTGATTCCAGCGCTT-1,0,0,86,1487,7399 ATTCAGGATCGCCTCT-1,0,1,87,1607,7468 GCCCATGGGTGCAATG-1,0,0,88,1487,7537 TTCCCGACGCTTCACT-1,0,1,89,1607,7606 AGCGGTTGAGATGTAC-1,0,0,90,1487,7674 GCTGTCTGTGATCGAC-1,0,1,91,1607,7743 AAAGACATGAAGTTTA-1,0,0,92,1487,7812 CAACAGAATAACGCTA-1,0,1,93,1607,7881 TGCGGTCTACGAGTAA-1,0,0,94,1487,7950 AAGACTCACGCCCACT-1,0,1,95,1607,8019 CTTTGAAACATATTCC-1,0,0,96,1487,8087 CTGGGCACTAGTCGGA-1,0,1,97,1607,8156 CGCCCTTACATCCACC-1,0,0,98,1487,8225 CACGACCACAGACTTT-1,0,1,99,1607,8294 CAATCCATTATCCGTT-1,0,0,100,1487,8363 GTGGCGTGCACCAGAG-1,0,1,101,1607,8432 CGGAGTCCTAACCTGG-1,0,0,102,1487,8500 GGTCCCATAACATAGA-1,0,1,103,1607,8569 ATCTCATAAACCTACC-1,0,0,104,1487,8638 TGCATGGCAGTCTTGC-1,0,1,105,1607,8707 TTGCAGGTCATGAAGT-1,0,0,106,1487,8776 AGCTGCATTTGAGGTG-1,0,1,107,1607,8844 TAATCAGGAATGCTGC-1,0,0,108,1487,8913 CCATCATAAGAACAGG-1,0,1,109,1607,8982 TCGTATCACCAAGCTA-1,0,0,110,1487,9051 ATTCAGATGAATCCCT-1,0,1,111,1607,9120 AAAGGTCAACGACATG-1,0,0,112,1487,9189 AGCTGCTGTGCCGAAT-1,0,1,113,1607,9257 CTAGCGCCAATCCTAC-1,0,0,114,1487,9326 GCTCGACCGAACTGAA-1,0,1,115,1607,9395 ACAGTGCAGCGCATTT-1,0,0,116,1487,9464 CGGCTGAAGGTTACGC-1,0,1,117,1607,9533 CACCTCTACGAGTGTG-1,0,0,118,1487,9602 ATACGACAGATGGGTA-1,0,1,119,1607,9670 ACTTCCTGTCGTGCGA-1,0,0,120,1487,9739 CGTAACGGAACGATCA-1,0,1,121,1607,9808 AAATCACTCCTAAACG-1,0,0,122,1487,9877 CTCCGAGTAAATCCGC-1,0,1,123,1607,9946 ACGCTAGTATCAGTGC-1,0,0,124,1487,10015 AGAGTGAACAGACACC-1,0,1,125,1607,10083 ACACCCGTAAATCTGT-1,0,0,126,1487,10152 GCTTTGCTGCCGGGTA-1,0,1,127,1607,10221 ACAGGAGGCGCAGCCG-1,0,2,0,1727,1480 AGGCAATACGGAGGAC-1,0,3,1,1847,1549 TGGTGTGACAGACGAT-1,0,2,2,1727,1617 ATCTATCGATGATCAA-1,0,3,3,1847,1686 CGGTAACAAGATACAT-1,0,2,4,1727,1755 TCGCCGGAGAGTCTTA-1,0,3,5,1847,1824 GGAGGAGTGTGTTTAT-1,0,2,6,1727,1893 TTAGGTGTGACTGGTC-1,0,3,7,1847,1962 CAGGGCTAACGAAACC-1,0,2,8,1727,2030 CCCGTGGGTTAATTGA-1,0,3,9,1847,2099 GACCGACCGCTAATAT-1,0,2,10,1727,2168 GGTATCAAGCATAGAA-1,0,3,11,1847,2237 TGCATGAGTAGATTCG-1,0,2,12,1727,2306 AATTCCAACTTGGTGA-1,0,3,13,1847,2375 TGCCGATGTCATCAAT-1,0,2,14,1727,2443 GCTGGGTCCGCTGTTA-1,0,3,15,1847,2512 TGAACACCCGAAGCAG-1,0,2,16,1727,2581 AACATTGGTCAGCCGT-1,0,3,17,1847,2650 GTGGGTCTTCTTTGCG-1,0,2,18,1727,2719 CATCGAATGGATCTCT-1,0,3,19,1847,2788 GCTACACTGTCCGAAC-1,0,2,20,1727,2856 CGGGTTGTAGCTTTGG-1,0,3,21,1847,2925 
CCTAAGTGTCTAACCG-1,0,2,22,1727,2994 TCTGTGACTGACCGTT-1,0,3,23,1847,3063 TTATCATACTCGCAAA-1,0,2,24,1727,3132 AGCGTAGCGCTAGACC-1,0,3,25,1847,3201 TCCCTCCGAAATCGTT-1,0,2,26,1727,3269 AGGTCGCCACTTCGGT-1,0,3,27,1847,3338 CTAGCAACTAATTTAC-1,0,2,28,1727,3407 TTGCTAGCTACCAATC-1,0,3,29,1847,3476 GCCGGTTTGGGCGGAT-1,0,2,30,1727,3545 TGTAACTTGTCAACCT-1,0,3,31,1847,3613 CGAGATGTTGCCTATA-1,0,2,32,1727,3682 GTTACGAAATCCACGC-1,0,3,33,1847,3751 CTTGTCGTACGTGTCA-1,0,2,34,1727,3820 GCGTCCAGCTCGTGGC-1,0,3,35,1847,3889 CCCTTCTCGTACGCGA-1,0,2,36,1727,3958 CCAAAGTCCCGCTAAC-1,0,3,37,1847,4026 CCGCTTCGCGGTTAAC-1,0,2,38,1727,4095 GTTACGGCCCGACTGC-1,0,3,39,1847,4164 CCCGCTTGCCCTCGTC-1,0,2,40,1727,4233 TAGTGAGAAGTGGTTG-1,0,3,41,1847,4302 CGCTACCGCCCTATGA-1,0,2,42,1727,4371 AAACAATCTACTAGCA-1,0,3,43,1847,4439 GCGCGATGGGTCAAGT-1,0,2,44,1727,4508 ATAAACCATTGGACGG-1,0,3,45,1847,4577 TCGGGCACTTCTGGAT-1,0,2,46,1727,4646 TCTGTGGCTACATTTC-1,0,3,47,1847,4715 CTCTGTGCCTGCTATG-1,0,2,48,1727,4784 CACGACTAAAGTTCTG-1,0,3,49,1847,4852 GAGGAGTAATTCCTAC-1,0,2,50,1727,4921 AGAGGTATCTCGGTCC-1,0,3,51,1847,4990 GGCGTACCCTATATAA-1,0,2,52,1727,5059 GCCGGAAACACATCTT-1,0,3,53,1847,5128 AAATGTGGGTGCTCCT-1,0,2,54,1727,5197 ACCAGGAGTGTGATCT-1,0,3,55,1847,5265 TGTGGAGGAAGCTTAA-1,0,2,56,1727,5334 AAGGAGAACTTATAAG-1,0,3,57,1847,5403 CCCTCGGGAGCCTTGT-1,0,2,58,1727,5472 ACTGTTTAGTGTAGGC-1,0,3,59,1847,5541 CGTCAGTTTATCGTCT-1,0,2,60,1727,5610 GCGTGTATGTCGTATT-1,0,3,61,1847,5678 ACAATCGATCTTTATA-1,0,2,62,1727,5747 CAGCCCTCACAGGCAG-1,0,3,63,1847,5816 CGCGTCATATTAAACC-1,0,2,64,1727,5885 GAAGACTTCAATGCCG-1,0,3,65,1847,5954 TTGCGGCGACTCATGC-1,0,2,66,1727,6022 ACCAAACTAGAAATCC-1,0,3,67,1847,6091 TTACTGTTTCTCTACG-1,0,2,68,1727,6160 GACCAGGTCATTCATA-1,0,3,69,1847,6229 TTCTTCCCTTTGATAT-1,0,2,70,1727,6298 ACGCCCAGCTGTCGAT-1,0,3,71,1847,6367 AGTAGCGTGAACGAAC-1,0,2,72,1727,6435 CCTCGACCCACTGCCT-1,0,3,73,1847,6504 AGTTATTGAAAGGTAA-1,0,2,74,1727,6573 TCAGTTACGGAATGAT-1,0,3,75,1847,6642 GAATCTATACTCGGAC-1,0,2,76,1727,6711 TCGGCTAACTTCCCTT-1,0,3,77,1847,6780 ACGTGGTCGAATGTGC-1,0,2,78,1727,6848 ATATCGTGCCAGACCC-1,0,3,79,1847,6917 GTAGCTAGTAAGCGCG-1,0,2,80,1727,6986 ACGCTTAGTGTCTCTC-1,0,3,81,1847,7055 TCCGGCCTGCATCGAT-1,0,2,82,1727,7124 TAGTGGAACTCATACA-1,0,3,83,1847,7193 ATCATCTGCCCAGTGT-1,0,2,84,1727,7261 GTTATTAAATACGACC-1,0,3,85,1847,7330 GCGCTAAGTATGCATG-1,0,2,86,1727,7399 CCTGACGCAACCTTTA-1,0,3,87,1846,7468 CCCAAGAATGCACGGT-1,0,2,88,1727,7537 AACTGGGTTCGAGCCG-1,0,3,89,1846,7606 GGTTCCACCCGCTTCT-1,0,2,90,1727,7674 CATGCACGTGTTACTG-1,0,3,91,1846,7743 AGCGTTCCGATTTAAA-1,0,2,92,1727,7812 CCTACGCGACCTTACA-1,0,3,93,1846,7881 CGAATTACATGGTGTT-1,0,2,94,1727,7950 GAGGTCTTAGTGGGTC-1,0,3,95,1846,8019 GCCGCTAGATACGCAG-1,0,2,96,1727,8087 GTCACCTGTCTATGTC-1,0,3,97,1846,8156 CCGATTGGTCAATGAA-1,0,2,98,1727,8225 CCTGTGCGGATTGTAA-1,0,3,99,1846,8294 TTACGTAGCGCGTGCT-1,0,2,100,1727,8363 GGAGGCGAAGAACCGC-1,0,3,101,1846,8432 GGGTCACGTGCTTATG-1,0,2,102,1727,8500 GCTCCGGACGTTGATA-1,0,3,103,1846,8569 ATGTTTGTAAGATCAT-1,0,2,104,1727,8638 TGACCCAGCATTCCCG-1,0,3,105,1846,8707 TGGTCGTTTGATAGAT-1,0,2,106,1727,8776 TGTAATGCCTTCGGAC-1,0,3,107,1846,8844 TGCTCACACAACAACC-1,0,2,108,1727,8913 TACGATCCAAGCCACT-1,0,3,109,1846,8982 TTGTAACTTCATAGCG-1,0,2,110,1727,9051 AGATTCAAGCGGGTCG-1,0,3,111,1846,9120 CTCAGCAGACTGCCGA-1,0,2,112,1727,9189 GTAACATCAGCTCATC-1,0,3,113,1846,9257 ATGGAACAGAATAAAC-1,0,2,114,1727,9326 GGGCCTATACAACCGG-1,0,3,115,1846,9395 TCAAACAATTAGGACA-1,0,2,116,1727,9464 AAACCACTACACAGAT-1,0,3,117,1846,9533 AAACGACAGTCTTGCC-1,0,2,118,1727,9602 TTGAGGGTCGAACGCG-1,0,3,119,1846,9670 
TGTTGATCACTGTTTA-1,0,2,120,1727,9739 AGGGTGTGCTACACGC-1,0,3,121,1846,9808 GTAGTTAGACAATATA-1,0,2,122,1727,9877 AATGGCCGCCAATGCG-1,0,3,123,1846,9946 TCGGCGGTATTAGATT-1,0,2,124,1727,10015 GGGTCACTGAGTAGTG-1,0,3,125,1846,10083 GAATTATGCAACCTAC-1,0,2,126,1727,10152 GATCTTAGTGAACGTG-1,0,3,127,1846,10221 CTAATGCGCCCAACAA-1,0,4,0,1966,1480 GCCACCCATTCCACTT-1,0,5,1,2086,1549 TACTCACAACGTAGTA-1,0,4,2,1966,1617 GTTCGGTGTGGATTTA-1,0,5,3,2086,1686 TCTTTCGGCGGGACAC-1,0,4,4,1966,1755 GGAGACATTCACGGGC-1,0,5,5,2086,1824 GGGATTATCTCACAAC-1,0,4,6,1966,1893 TAGAACGCCAGTAACG-1,0,5,7,2086,1962 ACGAGTCGCCGGCGTT-1,0,4,8,1966,2030 TGATGGGACTAAGTCA-1,0,5,9,2086,2099 TGCGAGAAACGTTACG-1,0,4,10,1966,2168 TCGCCTCGACCTGTTG-1,0,5,11,2086,2237 AACTCGATAAACACGT-1,0,4,12,1966,2306 AGGAAAGCCTCTGATG-1,0,5,13,2086,2375 GAAGGACTAAATTGAA-1,0,4,14,1966,2443 GTATCGGGACGAGCTG-1,0,5,15,2086,2512 CCTGTGCATAGGAGAC-1,0,4,16,1966,2581 CATACGGGTGCATGAT-1,0,5,17,2086,2650 CCACTAAACTGAATCG-1,0,4,18,1966,2719 AAATTGCGGCGGTTCT-1,0,5,19,2086,2788 AGTCCAGCGGGTACGT-1,0,4,20,1966,2856 CATTCAGGTCAGTGCG-1,0,5,21,2086,2925 CTAAAGTCCGAAGCTA-1,0,4,22,1966,2994 AATCAGACTGCAGGAC-1,0,5,23,2086,3063 AGTATCCATAATAACG-1,0,4,24,1966,3132 CTGGCTGCTAACGTAA-1,0,5,25,2086,3201 GTTCCAAGACAGCGAC-1,0,4,26,1966,3269 AAGACTAACCCGTTGT-1,0,5,27,2086,3338 GATTAATCCTGGCTCA-1,0,4,28,1966,3407 CGCGCAAGGAACTACA-1,0,5,29,2086,3476 CAGTAGCGAGGTAGTA-1,0,4,30,1966,3545 ACGGCGGGTTGCCCTG-1,0,5,31,2086,3613 CTAGGCGGCAGAGAAT-1,0,4,32,1966,3682 GTGCGCAGCTTGCTCC-1,0,5,33,2086,3751 TCACTATCGTGCAATC-1,0,4,34,1966,3820 TATGATTCTGCTTGGT-1,0,5,35,2086,3889 TAAGATTTAGCGGGAG-1,0,4,36,1966,3958 TTACGGTGTCACCGAG-1,0,5,37,2086,4026 CTACACTAGCTTGTTC-1,0,4,38,1966,4095 TGAGCAGTCGTGAAGT-1,0,5,39,2086,4164 CGCTGAGGACGTCCAA-1,0,4,40,1966,4233 GTGTATGACTTTAAAG-1,0,5,41,2086,4302 CTAAACGGGTGTAATC-1,0,4,42,1966,4371 TGTACTGTGCCAAAGT-1,0,5,43,2086,4439 GGCCACAAGCGATGGC-1,0,4,44,1966,4508 GTCAATTGTACTGAAG-1,0,5,45,2086,4577 AGGGACAGCACGGCGG-1,0,4,46,1966,4646 AGCTTATAGAGACCTG-1,0,5,47,2086,4715 AACTAGCGTATCGCAC-1,0,4,48,1966,4784 AACTTTAGCTGCTGAG-1,0,5,49,2086,4852 CCCAAGACAGAGTATG-1,0,4,50,1966,4921 GGCATCAACGAGCACG-1,0,5,51,2086,4990 ATGCATTCCGTGATGG-1,0,4,52,1966,5059 TTATAGATGCACATTA-1,0,5,53,2086,5128 GAACCATCTGGGAGAC-1,0,4,54,1966,5197 TGCTATACAAACGGAC-1,0,5,55,2086,5265 ACTTGCCATATTGTAC-1,0,4,56,1966,5334 TATTCCGGCAGTCCTA-1,0,5,57,2086,5403 GACGGACCGCGTTCCT-1,0,4,58,1966,5472 ATGTGTAGTTTAGTCA-1,0,5,59,2086,5541 ATACCAGCAAATTGCT-1,0,4,60,1966,5610 AAGTTTACTAATGGCA-1,0,5,61,2086,5678 CTCTCGATGTGCGCCT-1,0,4,62,1966,5747 GATTGACACTCTGCTC-1,0,5,63,2086,5816 TATCACAGCACGGGCA-1,0,4,64,1966,5885 ACCGTTCCCGCTCTGA-1,0,5,65,2086,5954 CCGCCACCACAATCCA-1,0,4,66,1966,6023 CATTCACTGACAGCTA-1,0,5,67,2086,6091 CGGCTGCAAGATTAAG-1,0,4,68,1966,6160 CATGAACCTCTTATCA-1,0,5,69,2086,6229 TTAATGCGAGGTAACT-1,0,4,70,1966,6298 AATAAGTCCTCGAGAC-1,0,5,71,2086,6367 ACCAGCCCGGTCTTTG-1,0,4,72,1966,6435 CTACGAACTAGGTCGA-1,0,5,73,2086,6504 ACATCTCAACGCGTAA-1,0,4,74,1966,6573 CACTACTCAGTTCTGT-1,0,5,75,2086,6642 CCGACTCGCATAGTCT-1,0,4,76,1966,6711 CATTTATCGTTCAAGA-1,0,5,77,2086,6780 CAAACGTGGTCTTGCG-1,0,4,78,1966,6848 TAGAAACCACTAAGTA-1,0,5,79,2086,6917 ACTGATTTAGTGATTC-1,0,4,80,1966,6986 TCGTATTTCGTCCGGA-1,0,5,81,2086,7055 CGGAAATTTCACATCC-1,0,4,82,1966,7124 ATCCACGCTAAATGTT-1,0,5,83,2086,7193 GTTCAATCTATGTCAA-1,0,4,84,1966,7261 ATAAAGGTCAAGTACG-1,0,5,85,2086,7330 CAACTCCAACGTTTAG-1,0,4,86,1966,7399 TAGGAACAGCCTCCAG-1,0,5,87,2086,7468 ATGGGAACGGAAGCGG-1,0,4,88,1966,7537 CACACGTTTCAATGGG-1,0,5,89,2086,7606 
GGTGTTCTGTTTCTAC-1,0,4,90,1966,7674 AGTAACGTTCATCCTG-1,0,5,91,2086,7743 GTATAGTGGCCCATGT-1,0,4,92,1966,7812 TCTACACGTTCATGCA-1,0,5,93,2086,7881 AATCTGGGTAGACCCT-1,0,4,94,1966,7950 TCGGTTAGCCATGTAG-1,0,5,95,2086,8019 TGCCATGGCTTATAAG-1,0,4,96,1966,8087 TAAGTAAATGTGCCGC-1,0,5,97,2086,8156 GTGTCCGATAAGGCAT-1,0,4,98,1966,8225 TGGCACGAGCTCGAGT-1,0,5,99,2086,8294 ACCGGTCTGAGTACGG-1,0,4,100,1966,8363 GAACTTAGCGCCCGGT-1,0,5,101,2086,8432 AGTAGCTAGACGCCGA-1,0,4,102,1966,8500 ATAGGAATCTAAGCTT-1,0,5,103,2086,8569 CTTCCTGCATATTTAC-1,0,4,104,1966,8638 CAATATGTAGATTTAC-1,0,5,105,2086,8707 ACAAGGCCTACCAGCC-1,0,4,106,1966,8776 TTATAGTCCAAGGTGC-1,0,5,107,2086,8845 AAACGCCCGAGATCGG-1,0,4,108,1966,8913 CCTCGTTACGCCTGTT-1,0,5,109,2086,8982 GAACGGTGTAAAGCAG-1,0,4,110,1966,9051 ACGCATAAATGACATG-1,0,5,111,2086,9120 GGTTCGATGCTGAGTT-1,0,4,112,1966,9189 CTTTGGCAGACAGAGT-1,0,5,113,2086,9257 TTCGTGGGCTGGAAGC-1,0,4,114,1966,9326 CAAAGGTTAAATTCAG-1,0,5,115,2086,9395 GTTTGGCGTCAGGCAC-1,0,4,116,1966,9464 GCTTTCTATCTCAACT-1,0,5,117,2086,9533 TGCATCTCCGGATCTT-1,0,4,118,1966,9602 CTGAAACGGCCCTCAG-1,0,5,119,2086,9670 TAGCAGTAAATACGCG-1,0,4,120,1966,9739 CGGGCTACTTAAATTG-1,0,5,121,2086,9808 ATTATGCTCAGTATTG-1,0,4,122,1966,9877 TGATGCTCACGTAGTC-1,0,5,123,2086,9946 GTCTAAGATGCCCAGC-1,0,4,124,1966,10015 AACCCGATAGGGCTTC-1,0,5,125,2086,10083 CGCTATCGTGGCTTTA-1,0,4,126,1966,10152 CGTCTCTCGCCGAGGC-1,0,5,127,2086,10221 AGTGGGAGTATACACG-1,0,6,0,2206,1480 GGTCTTGGTGTTAACT-1,0,7,1,2326,1549 GGCTGGCAGCTTTATG-1,0,6,2,2206,1617 CGCCAATTATTGCGTT-1,0,7,3,2326,1686 GGTAACCGGCAAAGGT-1,0,6,4,2206,1755 TGGGACCATTGGGAGT-1,0,7,5,2326,1824 CTGCAGGTGCTCGGCC-1,0,6,6,2206,1893 CCGGTGCGAGTGATAG-1,0,7,7,2326,1962 GGGTACACTCTGGAGG-1,0,6,8,2206,2030 TAGCCAGAGGGTCCGG-1,0,7,9,2326,2099 CTTGTGAGGACAGCGG-1,0,6,10,2206,2168 GAAGGGCATAACCATG-1,0,7,11,2326,2237 CAACATGGCCTGATAA-1,0,6,12,2206,2306 CAATTTGACCGGGAAG-1,0,7,13,2326,2375 TCTGACTGTAATGGTT-1,0,6,14,2206,2443 TTCATAGCCTTGTAAC-1,0,7,15,2326,2512 TGGAAACGGAGTGAAC-1,0,6,16,2206,2581 ATCGCACGATTGTTCA-1,0,7,17,2326,2650 CGCCACCCGCATTAAC-1,0,6,18,2206,2719 TGGACCACGGCGTTGA-1,0,7,19,2326,2788 GTATATGTTACGGCGG-1,0,6,20,2206,2856 GTATTCTTACCGTGCT-1,0,7,21,2326,2925 TTCAGAGTAACCTGAC-1,0,6,22,2206,2994 GCGGTAACCCAAATGA-1,0,7,23,2326,3063 CTACGTGTTGCCACCA-1,0,6,24,2206,3132 CTAGATAAACTCCTCG-1,0,7,25,2326,3201 TCCATTAGTTGGATAG-1,0,6,26,2206,3269 CTGGCTCCTGCGGGAT-1,0,7,27,2326,3338 CAGTCTCTCGGCTAAT-1,0,6,28,2206,3407 GTATGACGTGGGAAAC-1,0,7,29,2326,3476 AGTCACTCCGCCTCAT-1,0,6,30,2206,3545 GCAGCGGTGGGCATTA-1,0,7,31,2326,3614 TATGGAGTTTCTCGTT-1,0,6,32,2206,3682 ACTCAACGAATGTATT-1,0,7,33,2326,3751 AACACGCGGCCGCGAA-1,0,6,34,2206,3820 CGATATTAGCCGCAGG-1,0,7,35,2326,3889 AGCGTCTGAACCCGCA-1,0,6,36,2206,3958 GATGTCCGGATCACAT-1,0,7,37,2326,4026 GGTCACGTTAGATTCA-1,0,6,38,2206,4095 TTAAGGATACGGAGGT-1,0,7,39,2326,4164 GTGCGGGACCATCGGC-1,0,6,40,2206,4233 CCATCTTGTTCACAAT-1,0,7,41,2326,4302 TCCGAGAAGGCTAAGC-1,0,6,42,2206,4371 TGGCGGTGTGCGATTG-1,0,7,43,2326,4439 ATCCTGCTGCAGATAG-1,0,6,44,2206,4508 TTATGCGTCCCGGTCC-1,0,7,45,2326,4577 CATAATGAGCGGGCGA-1,0,6,46,2206,4646 AGACATAGATCCTTCC-1,0,7,47,2326,4715 GGTGAAACCGGGAATG-1,0,6,48,2206,4784 AACTGGTGTGGGCCTT-1,0,7,49,2326,4852 GTAGCGCTGTTGTAGT-1,0,6,50,2206,4921 TTGTTTGTGTAAATTC-1,0,7,51,2326,4990 GGATCAAAGGACGAGG-1,0,6,52,2206,5059 CGTAGCGCCGACGTTG-1,0,7,53,2326,5128 CAAGTGAACTTTGGTT-1,0,6,54,2206,5197 GTAGACAACCGATGAA-1,0,7,55,2326,5265 CAATGGTCGGCCTGGG-1,0,6,56,2206,5334 ACAGATTAGGTTAGTG-1,0,7,57,2326,5403 GTTATCACCTTCTGAA-1,0,6,58,2206,5472 TGGTATCGGTCTGTAT-1,0,7,59,2326,5541 
GGAATAACCTCAAGAA-1,0,6,60,2206,5610 ATTATCTCGACAGATC-1,0,7,61,2326,5678 CCGAGGGATGTTAGGC-1,0,6,62,2206,5747 TGAGATCAAATACTCA-1,0,7,63,2326,5816 AAACGAAGAACATACC-1,0,6,64,2206,5885 CTGGTCCTAACTTGGC-1,0,7,65,2326,5954 TGCACGAGTCGGCAGC-1,0,6,66,2206,6023 ATAGTCTTTGACGTGC-1,0,7,67,2326,6091 TGGAGCTAAAGTTCCC-1,0,6,68,2206,6160 GGGTGGTCCAGCCTGT-1,0,7,69,2326,6229 CATGCATGGAGACCCT-1,0,6,70,2206,6298 ACACGGCACTATGCAT-1,0,7,71,2326,6367 CCCTGGTATGGGCGGC-1,0,6,72,2206,6435 GGAGGATTGAAAGGAG-1,0,7,73,2326,6504 CCGCTGGTGCCATTCA-1,0,6,74,2206,6573 GTTAGAGTGTGCCGCT-1,0,7,75,2326,6642 TCGGAATGACCATCAA-1,0,6,76,2206,6711 TTCAATTAGCCATAAT-1,0,7,77,2326,6780 GATGTGTTGTCACAAG-1,0,6,78,2206,6848 TCTTTCTCTTAAGGAG-1,0,7,79,2326,6917 ACCCTTTAGTTCTCCA-1,0,6,80,2206,6986 ACCACAACTCAGAACA-1,0,7,81,2326,7055 TATGATAAATCTAACG-1,0,6,82,2206,7124 GATCCTCTTGCGCTTA-1,0,7,83,2326,7193 TTCTACCTTTATGTTG-1,0,6,84,2206,7261 GAAATACCTGCTGGCT-1,0,7,85,2326,7330 ATTCTGAGTATGAACT-1,0,6,86,2206,7399 GGATTAAGCTAAGGTC-1,0,7,87,2326,7468 AGTACGTGGCCTGTCT-1,0,6,88,2206,7537 TCAGGGTGCACGAAAC-1,0,7,89,2326,7606 AAATTTACCGAAATCC-1,0,6,90,2206,7674 TTGAGGCATTTAACTC-1,0,7,91,2326,7743 AACCAGTATCACTCTT-1,0,6,92,2206,7812 CACCGGAGATATCTCC-1,0,7,93,2326,7881 GACTGGGCGCCGCAAC-1,0,6,94,2206,7950 CACGTCTATGATGTGG-1,0,7,95,2326,8019 TTAAGACGAACGAACC-1,0,6,96,2206,8087 TGACCAGCTTCAAAGT-1,0,7,97,2326,8156 AGAGTTAGAGACCGAT-1,0,6,98,2206,8225 TTCGGACTGATGCCTT-1,0,7,99,2326,8294 CTCGAATGGAACGTAT-1,0,6,100,2206,8363 GGACGGCTTGCGCAAC-1,0,7,101,2326,8432 CTAAGTACAGGGCTAC-1,0,6,102,2206,8500 ACAAATTCAGATCTGA-1,0,7,103,2326,8569 CATGGAAATGGGACCA-1,0,6,104,2206,8638 GGTGGACCACGTGTTA-1,0,7,105,2326,8707 CACGACGTAATAGTAA-1,0,6,106,2206,8776 CGGGTTCGGCACGTAT-1,0,7,107,2325,8845 CTGGGCTATCCTTTGG-1,0,6,108,2206,8913 GTATTAGGGTTCGCGT-1,0,7,109,2325,8982 TCATTCGTATAATTTG-1,0,6,110,2206,9051 AATAGCAAGCCTCCTG-1,0,7,111,2325,9120 CATCTACCCGAGAACG-1,0,6,112,2206,9189 GCTTCAGTGGGATTAC-1,0,7,113,2325,9257 TCTGTGATGGAGGTTG-1,0,6,114,2206,9326 ATCCACTTTCAGACTA-1,0,7,115,2325,9395 ATGGTTACGAAACATG-1,0,6,116,2206,9464 GGCCCAATCTAGAGGG-1,0,7,117,2325,9533 GATGGTGAAATAACCC-1,0,6,118,2206,9602 AGAGGGACAATTGTCC-1,0,7,119,2325,9670 CGCGTACATTCTGGAA-1,0,6,120,2206,9739 CAAGAAACCCTAAACT-1,0,7,121,2325,9808 TTGGTGCGGTGTTGAA-1,0,6,122,2206,9877 GGTTCCCTAGTGTCTC-1,0,7,123,2325,9946 CGATAACCAATTTGAG-1,0,6,124,2206,10015 GCCCACTGGTCCACAA-1,0,7,125,2325,10083 GAGGGCCGGCAGAGTC-1,0,6,126,2206,10152 CGACACGGATGCCCAC-1,0,7,127,2325,10221 CTGTCTGTGGCTGGCT-1,0,8,0,2446,1480 ATATTATCCCGTATTT-1,0,9,1,2565,1549 GCGCTGGCGGAAAGTC-1,0,8,2,2446,1617 ATCTAACGTCCCTATG-1,0,9,3,2565,1686 GTCAGACAGCGTTGGA-1,0,8,4,2446,1755 GCCAGGCTTAGTGGTA-1,0,9,5,2565,1824 ATTCAAAGTACCTGTT-1,0,8,6,2446,1893 TGGACGTAGGCGAATC-1,0,9,7,2565,1962 ACACATTGACGCAACA-1,0,8,8,2446,2030 GATATCAGTATGTATC-1,0,9,9,2565,2099 TGGGCCTTGCCTGCAT-1,0,8,10,2446,2168 CAAAGTCAGGTTAGCT-1,0,9,11,2565,2237 GGATCCCTACCAGCTA-1,0,8,12,2446,2306 ATCGTCCAATCGAGTC-1,0,9,13,2565,2375 ACATGGCTCAATTTAG-1,0,8,14,2445,2443 AGGCCCAGTGACTGGT-1,0,9,15,2565,2512 GCTTCCAGCTTAGATT-1,0,8,16,2445,2581 TGCTTGAAACCATGCA-1,0,9,17,2565,2650 CAATATTGGACTAGTG-1,0,8,18,2445,2719 CGTGCTGGCCTAGTCG-1,0,9,19,2565,2788 CCTGCGATAGAACTGT-1,0,8,20,2445,2856 GGGTAATGCTGTGTTT-1,0,9,21,2565,2925 AACGCGAACGGCAACA-1,0,8,22,2445,2994 TGTCGGCATGGTGGAA-1,0,9,23,2565,3063 AGCGTACGAGAGCTAG-1,0,8,24,2445,3132 ATACTCTCGCCACTCT-1,0,9,25,2565,3201 AATCCATGCAAGGGTG-1,0,8,26,2445,3269 TTAAACAGAGTCCCGC-1,0,9,27,2565,3338 CCACAGCTGAAATCAT-1,0,8,28,2445,3407 CGGTTCCGGCTTCTTG-1,0,9,29,2565,3476 
GACGTGAGACTCCATG-1,0,8,30,2445,3545 TCGTTGGCTCGTCAAT-1,0,9,31,2565,3614 GGTGAACGGGCTAGCC-1,0,8,32,2445,3682 GCACTGTGCAAATGTA-1,0,9,33,2565,3751 ACGAGAACCCATCACG-1,0,8,34,2445,3820 CCAGCTACGCCTCATA-1,0,9,35,2565,3889 TCCCGGTCAGGAATTT-1,0,8,36,2445,3958 TCGCATTCAATGACTT-1,0,9,37,2565,4026 CTGGTTCAACGCATCA-1,0,8,38,2445,4095 GGTGATTTCATCTTGT-1,0,9,39,2565,4164 CACCCTTTCCTCGCTC-1,0,8,40,2445,4233 CAACTTGTAGTGGGCA-1,0,9,41,2565,4302 AATATCAAGGTCGGAT-1,0,8,42,2445,4371 ACTCAGACCTGCTTCT-1,0,9,43,2565,4439 TTGGAGTCTCCCTTCT-1,0,8,44,2445,4508 GGATACTCATGAATTG-1,0,9,45,2565,4577 TGGGCACAAACAGAAC-1,0,8,46,2445,4646 GAGCCACGGTAGTAGG-1,0,9,47,2565,4715 TCGATAGGCTAGTCGC-1,0,8,48,2445,4784 TAACCGCCCGCAGTGC-1,0,9,49,2565,4852 GCCTATTTGCTACACA-1,0,8,50,2445,4921 TTGACGATTCAGCACG-1,0,9,51,2565,4990 TTAAACCGGTAGCGAC-1,0,8,52,2445,5059 ACCGAAAGGGCCCTGC-1,0,9,53,2565,5128 ACGTTCCGCGCTCCGT-1,0,8,54,2445,5197 ATACCAGGCTAATAGA-1,0,9,55,2565,5265 CGGCTTTGTATGATAA-1,0,8,56,2445,5334 CTTGACCCGAAAGATA-1,0,9,57,2565,5403 CGCAGAAACATTTGCG-1,0,8,58,2445,5472 GACCCGTCGCCGGCTA-1,0,9,59,2565,5541 AATCGGGACACTACGA-1,0,8,60,2445,5610 GTCACAAAGTTTCCAA-1,0,9,61,2565,5678 TATATTCGCGTCGATA-1,0,8,62,2445,5747 CCTCCCGACAATCCCT-1,0,9,63,2565,5816 CGACATGCGATCTTCT-1,0,8,64,2445,5885 AACACGACTGTACTGA-1,0,9,65,2565,5954 CCCAACCACACTAACA-1,0,8,66,2445,6023 CACCGCCGACCAGCGA-1,0,9,67,2565,6091 TGGTATCGCATCCCAA-1,0,8,68,2445,6160 CAGAGTGATTTAACGT-1,0,9,69,2565,6229 AACCCTGGTGGAACCA-1,0,8,70,2445,6298 GTCAGTTGTGCTCGTT-1,0,9,71,2565,6367 ATTGACGTAACTCGGT-1,0,8,72,2445,6435 GATGTCGGTCAACTGC-1,0,9,73,2565,6504 AGGGCAGCGGCGTGGT-1,0,8,74,2445,6573 ACATCGTTAACCTAGT-1,0,9,75,2565,6642 TCCATTGTGACCTCGT-1,0,8,76,2445,6711 TGTTTAATACTTCATC-1,0,9,77,2565,6780 TTGCTGGCCGGGCTTC-1,0,8,78,2445,6848 CATATTATTTGCCCTA-1,0,9,79,2565,6917 CTGCCTAGCCACCAAG-1,0,8,80,2445,6986 ACGAGATATTTGCTTA-1,0,9,81,2565,7055 GACTACAATTGCTCGT-1,0,8,82,2445,7124 AACGTGATGAAGGACA-1,0,9,83,2565,7193 ACTCTCTTATACACGA-1,0,8,84,2445,7261 CGCATCATGGCTTCAG-1,0,9,85,2565,7330 CGGCTCTTCGTCGAAC-1,0,8,86,2445,7399 ATTCTTCGTACTTATG-1,0,9,87,2565,7468 AGTGAGGGTTTCTGAC-1,0,8,88,2445,7537 GCCAGGCGTTCGCATG-1,0,9,89,2565,7606 GACTAACACAGCACCT-1,0,8,90,2445,7674 CAATGGAATCTACATA-1,0,9,91,2565,7743 GTGGTCAGCGAAGTAT-1,0,8,92,2445,7812 ATGGCTGGAAATGGCC-1,0,9,93,2565,7881 ATCAGGTCGCCATTGC-1,0,8,94,2445,7950 TATCACCATGTAAAGT-1,0,9,95,2565,8019 AGCGCTTATGGGCAAG-1,0,8,96,2445,8087 AAGCGGCGTCATGGGT-1,0,9,97,2565,8156 ACTAATACGTCAGGCG-1,0,8,98,2445,8225 GGCTGAGCATCGTAAG-1,0,9,99,2565,8294 CGGTTGGGTTCAAGTT-1,0,8,100,2445,8363 GACTGATTGGTCACAA-1,0,9,101,2565,8432 AGACGGGCCGATTTAA-1,0,8,102,2445,8500 ACCAGTGCCCGGTCAA-1,0,9,103,2565,8569 GTCCTTTAATGACTTC-1,0,8,104,2445,8638 CCTACAAGTCCGGAAT-1,0,9,105,2565,8707 GCCTGCTACACTGAGA-1,0,8,106,2445,8776 GACTCGGTCGGCGGAT-1,0,9,107,2565,8845 CTAGACATATATGTAG-1,0,8,108,2445,8913 TCGCCCAACTGACTCC-1,0,9,109,2565,8982 AAACTAACGTGGCGAC-1,0,8,110,2445,9051 AACTGAGGTCAGCGTC-1,0,9,111,2565,9120 ACAATGATTCTTCTAC-1,0,8,112,2445,9189 ATAAGTACCCGATTGT-1,0,9,113,2565,9257 ATTGGGAGTTCTGTAA-1,0,8,114,2445,9326 CGAACATAGTCAGAAA-1,0,9,115,2565,9395 TAGCTCAGATCCTAGT-1,0,8,116,2445,9464 GTGTCGTATTCACCTT-1,0,9,117,2565,9533 CTCACCGATCCAAACT-1,0,8,118,2445,9602 ATATGTGCACAAACCA-1,0,9,119,2565,9670 CAGTCCAACGCCTTCT-1,0,8,120,2445,9739 TCGTCCGGGTACACTC-1,0,9,121,2565,9808 GCAGAAACGTAATCCA-1,0,8,122,2445,9877 TTCGAGCCGGCGCTAC-1,0,9,123,2565,9946 GGAAGATAAGACTGTA-1,0,8,124,2445,10015 ATAAGCAAACACCGAG-1,0,9,125,2565,10083 GCATAAATTGAACGCC-1,0,8,126,2445,10152 
CGCCGGTGTCGCAGTA-1,0,9,127,2565,10221 GACCTGGTCTGGGCGT-1,0,10,0,2685,1480 AGCCGCTTGATTAGCG-1,0,11,1,2805,1549 CCCGGCTAGGTGAGAA-1,0,10,2,2685,1617 CGAGCCGAGCACTCGA-1,0,11,3,2805,1686 TAGTGCTTGAATCCTT-1,0,10,4,2685,1755 CAACCGCACCTAGACA-1,0,11,5,2805,1824 ACCACTGTTCAAGAAG-1,0,10,6,2685,1893 AGATGCTATAACGAGC-1,0,11,7,2805,1962 AATTACTCGTACGCTC-1,0,10,8,2685,2030 CGTCAATCTTTAACAT-1,0,11,9,2805,2099 CCAAAGCAGTTGGTTG-1,0,10,10,2685,2168 CCATATTGGATCATGA-1,0,11,11,2805,2237 CGTACCGAAAGTCTAG-1,0,10,12,2685,2306 CTCGAGATCCAAAGCA-1,0,11,13,2805,2375 TGGATAGAGTAACAGA-1,0,10,14,2685,2443 TCACAGATCCTCAAAC-1,0,11,15,2805,2512 AGAGCTACGAAAGCAT-1,0,10,16,2685,2581 TGCGTGATTGGGTGTC-1,0,11,17,2805,2650 CACATGTTTGGACATG-1,0,10,18,2685,2719 TTCGCATCCGGAAGCA-1,0,11,19,2805,2788 CCCTAGTGTCAGGTGT-1,0,10,20,2685,2856 TTACCGCCTTAGGGAA-1,0,11,21,2805,2925 CCAGTCCATTATTCGA-1,0,10,22,2685,2994 CGTAAACGCTTGAGTG-1,0,11,23,2805,3063 ATTCCTTCCAGGCGGT-1,0,10,24,2685,3132 TTCCTTTCTGTGTTGC-1,0,11,25,2805,3201 AGTTGACATCGGCTGG-1,0,10,26,2685,3269 AACTCGATGGCGCAGT-1,0,11,27,2805,3338 GATAAGGCAGATGCAA-1,0,10,28,2685,3407 GGCTGGCTAGCTTAAA-1,0,11,29,2805,3476 CCTCATGCAGCTACGA-1,0,10,30,2685,3545 GACGCCTGTTGCAGGG-1,0,11,31,2805,3614 TAATTAGATGGATATG-1,0,10,32,2685,3682 GAGGGCATCGCGTATC-1,0,11,33,2805,3751 CTTGTGAGTCTTTGAC-1,0,10,34,2685,3820 TCAACACATTGGGTAA-1,0,11,35,2805,3889 ACTGTATACGCGAGCA-1,0,10,36,2685,3958 GTGAAACGTGCTCCAC-1,0,11,37,2805,4026 CGAGTGCTATAGTTCG-1,0,10,38,2685,4095 GTACTGCATGAAGCGT-1,0,11,39,2805,4164 GTAACTTGCGGCAGTC-1,0,10,40,2685,4233 GAATCGCCGGACACGG-1,0,11,41,2805,4302 GGGAGTAATGGCTGGC-1,0,10,42,2685,4371 CATGAACCGACATTTG-1,0,11,43,2805,4439 TCTGTCATACAAGAGC-1,0,10,44,2685,4508 GTCGTCAATTATAAGG-1,0,11,45,2805,4577 TAAAGAGCCCGAAACC-1,0,10,46,2685,4646 GTACTGAGGTCGTAAC-1,0,11,47,2805,4715 AAAGACCCAAGTCGCG-1,0,10,48,2685,4784 CGTCAGTGCGCACAAG-1,0,11,49,2805,4852 TGTATCCTTATTCCAT-1,0,10,50,2685,4921 ATTCTCGTCTCTTTAG-1,0,11,51,2805,4990 AAAGTCACTGATGTAA-1,0,10,52,2685,5059 TGTCTACAGTTTCTGT-1,0,11,53,2805,5128 TTAACGTCGCAAGACC-1,0,10,54,2685,5197 CTATGTCTATTGAAAC-1,0,11,55,2805,5265 TCGGGTGAAACTGCTA-1,0,10,56,2685,5334 TGTCCCGACATAGCAC-1,0,11,57,2805,5403 ACAGCATAGAGCCAGT-1,0,10,58,2685,5472 ATATTCCCACAGGTCA-1,0,11,59,2805,5541 TTGGATCGACTTCTGG-1,0,10,60,2685,5610 CACCATCGGAGGAGAC-1,0,11,61,2805,5678 TCGTTCGTTATTATGT-1,0,10,62,2685,5747 CTTAACTTCGAAGTAC-1,0,11,63,2805,5816 GCACAAGTGTCGGAAG-1,0,10,64,2685,5885 TACCAGCTAGGTTTAA-1,0,11,65,2805,5954 ACGTACAGATTTCTCT-1,0,10,66,2685,6023 AATTTGGTTCCAAAGA-1,0,11,67,2805,6091 GTAAGGATTTGTCGGA-1,0,10,68,2685,6160 CATCATCTACCCGGAC-1,0,11,69,2805,6229 ACGATGGATCCGATGC-1,0,10,70,2685,6298 CACTCAGCTCTTGAGG-1,0,11,71,2805,6367 TAGATCCGAAGTCGCA-1,0,10,72,2685,6436 TGAAACTTATGCAAGC-1,0,11,73,2805,6504 GCGATTCTGGAAGCAG-1,0,10,74,2685,6573 CAAACTATTGAGCTTC-1,0,11,75,2805,6642 TAGAATTAAGGGCAAC-1,0,10,76,2685,6711 CGAAACATAGATGGCA-1,0,11,77,2805,6780 GATGGTGCCCTAGGCA-1,0,10,78,2685,6848 CCCGCAGGGCCCAAAG-1,0,11,79,2805,6917 ACAGCGCACCCGCAGC-1,0,10,80,2685,6986 GGTAAATGTGCGTTAC-1,0,11,81,2805,7055 GTCCTTCTAGTGGGTT-1,0,10,82,2685,7124 GGAAGCTCGCTTACAG-1,0,11,83,2805,7193 CACCGATACACCGAGC-1,0,10,84,2685,7261 CAGCCGGGCCCTCTAT-1,0,11,85,2805,7330 CGGAGCTTATAACACC-1,0,10,86,2685,7399 ATTACAACTACCGGCC-1,0,11,87,2805,7468 TCCTCTGGCCCATTAG-1,0,10,88,2685,7537 CGGCACCGTTAGCGCC-1,0,11,89,2805,7606 TCGGTCCCTGACTCCA-1,0,10,90,2685,7674 TGGTTGGAGGATCCTG-1,0,11,91,2805,7743 CTGCGGTAGTCACGTG-1,0,10,92,2685,7812 GTGCCTCAGTGTACGG-1,0,11,93,2805,7881 ATCGTTCACTTTCGCC-1,0,10,94,2685,7950 
ACTTACCGGGCGCGCA-1,0,11,95,2805,8019 CTAGACTGCATTTCGT-1,0,10,96,2685,8087 TTGCCGGTGATCCCTC-1,0,11,97,2805,8156 CTGTCACGCCAGGCGC-1,0,10,98,2685,8225 CGGTATAGGTATTAGC-1,1,11,99,2805,8294 CCAACGCTTGCCAGGG-1,0,10,100,2685,8363 CGTTGAGTAATTGCGT-1,1,11,101,2805,8432 TGTATTTACCTAATGC-1,0,10,102,2685,8500 TAAATGCCGTCTCATG-1,1,11,103,2805,8569 CGGTTTATGAAGGAAC-1,0,10,104,2685,8638 GCAAGATGTGTTCGCG-1,1,11,105,2805,8707 AAAGGTAAGCTGTACC-1,0,10,106,2685,8776 GTACGTCACGTATTAA-1,1,11,107,2805,8845 AGTACCTTCGAGTGCT-1,0,10,108,2685,8913 ATTGTGACTTCGCTGC-1,1,11,109,2805,8982 TGTATCAGACTGAAGC-1,1,10,110,2685,9051 GAGACCCTGCAACGCC-1,1,11,111,2805,9120 TGGGTGGGATGTCATT-1,1,10,112,2685,9189 GGCTAATGATTGAAAT-1,1,11,113,2805,9258 ATAACGTTACCTCCAC-1,1,10,114,2685,9326 TGCGAGATGGCGGCCA-1,1,11,115,2805,9395 CACACTTGTATTGCGA-1,0,10,116,2685,9464 GCTGGTGACTCGTAGT-1,1,11,117,2805,9533 CGACACCGCTTAAGGA-1,0,10,118,2685,9602 GTAACAACTGACCTTG-1,0,11,119,2805,9670 CAACTGAGGGTATGAC-1,0,10,120,2685,9739 CTAATTATGAAGCGTA-1,0,11,121,2805,9808 CCGATCTTAAGAGGCT-1,0,10,122,2685,9877 CGACTCGGTACACGGT-1,0,11,123,2805,9946 TGCTGCGTCAGAGTTA-1,0,10,124,2685,10015 AGAGTTGCAGGCCTCC-1,0,11,125,2805,10083 ACTGGCGAACCTGCGT-1,0,10,126,2685,10152 ACTAAGGACGCACACC-1,0,11,127,2804,10221 CGTCCAGATGGCTCCA-1,0,12,0,2925,1480 ACTATCGCCGGCTAAA-1,0,13,1,3044,1549 GATAGCGTACCACGCG-1,0,12,2,2925,1617 AGGACTTATAGGAGAA-1,0,13,3,3044,1686 TAGTCGGGATTCTTCG-1,0,12,4,2925,1755 ACCATTAAGGGTGTCA-1,0,13,5,3044,1824 TTAATGTGTTTGCAGG-1,0,12,6,2925,1893 TCCGCAGCCACCTAGC-1,0,13,7,3044,1962 GAGGAGATCCTCATGC-1,0,12,8,2925,2030 GGTCCTTCATACGACT-1,0,13,9,3044,2099 CCCTGTTGGCAAAGAC-1,0,12,10,2925,2168 GTGCCTAGCTATGCTT-1,0,13,11,3044,2237 GTCATCTCCTACAGCT-1,0,12,12,2925,2306 GCGAGAAACGGGAGTT-1,0,13,13,3044,2375 CGTCGCGGCGGGATTT-1,0,12,14,2925,2443 CATTGTGTGCTAGATC-1,0,13,15,3044,2512 CGCGGCAGTATTACGG-1,0,12,16,2925,2581 GAAATGGGATGTAAAC-1,0,13,17,3044,2650 CATTCCCTAAGTACAA-1,0,12,18,2925,2719 AGTTCTGCGTTGTATC-1,0,13,19,3044,2788 ACCCTATAGGACTGAG-1,0,12,20,2925,2856 TTAGATAGGTCGATAC-1,0,13,21,3044,2925 ATTGATAGCAACGAGA-1,0,12,22,2925,2994 TCTCGGCTCCAGGACT-1,0,13,23,3044,3063 CTTGAGGTTATCCCGA-1,0,12,24,2925,3132 AAGAAGGATCAGTTAG-1,0,13,25,3044,3201 GTACGACGGCGCTGCG-1,0,12,26,2925,3269 CTTATGTTGACTACCA-1,0,13,27,3044,3338 CGGCACTCAAGAAAGT-1,0,12,28,2925,3407 GTCAAAGTTTACATAG-1,0,13,29,3044,3476 CTCCTAAGTTATGTCT-1,0,12,30,2925,3545 ACTGTGCTAGTAGATC-1,0,13,31,3044,3614 GTTTGGCCGCTCAGCG-1,0,12,32,2925,3682 TTATCCAATCGAACTC-1,0,13,33,3044,3751 CCGTACCCAAGCGCCA-1,0,12,34,2924,3820 CATACAAAGCCGAACC-1,0,13,35,3044,3889 GGTCGGTAATTAGACA-1,0,12,36,2924,3958 AGCGGGAAGGGTCCAT-1,0,13,37,3044,4027 GCCCACCAAGGCTGTC-1,0,12,38,2924,4095 GTAGACGTCGTTACAT-1,0,13,39,3044,4164 GAGCATCATCCCTGGG-1,0,12,40,2924,4233 AGGTAACCTCCTATTC-1,0,13,41,3044,4302 GGTTTGAGTGCTGGAA-1,0,12,42,2924,4371 GCACTAGTCGCGCTAT-1,0,13,43,3044,4439 GGTAACTATGTATCTG-1,0,12,44,2924,4508 GCGGTCCCTAGACGCA-1,0,13,45,3044,4577 CGAGCGTTGATCAGCC-1,0,12,46,2924,4646 AATCCAAGGGCCTGAG-1,0,13,47,3044,4715 CCGTGCCCATGACGGC-1,0,12,48,2924,4784 GAAATTCACATCGCTG-1,0,13,49,3044,4852 CTCTGCGAAGCAAGCA-1,0,12,50,2924,4921 AGTAGGTAACATACAT-1,0,13,51,3044,4990 ATTGGGAATATCTTGG-1,0,12,52,2924,5059 TAGAGCTACGAAGAAC-1,0,13,53,3044,5128 TGCGGCATAGTTCAAC-1,0,12,54,2924,5197 CCGCCGGTCAACACAC-1,0,13,55,3044,5265 TCGTATAGTGCAATTA-1,0,12,56,2924,5334 TAGTTTATTCTTGCTT-1,0,13,57,3044,5403 GATATCTCATGCAATA-1,0,12,58,2924,5472 CGTTTAAGCGGAGCAC-1,0,13,59,3044,5541 CATGCTGGCTCCAATT-1,0,12,60,2924,5610 GAAACAGCCATGCAGT-1,0,13,61,3044,5678 
AGTTTCGCAGGTCGGA-1,0,12,62,2924,5747 CTCATGGCTCACAATC-1,0,13,63,3044,5816 AACCGTTGTGTTTGCT-1,0,12,64,2924,5885 ACCCTTCATCTGCGAA-1,0,13,65,3044,5954 TGCGGTGAAATTTCAT-1,0,12,66,2924,6023 CAAATTGTCAGCAAGC-1,0,13,67,3044,6091 GAGGTACATCCATCTT-1,0,12,68,2924,6160 AAATGGCATGTCTTGT-1,0,13,69,3044,6229 TCATCCCAGAGGGTGG-1,0,12,70,2924,6298 CGTAGCGAATTGTCAG-1,0,13,71,3044,6367 CCTAGTTAGTCGCATG-1,0,12,72,2924,6436 GAAACTCTAATGAAGG-1,0,13,73,3044,6504 TTGTATCACACAGAAT-1,0,12,74,2924,6573 TTCAAGCCGAGCTGAG-1,0,13,75,3044,6642 AGGTACGATATTGCCA-1,0,12,76,2924,6711 TTAAGCCGACAACTTC-1,0,13,77,3044,6780 GTCTTAGTACAGCCGG-1,0,12,78,2924,6848 TGGGTAAGGTTCCCGC-1,0,13,79,3044,6917 CTACGCCATTTCCGAT-1,0,12,80,2924,6986 GACCGTCAGGTCGTGA-1,0,13,81,3044,7055 TAGTTAAGATAGGATA-1,0,12,82,2924,7124 GAATATTCGGAGTCCC-1,0,13,83,3044,7193 CAAACTACGATAGAGA-1,0,12,84,2924,7261 CAGGAAGACTTTATAT-1,0,13,85,3044,7330 TTCGTAATCCCAGCGG-1,0,12,86,2924,7399 GTGAGGAGCGGTTGAG-1,1,13,87,3044,7468 CAATCCCTATACCAGC-1,1,12,88,2924,7537 AGGGAAACGAGGTACT-1,1,13,89,3044,7606 TCCTGCCAACTGGAGA-1,1,12,90,2924,7674 AATTTGGGACATAGTA-1,1,13,91,3044,7743 AACTCCTAATCCCATG-1,1,12,92,2924,7812 GCTCATTACTGCATGT-1,1,13,93,3044,7881 TCTCGAACGAGGTCAC-1,1,12,94,2924,7950 TCACTACGACCAATGC-1,1,13,95,3044,8019 GTGATGCACAACATCT-1,1,12,96,2924,8087 CCGACGTAAACACAAC-1,1,13,97,3044,8156 TGGGATGCACTCATTC-1,1,12,98,2924,8225 TTCCATCATGCGGTGA-1,1,13,99,3044,8294 TGCACAGTGAAGTTAT-1,1,12,100,2924,8363 CTATTGTGTTTGGTCA-1,1,13,101,3044,8432 TGCGAGCCCTTCCGCG-1,1,12,102,2924,8500 TTGAAAGGTGTAAAGG-1,1,13,103,3044,8569 GGTGCAGAGCCTATCG-1,1,12,104,2924,8638 ACTATATGCTGTGTTC-1,1,13,105,3044,8707 TCCTGGCGCTGCCTGG-1,1,12,106,2924,8776 GAGCCGAGCGTTTATT-1,1,13,107,3044,8845 AGAATGCGGGTTCGGA-1,1,12,108,2924,8913 ATGCGACAATTGGTCC-1,1,13,109,3044,8982 TTCCGGCTCGACTTCT-1,1,12,110,2924,9051 TGATTATGGCACGCAG-1,1,13,111,3044,9120 GGTTTGACAAGAAGCT-1,1,12,112,2924,9189 GCAGCTATGGACAGGT-1,1,13,113,3044,9258 CACCATGATCGCAAAG-1,1,12,114,2924,9326 GTCGGAAGGATACCAG-1,1,13,115,3044,9395 GGCCCAGTTATCAGCA-1,0,12,116,2924,9464 GGGCCTATTTAAGTAT-1,0,13,117,3044,9533 GTTGTTACATTGCGCT-1,0,12,118,2924,9602 CAACTCCGTAACTTGC-1,0,13,119,3044,9670 GATCTTTGCTCAAAGA-1,0,12,120,2924,9739 TCGCTTAATTACGAAG-1,0,13,121,3044,9808 CGATCATTAGAGGCAC-1,0,12,122,2924,9877 TGTTCTCTACTCCCTA-1,0,13,123,3044,9946 GCTTAGGGAAGCGGTA-1,0,12,124,2924,10015 CAGGTTTAGTACTACA-1,0,13,125,3044,10083 AAGCGGAGTGCGCGCA-1,0,12,126,2924,10152 TCAGATGGAGACGTAG-1,0,13,127,3044,10221 TCGCACTAACGTTTGT-1,0,14,0,3164,1480 CACGTCGGGTTCTAGA-1,0,15,1,3284,1549 GGAGTACACATGAGCT-1,0,14,2,3164,1617 GTGTGTCGACGTCGCT-1,0,15,3,3284,1686 GAAGCTCTTTGCTTAG-1,0,14,4,3164,1755 ACACCGAGCGCTCTTT-1,0,15,5,3284,1824 CGTAATAATTACGAGT-1,0,14,6,3164,1893 CATCAACACCTACTAA-1,0,15,7,3284,1962 CCAAGTTTCTACAGAT-1,0,14,8,3164,2030 ACGGGTCATGTGACTT-1,0,15,9,3284,2099 AGTGTGCTAAGATCGC-1,0,14,10,3164,2168 GGCGGTTTGCCGGTGC-1,0,15,11,3284,2237 GTATAATCTCCCGGAT-1,0,14,12,3164,2306 TAGTCCGTATGCATAA-1,0,15,13,3284,2375 CACTTCGTCTTATCTC-1,0,14,14,3164,2443 CATCCGCAGGCCCGAA-1,0,15,15,3284,2512 CCCTGATGTAACTCGT-1,0,14,16,3164,2581 CCATAGTCAGTAACCC-1,0,15,17,3284,2650 CGGGCCATAGCCGCAC-1,0,14,18,3164,2719 CTCCGGCTTGTAGACA-1,0,15,19,3284,2788 AACTTGCGTTCTCGCG-1,0,14,20,3164,2856 AATGAGTTCGCATATG-1,0,15,21,3284,2925 CGAGGCCAGGCATTGG-1,0,14,22,3164,2994 TCTGCGTCCGGTTTCT-1,0,15,23,3284,3063 CAATCCTGCCGTGGAG-1,0,14,24,3164,3132 CTGAGCAAGTAACAAG-1,0,15,25,3284,3201 GGGTACCCACGGTCCT-1,0,14,26,3164,3269 ACGGAATTTAGCAAAT-1,0,15,27,3284,3338 GGGCGGTCCTATTGTC-1,0,14,28,3164,3407 
ATGTTACGAGCAATAC-1,0,15,29,3284,3476 AACCATGGGATCGCTA-1,0,14,30,3164,3545 TCGCATCCCTAAGTGT-1,0,15,31,3284,3614 ACTTAGTACGACAAGA-1,0,14,32,3164,3682 GAGCTCTCGGACCTAA-1,0,15,33,3284,3751 TCTATTACGCTGGCGA-1,0,14,34,3164,3820 AGATACGACTTCATAT-1,0,15,35,3284,3889 CGCTATACCGCCCACT-1,0,14,36,3164,3958 CAGTGTCCGCAGAATG-1,0,15,37,3284,4027 CCATCCATACCAAGTC-1,0,14,38,3164,4095 AACCCAGAGACGGAGA-1,0,15,39,3284,4164 GAAGAACGGTGCAGGT-1,0,14,40,3164,4233 GATAAATCGGTGGATG-1,0,15,41,3284,4302 CAGCTCGTGCTTGTGT-1,0,14,42,3164,4371 GAGTACGGGTATACAA-1,0,15,43,3284,4439 CATCGCCCGCGGCCAA-1,0,14,44,3164,4508 TCTTACAGAGGTACCG-1,0,15,45,3284,4577 TGGAAGACGAACACCA-1,0,14,46,3164,4646 GTTGTCGTGTTAGTTG-1,0,15,47,3284,4715 CCAAGGAACAGAGAGG-1,0,14,48,3164,4784 CTGCACCTGGAACCGC-1,0,15,49,3284,4852 CGCTTTCATACCGGTG-1,0,14,50,3164,4921 GTTCTTCCCTCGATGT-1,0,15,51,3284,4990 ATTTAACTCGTATTAC-1,0,14,52,3164,5059 AACGATAGAAGGGCCG-1,0,15,53,3284,5128 TATCCTGCATGGGAAT-1,0,14,54,3164,5197 AGGCCCATTGTACAGG-1,0,15,55,3284,5265 CCGGCGCATATTGGAT-1,0,14,56,3164,5334 ATCTGTAATTGTACCC-1,0,15,57,3284,5403 GAGCGAGGGAGTACCG-1,0,14,58,3164,5472 TTATTAGGGAAGCATC-1,0,15,59,3284,5541 CTTCTTACGTCGTATA-1,0,14,60,3164,5610 GAAGTGCTGGATAGCT-1,0,15,61,3284,5678 GTGCAACAAATGTGGC-1,0,14,62,3164,5747 CATGCGTTAGACAGAA-1,0,15,63,3284,5816 ACACACTTTCTACACG-1,0,14,64,3164,5885 AGCCCTAAGCGAAGTT-1,0,15,65,3284,5954 ATTAATTCGGTCACTC-1,0,14,66,3164,6023 AACAGGAAATCGAATA-1,0,15,67,3284,6091 ACGTTTAGTTGTGATC-1,0,14,68,3164,6160 TCCTTCAGTGGTCGAA-1,0,15,69,3284,6229 CGAACGCCCAGTGCCG-1,0,14,70,3164,6298 CCTCGAAGTGGACGGG-1,0,15,71,3284,6367 CTCTGTTTGAGGATTC-1,0,14,72,3164,6436 TGGGCACGTTCTATGG-1,0,15,73,3284,6504 ACTATTCGTCCGTGGT-1,0,14,74,3164,6573 CCTCTGGCCTAGACGG-1,1,15,75,3284,6642 CCATAAACAACCCGAC-1,0,14,76,3164,6711 CATAGTACATTGAGAG-1,1,15,77,3284,6780 ATTTCATTATTTCGCG-1,0,14,78,3164,6848 CAACTATATCGAATGC-1,1,15,79,3284,6917 CTAGTATTCGGAATTA-1,0,14,80,3164,6986 GTGGAACCTACATGCG-1,1,15,81,3284,7055 CCTAAAGGCTGACGCT-1,1,14,82,3164,7124 CGTGACATTGGGTCGT-1,1,15,83,3284,7193 CCAATCGGTAGATCGA-1,1,14,84,3164,7261 ATTGTCGCAATACCTT-1,1,15,85,3284,7330 AAATTACACGACTCTG-1,1,14,86,3164,7399 CACTCCTCTCGGTCGG-1,1,15,87,3284,7468 AAATAACCATACGGGA-1,1,14,88,3164,7537 AGTTACTCTATCGTGG-1,1,15,89,3284,7606 CGTTAGCTCACAACTG-1,1,14,90,3164,7674 GAATGTATGGCAGGTC-1,1,15,91,3284,7743 GCAACCACCAGACCGG-1,1,14,92,3164,7812 TCACTCGTGCAACGGC-1,1,15,93,3284,7881 AAACAGAGCGACTCCT-1,1,14,94,3164,7950 CAGCCTCTCCTCAAGA-1,1,15,95,3284,8019 TTGCGTGAACGCTTAG-1,1,14,96,3164,8087 CCGCCTGCGAATTGGT-1,1,15,97,3284,8156 AGATGAGGGTTGCGAT-1,1,14,98,3164,8225 CGGTGGGCTCCAGCCT-1,1,15,99,3284,8294 GGCAGCGGTAATCCTA-1,1,14,100,3164,8363 GCTAGCAGGGAGTGGG-1,1,15,101,3284,8432 CTCAAGACATTAGCGC-1,1,14,102,3164,8500 CACGGCGCGCCAAAGG-1,1,15,103,3284,8569 TGCAATTTGGGCACGG-1,1,14,104,3164,8638 ATGCCAATCGCTCTGC-1,1,15,105,3284,8707 GCTGGACCCAAAGTGG-1,1,14,106,3164,8776 ATTCCTAAGACGTGGA-1,1,15,107,3284,8845 TCCGGAGGAAGGGCTG-1,1,14,108,3164,8913 TCGGTGACCGCTCCGG-1,1,15,109,3284,8982 TCCGAAGTAGTCACCA-1,1,14,110,3164,9051 CATGTAGGAGCGCCAA-1,1,15,111,3284,9120 CACAAGAAAGATATTA-1,1,14,112,3164,9189 AGGGTCAGTAACCCTA-1,1,15,113,3284,9258 TAAGCCCTTACGACCA-1,1,14,114,3164,9326 ATACCGTCATCCATAA-1,1,15,115,3284,9395 GGACGTCCATAGTTGG-1,1,14,116,3164,9464 CATCAAACTGGCGCCC-1,0,15,117,3284,9533 AAACGTGTTCGCCCTA-1,0,14,118,3164,9602 AAATTGGTGAGAAGCA-1,0,15,119,3284,9670 GGTCATTGTAGTCATA-1,0,14,120,3164,9739 TGCAGTGAGGCTCGGG-1,0,15,121,3284,9808 GAACATTAGTATGTTA-1,0,14,122,3164,9877 GGTTTGCGAACACGTA-1,0,15,123,3284,9946 
ACACAAATATTCCTAG-1,0,14,124,3164,10015 TTGGGTTTATTCAGCG-1,0,15,125,3284,10083 ATTCGCAGAGGACACT-1,0,14,126,3164,10152 GATTTAGTGCGTACTG-1,0,15,127,3284,10221 TAGAAACACAATAGTG-1,0,16,0,3404,1480 CAGTAGATGATGTCCG-1,0,17,1,3523,1549 TCTTAACTCGGATGTA-1,0,16,2,3404,1617 TACATCTTGTTTCTTG-1,0,17,3,3523,1686 TTCATAGGGTGTCCAT-1,0,16,4,3404,1755 TGAAGTAGCTTACGGA-1,0,17,5,3523,1824 GCACAAGTGGATCATA-1,0,16,6,3404,1893 GGGCGAATTTCTCCAC-1,0,17,7,3523,1962 ATGTTCCTGCCCACCT-1,0,16,8,3404,2030 GCTCAACCTCTTAGAG-1,0,17,9,3523,2099 ATAGCTGCTCTTGTTA-1,0,16,10,3404,2168 CGTCAGCTATTTACTC-1,0,17,11,3523,2237 ATCTGATAGTGTCTTA-1,0,16,12,3404,2306 TGCACTATGTGAGTGC-1,0,17,13,3523,2375 CCGACAAACACATGAG-1,0,16,14,3404,2443 GCCTTGTATATGCAGT-1,0,17,15,3523,2512 ATAATACCGTTAGCCG-1,0,16,16,3404,2581 ACACTCCAATGTCACT-1,0,17,17,3523,2650 AGTTGCTGACTGATAT-1,0,16,18,3404,2719 GGCGCTCCTCATCAAT-1,0,17,19,3523,2788 TGCCTGACATCGGTCA-1,0,16,20,3404,2856 TTGGCCATCTTGCGCT-1,0,17,21,3523,2925 CAGTGGTTGCACATGA-1,0,16,22,3404,2994 AGAATTGTTTGACATA-1,0,17,23,3523,3063 AAATGCTCGTTACGTT-1,0,16,24,3404,3132 CACCTAATAGAGTCGT-1,0,17,25,3523,3201 CATTTCTAGCAGACTA-1,0,16,26,3404,3269 CCGAAAGTGGTGAGCA-1,0,17,27,3523,3338 AGTCAGCCACCGCCTG-1,0,16,28,3404,3407 TCATCACTCGAGCTCG-1,0,17,29,3523,3476 CTCAGGACTCACCTGT-1,0,16,30,3404,3545 CGGTGTACTTGATCCC-1,0,17,31,3523,3614 CCTACGGCTCAGTCGA-1,0,16,32,3404,3682 GTACTTGGGCACTTCT-1,0,17,33,3523,3751 TGATTTCCTCCTGACG-1,0,16,34,3404,3820 CCTCACCAATCTTGAC-1,0,17,35,3523,3889 GGTGAGATGCAGATAA-1,0,16,36,3404,3958 GCTAGTTTCATTGAGG-1,0,17,37,3523,4027 AGGACATCGCACGTCG-1,0,16,38,3404,4095 GTGGACGTGCTGAGAC-1,0,17,39,3523,4164 TAAGGAACTTGTGGGA-1,0,16,40,3404,4233 TCGCTGTGCGTAAATC-1,0,17,41,3523,4302 GCATCCCTAACTTTGA-1,0,16,42,3404,4371 CACCCACACGTCACCC-1,0,17,43,3523,4439 CCCTCATTCTGGAATT-1,0,16,44,3404,4508 AGGGCGTGATCGGCTA-1,0,17,45,3523,4577 GGTGCGGATAAGTGGC-1,0,16,46,3404,4646 TAATATTGAAATTCGC-1,0,17,47,3523,4715 CTTACACTGGGAAATA-1,0,16,48,3404,4784 ACCAAGAACGCGTGTC-1,0,17,49,3523,4852 GCCTTCAGCCCTACCG-1,0,16,50,3404,4921 GATGCTACAAGCGCCT-1,0,17,51,3523,4990 CCGGGACCCGCAGAGA-1,0,16,52,3404,5059 GTTCCAGTCTGACCAT-1,0,17,53,3523,5128 ATGATCGGGAATAGAC-1,0,16,54,3403,5197 TTGGATTGGGTACCAC-1,0,17,55,3523,5265 TACCTCACGCTTGTAC-1,0,16,56,3403,5334 CATGGCAGGAAGATCG-1,0,17,57,3523,5403 ATGACGCCGGCTCTAA-1,0,16,58,3403,5472 AGCGACATCCCATTCA-1,0,17,59,3523,5541 AGTAATGTCTTGCCGC-1,0,16,60,3403,5610 TTCTTAGTGGCTCAGA-1,0,17,61,3523,5678 CGTCTGGAAGGGCCCG-1,0,16,62,3403,5747 ACGTGCGCCTCGTGCA-1,1,17,63,3523,5816 AGAGCGGGCTAATCAT-1,0,16,64,3403,5885 GCGTCGAAATGTCGGT-1,1,17,65,3523,5954 AACTGATATTAGGCCT-1,0,16,66,3403,6023 CGAGCTGGGCTTTAGG-1,1,17,67,3523,6091 GGGTGTTTCAGCTATG-1,0,16,68,3403,6160 TTAATTTCAGACGCGG-1,1,17,69,3523,6229 ACTGCCGTCGTAACTC-1,1,16,70,3403,6298 GTGCACGAAAGTGACT-1,1,17,71,3523,6367 ATCTCCCTGCAATCTA-1,1,16,72,3403,6436 ACGCCAGATGATTTCT-1,1,17,73,3523,6504 AGCTATTTAATCCAAC-1,1,16,74,3403,6573 CCACGAGAAGAGAATC-1,1,17,75,3523,6642 GATTCCGCGTTTCCGT-1,1,16,76,3403,6711 GTCGGATGTAGCGCGC-1,1,17,77,3523,6780 TATTTATACCGAGTAG-1,1,16,78,3403,6849 GTAGGTGATCCGTGTA-1,1,17,79,3523,6917 AGTTAAGCGGTCCCGG-1,1,16,80,3403,6986 CTGGCGACATAAGTCC-1,1,17,81,3523,7055 TTGGCCTAGAATTTCG-1,1,16,82,3403,7124 GGCATATCGGTTCTGC-1,1,17,83,3523,7193 GGGCGTCCACTGGCTC-1,1,16,84,3403,7261 TTACCCATTGCCGGGT-1,1,17,85,3523,7330 TTAGACACGATCGTTG-1,1,16,86,3403,7399 GCGCTGATCCAGACTC-1,1,17,87,3523,7468 TTCGGCAACCCGCTGA-1,1,16,88,3403,7537 GATATTTCCTACATGG-1,1,17,89,3523,7606 CTGCGTTACGATATAA-1,1,16,90,3403,7674 TAATAAACAAGGAGAT-1,1,17,91,3523,7743 
AACCTTTACGACGTCT-1,1,16,92,3403,7812 AGTCCCGCCTTTAATT-1,1,17,93,3523,7881 TGAGATTAGGCCCTAA-1,1,16,94,3403,7950 AGTGTATTGCGCATTG-1,1,17,95,3523,8019 GTTGGATTCAGTGGCT-1,1,16,96,3403,8087 TAAAGCTGCAATAGGG-1,1,17,97,3523,8156 AGTAGGAAGGAAGTTG-1,1,16,98,3403,8225 TATCACTTCGAGTAAC-1,1,17,99,3523,8294 TGATCTACGCTGATCT-1,1,16,100,3403,8363 GGATCATCCCGTACGC-1,1,17,101,3523,8432 TGACACTTCTCTTTGC-1,1,16,102,3403,8500 AGCCCTTCTAATCCGA-1,1,17,103,3523,8569 CACCGCGTCCACTCTA-1,1,16,104,3403,8638 TAATTGGAATCGGGAA-1,1,17,105,3523,8707 TCGTAAGCTCCGAGGA-1,1,16,106,3403,8776 TATATTACAAATGTCG-1,1,17,107,3523,8845 CGCGAGAGGGACTTGT-1,1,16,108,3403,8913 GGACCTACGGTAACGT-1,1,17,109,3523,8982 GAAATATGCTTGAATG-1,1,16,110,3403,9051 CCGTATTAGCGCAGTT-1,1,17,111,3523,9120 AGGCGTCTATGGACGG-1,1,16,112,3403,9189 AACATCGATACGTCTA-1,1,17,113,3523,9258 TGAATATGCTATAAAC-1,1,16,114,3403,9326 ACCAAACACCCAGCGA-1,1,17,115,3523,9395 TGGCTTGTACAAGCTT-1,1,16,116,3403,9464 GAATGAAGGTCTTCAG-1,0,17,117,3523,9533 AGATACCAGCACTTCA-1,0,16,118,3403,9602 GCGGTCCCGGTGAAGG-1,0,17,119,3523,9671 GAGGCATTTGCAGCAG-1,0,16,120,3403,9739 GGCAAGCCAGGGATAG-1,0,17,121,3523,9808 TCTACGGGCTCAGTTG-1,0,16,122,3403,9877 TCTGCGAATCGTTCGC-1,0,17,123,3523,9946 AGCTCGTTGATGGAAA-1,0,16,124,3403,10015 TGAATGAGATACAGCA-1,0,17,125,3523,10083 ACCCTTGCCTGGGTCG-1,0,16,126,3403,10152 GGCGAACCGTTCTGAT-1,0,17,127,3523,10221 GCGATGTCTGTGCTTG-1,0,18,0,3643,1480 ATTAACACCTGAGATA-1,0,19,1,3763,1549 GAAATCTGACCAAGTT-1,0,18,2,3643,1618 CCTGACAAACTCGCGC-1,0,19,3,3763,1686 ATGTCATTTCCCATTG-1,0,18,4,3643,1755 GCGGTGCGGAGCATCG-1,0,19,5,3763,1824 CGGAAAGCAAATGTGC-1,0,18,6,3643,1893 GCTGAGCAACGGTTCT-1,0,19,7,3763,1962 TCACTCTTCGTCTGTC-1,0,18,8,3643,2030 GATCTTCATTGTCCTC-1,0,19,9,3763,2099 ACTCGATGTATTTCAT-1,0,18,10,3643,2168 TGAAGAGCGGTCCTAG-1,0,19,11,3763,2237 TAAACGTCGTCAATGA-1,0,18,12,3643,2306 ACGTTATTGGTCACTC-1,0,19,13,3763,2375 ATAGCCTCAGTACCCA-1,0,18,14,3643,2443 CGGGAGTATACCGCCG-1,0,19,15,3763,2512 GTCCTACAGGCGGCTC-1,0,18,16,3643,2581 CGGATAAGCGGACATG-1,0,19,17,3763,2650 AACTTCTGCGTCTATC-1,0,18,18,3643,2719 GGGTTCAGACGAACAA-1,0,19,19,3763,2788 AGTCTGGACATCCTTG-1,0,18,20,3643,2856 TTGAACGAATCCTTTG-1,0,19,21,3763,2925 GAAATACTAAACGTTT-1,0,18,22,3643,2994 CCCGCGCAATGCACCC-1,0,19,23,3763,3063 TTCGGCTAGAGATGGT-1,0,18,24,3643,3132 GACACGAGTTAGAGGA-1,0,19,25,3763,3201 GAGGTCCCAAAGATCT-1,0,18,26,3643,3269 TAACTCCATGGAGGCT-1,0,19,27,3763,3338 CTTGTTTATGTAGCCA-1,0,18,28,3643,3407 GATGGCGCACACATTA-1,0,19,29,3763,3476 ATAATAGTGTAGGGAC-1,0,18,30,3643,3545 CGCTATTCAATGTATG-1,0,19,31,3763,3614 ATATTGCTGTCAAAGT-1,0,18,32,3643,3682 GGATTCAGTACGGTGG-1,0,19,33,3763,3751 TTCTTAGTGAACGGTG-1,0,18,34,3643,3820 AATGGTTCTCACAAGC-1,0,19,35,3763,3889 TATACACGCAAAGTAT-1,0,18,36,3643,3958 CTTCATAGCTCAAGAA-1,0,19,37,3763,4027 CAACGGTTCTTGATAC-1,0,18,38,3643,4095 ACACCCGAGAAATCCG-1,0,19,39,3763,4164 TCTATCATGCAGTTAC-1,0,18,40,3643,4233 CCCGCCATGCTCCCGT-1,0,19,41,3763,4302 CGCTTCCACTGAAATC-1,0,18,42,3643,4371 CACTGTCCAAGTGAGA-1,0,19,43,3763,4440 ATTACTAGCCTCTTGC-1,0,18,44,3643,4508 CATAGTAGCATAGTAG-1,0,19,45,3763,4577 CAACTCCTTGATCCCG-1,0,18,46,3643,4646 AAGTAGAAGACCGGGT-1,0,19,47,3763,4715 GCGGGAACCAGGCCCT-1,0,18,48,3643,4784 ATTAGATTGATAGCGG-1,0,19,49,3763,4852 CTCGGTCCGTAGCCTG-1,0,18,50,3643,4921 TGGCTTTGGGTAGACA-1,0,19,51,3763,4990 TATCCATATCATGCGA-1,0,18,52,3643,5059 GGAGTGCCGCCCTGGA-1,0,19,53,3763,5128 TGAGAATGCTTTACCG-1,0,18,54,3643,5197 TTAACCAACCCTCCCT-1,1,19,55,3763,5265 TGTTTCGGTACTTCTC-1,0,18,56,3643,5334 TTGCTGAAGGAACCAC-1,1,19,57,3763,5403 TATTTAGTCTAGATCG-1,1,18,58,3643,5472 
CTCCGGCCTAATATGC-1,1,19,59,3763,5541 TTGTGGCCCTGACAGT-1,1,18,60,3643,5610 TCGCCGGTCGATCCGT-1,1,19,61,3763,5678 CCATAGGTTGGCGTGG-1,1,18,62,3643,5747 GAACGACCGAATGATA-1,1,19,63,3763,5816 TCCGATAATTGCCATA-1,1,18,64,3643,5885 CATTACGTCGGCCCGT-1,1,19,65,3763,5954 CAAGCACCAAATGCCT-1,1,18,66,3643,6023 TGCATGGATCGGATCT-1,1,19,67,3763,6091 GAAATCGCGCGCAACT-1,1,18,68,3643,6160 CTGAAAGAGATCCGAC-1,1,19,69,3763,6229 CACCTCGATGGTGGAC-1,1,18,70,3643,6298 ATTTGTTCCAGGGCTC-1,1,19,71,3763,6367 TGGGCCACAAGAGCGC-1,1,18,72,3643,6436 CCTTCTTGATCCAGTG-1,1,19,73,3763,6504 CCTCGCCAGCAAATTA-1,1,18,74,3643,6573 TTCATGGCGCAACAGG-1,1,19,75,3763,6642 TTAATCAGTACGTCAG-1,1,18,76,3643,6711 CCTATCTATATCGGAA-1,1,19,77,3763,6780 ATTATACTTTGCTCGT-1,1,18,78,3643,6849 ATGGATCCGGCGTCCG-1,1,19,79,3763,6917 CGCCCGCTTCCGTACA-1,1,18,80,3643,6986 GGATTCCGCTATACCC-1,1,19,81,3763,7055 CGGTCTATCAACCCGT-1,1,18,82,3643,7124 ATGCCGGTTGATGGGA-1,1,19,83,3763,7193 TCATGCAGGTTCTCAT-1,1,18,84,3643,7261 TGAGCTTTAATGACGC-1,1,19,85,3763,7330 TCCCTTAGATTACTCG-1,1,18,86,3643,7399 ATATCTCCCTCGTTAA-1,1,19,87,3763,7468 AGCTCTTCCCAGTGCA-1,1,18,88,3643,7537 TCGCTAAACCGCTATC-1,1,19,89,3763,7606 CACATTCTTTCGATGG-1,1,18,90,3643,7674 GATATGCGGTAGCCAA-1,1,19,91,3763,7743 CGTTTCACTTCGGGCG-1,1,18,92,3643,7812 CCAATTACGGGTCGAG-1,1,19,93,3763,7881 GCAGGTAGAGTATGGT-1,1,18,94,3643,7950 GTCGTATTGGCGTACA-1,1,19,95,3763,8019 GAAATTAGCACGGATA-1,1,18,96,3643,8087 AATGCACCAAGCAATG-1,1,19,97,3763,8156 AGGACGCTCGATGTTG-1,1,18,98,3643,8225 GGCTAAAGGGCGGGTC-1,1,19,99,3763,8294 CATCTATCCCGTGTCT-1,1,18,100,3643,8363 CAGTAACTATTTATTG-1,1,19,101,3763,8432 CATATACTACTGATAA-1,1,18,102,3643,8500 GCGTTCGGAGACCGGG-1,1,19,103,3763,8569 AAGTTCAGTCTGCGTA-1,1,18,104,3643,8638 CGAAGCTATAAATTCA-1,1,19,105,3763,8707 CGCGGTCACAAACCAA-1,1,18,106,3643,8776 GGGAATGAGCCCTCAC-1,1,19,107,3763,8845 ACGGAGCGCAAATTAC-1,1,18,108,3643,8913 CGTTCTTCGCACACCT-1,1,19,109,3763,8982 GAATAGCCCTGCGGTC-1,1,18,110,3643,9051 AATAGCTACCGCGTGC-1,1,19,111,3763,9120 CCGAGCTGTGCTTGTC-1,1,18,112,3643,9189 GATGACGATGATCGCG-1,1,19,113,3763,9258 GCCTATGCTGGGCCTT-1,0,18,114,3643,9326 TTACTGTCTAGAGCTC-1,1,19,115,3763,9395 AGCGGTTGCCGCTCTG-1,0,18,116,3643,9464 GCTTGCAGCACAATTG-1,0,19,117,3763,9533 CCGGAGGTCTTATGGT-1,0,18,118,3643,9602 ACAGTATACCGTGGGA-1,0,19,119,3763,9671 GGGATCCCAATACAAA-1,0,18,120,3643,9739 ATTACGACTCCACAGT-1,0,19,121,3763,9808 CTCACACGCAAGCCTA-1,0,18,122,3643,9877 CCAGATGTAAATGGGT-1,0,19,123,3763,9946 GAACTTGTGCACGGGA-1,0,18,124,3643,10015 AAGCCGCTTTACCTTG-1,0,19,125,3763,10083 TCCATTCCCACTAGAG-1,0,18,126,3643,10152 AGAGCGCTTGTAACGG-1,0,19,127,3763,10221 TGGGTTCCCGGACGGA-1,0,20,0,3883,1480 GCTGCGCCTCCCACGT-1,0,21,1,4002,1549 CTGTTGGCTCTTCTGA-1,0,20,2,3883,1618 TTGTTCTAGATACGCT-1,0,21,3,4002,1686 CCCTCAAGCTCTTAGT-1,0,20,4,3883,1755 TGGTCTAGCTTACATG-1,0,21,5,4002,1824 ATGCACCTTCCTTAAT-1,0,20,6,3883,1893 GGGATACGGTAATAAT-1,0,21,7,4002,1962 AGTTCACCGGTTGGAC-1,0,20,8,3883,2030 GACATACTGTCGCAGA-1,0,21,9,4002,2099 TGGACACCGTTGCTTG-1,0,20,10,3883,2168 TGCGATGCTAATGGCT-1,0,21,11,4002,2237 TTCTGTTTCCTGTCGC-1,0,20,12,3883,2306 CGTTGTAAACGTCAGG-1,0,21,13,4002,2375 GATCGGCGATAAGTCG-1,0,20,14,3883,2443 AGCCTTAAAGCGGAAG-1,0,21,15,4002,2512 TCCGTAACCACAATCC-1,0,20,16,3883,2581 GAATGCCGAAATGACC-1,0,21,17,4002,2650 TATACTCATGCGGCAA-1,0,20,18,3883,2719 TAGTGTCAGAAACGGC-1,0,21,19,4002,2788 CGTCATACCATATCCA-1,0,20,20,3883,2856 TAGTACCTTAGTGGTC-1,0,21,21,4002,2925 CTGGCGGGAATAAGTA-1,0,20,22,3883,2994 AGTGTGGTCTATTGTG-1,0,21,23,4002,3063 GCTATCGCGGCGCAAC-1,0,20,24,3883,3132 CAGTAATCCCTCCCAG-1,0,21,25,4002,3201 
[Data block: Visium spot-position records in the six-column, header-less layout used by 10x Genomics tissue position files (tissue_positions_list.csv): barcode, in_tissue, array_row, array_col, pxl_row_in_fullres, pxl_col_in_fullres. One record per spot; array_col runs 0-127 and the records continue in this format for the remaining rows of the capture array.]
GGCAGCAAACCTATGC-1,1,51,103,7595,8569 ACTAGTTGCGATCGTC-1,1,50,104,7475,8638 TTGGACCATCTGGCAA-1,1,51,105,7595,8707 CCCTCCTCGCTCGTAT-1,1,50,106,7475,8776 GAGCGCAAATACTCCG-1,1,51,107,7595,8845 ATTACATGTCAGTCTT-1,1,50,108,7475,8914 TTGGGACGTAAGAGTT-1,1,51,109,7595,8982 CTTCGGCCAATTGTTT-1,1,50,110,7475,9051 AGACCGCTCCGCGGTT-1,1,51,111,7595,9120 GACCGCGTCTGACGTG-1,1,50,112,7475,9189 CCAAATAACAAGATTC-1,1,51,113,7595,9258 TCTTTAGAGTCTAACA-1,1,50,114,7475,9327 CTCCCAATGAGTCGCG-1,0,51,115,7595,9395 TAGTCTTTCCGAATTG-1,0,50,116,7475,9464 GAGTAAACCGGAAAGT-1,0,51,117,7595,9533 GATCTCGACGCTGTGG-1,0,50,118,7475,9602 CTGACATAGAAATAGA-1,0,51,119,7595,9671 TATACCGAGTGCCACA-1,0,50,120,7475,9739 TCCGAACGTTGCCGCT-1,0,51,121,7595,9808 GCAGAAGGTAATCTCC-1,0,50,122,7475,9877 GGTACTAAGTGCTTTG-1,0,51,123,7595,9946 CAGAATATTCGTTATC-1,0,50,124,7475,10015 TAATCAACCAAATGGG-1,0,51,125,7595,10084 TCTGCTTAGAACAAGC-1,0,50,126,7475,10152 AACTGAGTTATACTGA-1,0,51,127,7595,10221 CCTCAACGATCGCTGT-1,1,52,0,7715,1480 CGGTACGGCAAACCCA-1,1,53,1,7835,1549 CAGATGTTTGTCCCAA-1,1,52,2,7715,1618 GTTTGTTAGCCAAGTA-1,1,53,3,7835,1686 ACGTGACAAAGTAAGT-1,1,52,4,7715,1755 TTGTCACCGCGGTATC-1,1,53,5,7835,1824 GATCTAACCGTATTCA-1,1,52,6,7715,1893 ATGGAAATTTAAGGAG-1,1,53,7,7835,1962 ATTGATCACCACATTT-1,1,52,8,7715,2031 ACTGCTCGGAAGGATG-1,1,53,9,7835,2099 TCTAAAGAACAGTCTC-1,1,52,10,7715,2168 CTGGGTTGAGTTAAAG-1,1,53,11,7835,2237 CCCAGTAAACTTGGGA-1,1,52,12,7715,2306 AGATTCACAACCGATA-1,1,53,13,7835,2375 AGAAGGTACACTTCAC-1,1,52,14,7715,2444 GCAGGAACTTAGATCT-1,1,53,15,7835,2512 AATCTAGGTTTACTTG-1,1,52,16,7715,2581 CCCGGTGTATCGGAAT-1,1,53,17,7835,2650 TTATCCTCAAGGAATA-1,1,52,18,7715,2719 AGCATCATTTCGAAAG-1,1,53,19,7835,2788 CGCGAGTCTGCCGGGT-1,1,52,20,7715,2857 TGCGTCATGACTGAGC-1,1,53,21,7835,2925 GTATGAAATTTCACTC-1,1,52,22,7715,2994 TTGGTTGCGGTGCGCG-1,1,53,23,7835,3063 ACAGAACTGAGAACAA-1,1,52,24,7715,3132 CTACTATCATAGGTTT-1,1,53,25,7835,3201 CCTCTATCGATTAGCA-1,1,52,26,7715,3270 CCGTATCTCGTCGTAG-1,1,53,27,7835,3338 TCACGATGTCCGTGGA-1,1,52,28,7715,3407 TCAACAAAGATAATTC-1,1,53,29,7835,3476 ATGACGCGTTCTATCC-1,1,52,30,7715,3545 ATTTGCGCGAGTAGCT-1,1,53,31,7835,3614 TTCTTGGACGATCTGC-1,1,52,32,7715,3683 AGACGACGATGCCGCT-1,1,53,33,7835,3751 GGTCTCTGAATGGACT-1,1,52,34,7715,3820 GCTCAATCCGTTTATT-1,1,53,35,7835,3889 CCAGAAAGCAACTCAT-1,1,52,36,7715,3958 CACCGTTAGGGATCAC-1,1,53,37,7835,4027 AATAACACTAGAACAA-1,1,52,38,7715,4096 CACCCGGTTTGTGACT-1,1,53,39,7835,4164 CCACCAACTTTACTGT-1,1,52,40,7715,4233 AACTCTCAGTGTGCTC-1,1,53,41,7835,4302 AAACCGTTCGTCCAGG-1,1,52,42,7715,4371 CAGCGATTCCCTTCAA-1,1,53,43,7835,4440 GCTAGCTTGAATAGCT-1,1,52,44,7715,4508 TTGCGGCATCAGAAAG-1,1,53,45,7835,4577 AATAGAACAGAGTGGC-1,1,52,46,7715,4646 GCCATCGAGCTGCGTG-1,1,53,47,7835,4715 ACACTGATCAAGGTGT-1,1,52,48,7715,4784 ACCAACGCTTATTTAT-1,1,53,49,7835,4853 GACATCGATTTATAAC-1,1,52,50,7715,4921 CAGACACCGATCGCTG-1,1,53,51,7835,4990 CCAAGAAAGTGGGCGA-1,1,52,52,7715,5059 GGTAGACCGTTGGGCG-1,1,53,53,7835,5128 TCGTTAGGAGTCCCTA-1,1,52,54,7715,5197 ACGGCCAACATGGACT-1,1,53,55,7835,5266 GTGAGTGGTACAACGC-1,1,52,56,7715,5334 GAGACTTCGCGACCGA-1,1,53,57,7835,5403 GAAGTCAGTTGCACTA-1,1,52,58,7715,5472 GTTTGGTAGGGTCAAC-1,1,53,59,7835,5541 ACGGCACTTGCTTGGG-1,1,52,60,7715,5610 CCTCTGTACTATTCTA-1,1,53,61,7835,5679 CTATTCATGTGTCCCA-1,1,52,62,7715,5747 GCCCGATCTGTGGTCG-1,1,53,63,7835,5816 TTAGAAGAACATGACT-1,1,52,64,7715,5885 AGGATAAAGTCGGGAT-1,1,53,65,7835,5954 CGGCAAACATCGTGCG-1,1,52,66,7715,6023 CTAGTTACAACCCGGT-1,1,53,67,7835,6092 TTCGACAGAGCCCGTG-1,1,52,68,7715,6160 AAGCATACTCTCCTGA-1,1,53,69,7835,6229 
GACGACGATCCGCGTT-1,1,52,70,7715,6298 GGTAGAAGACCGCCTG-1,1,53,71,7835,6367 GAGATGGGAGTCGACA-1,1,52,72,7715,6436 AACGATAATGCCGTAG-1,1,53,73,7835,6505 TGGTCCCACGCTACGG-1,1,52,74,7715,6573 CTGCTTGGCGATAGCT-1,1,53,75,7835,6642 ATCGGAGACAGACGGC-1,1,52,76,7715,6711 TAGCGTCCGGTGTGGT-1,1,53,77,7835,6780 GTTGAGTCCCGCCGGT-1,1,52,78,7715,6849 AAATAGGGTGCTATTG-1,1,53,79,7835,6917 AAGTGTTTGGAGACGG-1,1,52,80,7715,6986 AGCACTACCTCACCAG-1,1,53,81,7835,7055 TGCCTTGGCCAGGCAA-1,1,52,82,7715,7124 TCGTCTTAGGCGTTAA-1,1,53,83,7835,7193 TTCTGACCGGGCTCAA-1,1,52,84,7715,7262 CAGAACTTAGCCCTCT-1,1,53,85,7835,7330 AGCTCCTTCGCACATC-1,1,52,86,7715,7399 ACAGGCTTGCCCGACT-1,1,53,87,7835,7468 GCTATACGTCTCGGAC-1,1,52,88,7715,7537 GAGCCAGCTACCTGTG-1,1,53,89,7835,7606 TGCTAAGTGTCTATTT-1,1,52,90,7715,7675 GTCCTACGAATAGTCT-1,1,53,91,7835,7743 CAGTGTCGGCTGGCCC-1,1,52,92,7715,7812 CTATCGACGAAATACA-1,1,53,93,7835,7881 CATCATTACCCTGAGG-1,1,52,94,7715,7950 TAAGTTGCGACGTAGG-1,1,53,95,7835,8019 AGTGCACGCTTAAGAA-1,1,52,96,7715,8088 TGTGCCGGTGCCGGAA-1,1,53,97,7835,8156 AGGAGGCCTTCGCGCG-1,1,52,98,7715,8225 TCCGCGGCCCAATGAA-1,1,53,99,7835,8294 TCCGTTAAGCTAATAT-1,1,52,100,7715,8363 AAATCTAGCCCTGCTA-1,1,53,101,7835,8432 CGCAGGCGATCCAAAC-1,1,52,102,7715,8501 CCATATGGAAACTATA-1,1,53,103,7835,8569 CACCGTATCCCATCCG-1,1,52,104,7715,8638 GGTCAAGACTACTTCG-1,1,53,105,7835,8707 CCCGTTTCGCAGATGT-1,1,52,106,7715,8776 GTTATAATACGGTGAA-1,1,53,107,7835,8845 ACTTGTGGATGGAACG-1,1,52,108,7715,8914 GTTCGCTGAGACGTCT-1,1,53,109,7835,8982 GACACTGGAACCCGAT-1,1,52,110,7715,9051 CCCAGGTCTGAAGGCT-1,1,53,111,7835,9120 TCTCGAGGAGGTTCGC-1,1,52,112,7715,9189 ACTATCTGCCCGCGTA-1,1,53,113,7835,9258 CCTAAATTGTATCCTA-1,1,52,114,7715,9327 AGAAATTATGACTCGC-1,0,53,115,7835,9395 TCCAGCGCTATAAGCG-1,0,52,116,7715,9464 TCGTGTATTGGTCACG-1,0,53,117,7835,9533 CCACATACTGCACCCA-1,0,52,118,7715,9602 GTTGCGGACGGTCAGG-1,0,53,119,7835,9671 GTATTTAATGGCATAA-1,0,52,120,7715,9739 GTCGATAGGTGACTTT-1,0,53,121,7835,9808 AATAATCTTCGTATCG-1,0,52,122,7715,9877 ATTGTTCAACGATCCG-1,0,53,123,7835,9946 GTGCAGCGTAGAGTAG-1,0,52,124,7715,10015 TGTGGTAGGGTGCCTT-1,0,53,125,7835,10084 TGTGGACTATCTACGT-1,0,52,126,7715,10152 GGCGGGCTCTAAGAGT-1,0,53,127,7835,10221 TGTGCCAGAGGCAAAG-1,1,54,0,7955,1480 TGGCTTATGTATAATG-1,1,55,1,8074,1549 GCAAGCTGGAAACCGC-1,1,54,2,7955,1618 GATATCAAGCAGGAGC-1,1,55,3,8074,1686 CACCAATCATCCGTCT-1,1,54,4,7955,1755 CCACATGGCTCTTTAT-1,1,55,5,8074,1824 GTCCTACTCTACGGGC-1,1,54,6,7955,1893 CTCGAGACATACGATA-1,1,55,7,8074,1962 TATCAGTGGCGTAGTC-1,1,54,8,7955,2031 TGACAGGACAAGTCCA-1,1,55,9,8074,2099 TATTAACACCAAAGCA-1,1,54,10,7955,2168 TAGGCGATGAGGTCTC-1,1,55,11,8074,2237 CCGCCGGAACTTCTCG-1,1,54,12,7955,2306 GCCATTAGCCTCAAAC-1,1,55,13,8074,2375 GTTAGGCTACCCGTTT-1,1,54,14,7955,2444 GGGTATTCTAGCAAAC-1,1,55,15,8074,2512 GCCCTAGCCGTCGCGA-1,1,54,16,7955,2581 CAGATCCTGGTTTGAA-1,1,55,17,8074,2650 CTTCAGTGGTCGCCTA-1,1,54,18,7955,2719 GGGCAACCGCACGTGC-1,1,55,19,8074,2788 GACCCAATTATGATAC-1,1,54,20,7955,2857 GAAGCTCGGACCCGTC-1,1,55,21,8074,2925 CGTCGGGTCTAAGCGC-1,1,54,22,7955,2994 GAGGCCCGACTCCGCA-1,1,55,23,8074,3063 TTGCCATAGCCCGCTC-1,1,54,24,7955,3132 TAACATACACGCGATC-1,1,55,25,8074,3201 CCGACGGGCATGAGGT-1,1,54,26,7955,3270 AATGTGCCCGAGGTGT-1,1,55,27,8074,3338 AGCTGTAACCTCAATC-1,1,54,28,7955,3407 CGAGTGAAGGTACCAG-1,1,55,29,8074,3476 AGTCTCACAAGACTAC-1,1,54,30,7955,3545 AAATTGATAGTCCTTT-1,1,55,31,8074,3614 TAAGTCGCCGAGTATC-1,1,54,32,7955,3683 GCGGAGAAACTTCGCA-1,1,55,33,8074,3751 GGCAAAGGCGCCAATA-1,1,54,34,7955,3820 ATTTCCGGGTTCTGCG-1,1,55,35,8074,3889 TAAACCCAGGAGGGCA-1,1,54,36,7955,3958 
TTCGGGCGCTAGTCTT-1,1,55,37,8074,4027 GTGGAGTCGGCGGTTG-1,1,54,38,7955,4096 GCACGCCTACTTAGAT-1,1,55,39,8074,4164 CAATATTCTTGACCTA-1,1,54,40,7955,4233 CGTTTGTGTAGAGGGT-1,1,55,41,8074,4302 CATAGCGTTGCCCACC-1,1,54,42,7955,4371 TGATACATTTAGCCGT-1,1,55,43,8074,4440 TTCCGGCCTTGAGGCT-1,1,54,44,7955,4508 CCTATACCGTCCTGTC-1,1,55,45,8074,4577 TCGTTGCTATCCGGTC-1,1,54,46,7954,4646 TGGCGACTGCTCCAAA-1,1,55,47,8074,4715 CAGAGGCGATGCATGA-1,1,54,48,7954,4784 TCACAGGAGAATAAGA-1,1,55,49,8074,4853 GGGTTAACATTTGAGT-1,1,54,50,7954,4921 GCGGGCGAGCCTTACC-1,1,55,51,8074,4990 TGCCAATGGGTACTCT-1,1,54,52,7954,5059 AGGGACTCTACGCGAC-1,1,55,53,8074,5128 GACGCCGTAAAGGCTA-1,1,54,54,7954,5197 AAAGGCTCTCGCGCCG-1,1,55,55,8074,5266 CTGGCTGGTTGTCAGT-1,1,54,56,7954,5334 ACATCCCGGCCATACG-1,1,55,57,8074,5403 CTCTTGTCCCGCTTGG-1,1,54,58,7954,5472 TACGCCTCCATTCCGA-1,1,55,59,8074,5541 CGGAGCAATTTAATCG-1,1,54,60,7954,5610 AACTAGGCTTGGGTGT-1,1,55,61,8074,5679 GCGTCGCCAGGGTGAT-1,1,54,62,7954,5747 GTTGGTCATGCTATCC-1,1,55,63,8074,5816 GCTGGCGGCGCATGCT-1,1,54,64,7954,5885 TGAGCGGAAAGTGTTC-1,1,55,65,8074,5954 CTTTCTGTGCGGGCTT-1,1,54,66,7954,6023 ATCTAATATCCTACGG-1,1,55,67,8074,6092 AACCTTTAAATACGGT-1,1,54,68,7954,6160 TTCACTCGAGCACCTA-1,1,55,69,8074,6229 CTGCACTCCAGTACAG-1,1,54,70,7954,6298 CCATTTCTACCTATTA-1,1,55,71,8074,6367 CAAGCGGCACATAATT-1,1,54,72,7954,6436 GTTAACATCACTTAAA-1,1,55,73,8074,6505 ATATAAATGTAGCTGC-1,1,54,74,7954,6573 GCCTCTATACATAGCA-1,1,55,75,8074,6642 GTGTCGTATAGCGTTC-1,1,54,76,7954,6711 ATACGTACTTAGCCAC-1,1,55,77,8074,6780 GCTATCATACTCATGG-1,1,54,78,7954,6849 TATCCAATTGGTTATC-1,1,55,79,8074,6918 TCTCCCTGGGCAGCGT-1,1,54,80,7954,6986 CCTATAATGAGTGCCC-1,1,55,81,8074,7055 CAATGCGAGAAGTATC-1,1,54,82,7954,7124 TACCAATAAAGTACCA-1,1,55,83,8074,7193 GCTAGGCACCACGGAG-1,1,54,84,7954,7262 GGTGGACTGCTCTGGC-1,1,55,85,8074,7330 TATTCCGAGCTGTTAT-1,1,54,86,7954,7399 TTGAGAAGTTTAGCAT-1,1,55,87,8074,7468 GTCCGGACCTGAAATT-1,1,54,88,7954,7537 CTGTGGTCGGGAGATA-1,1,55,89,8074,7606 CATGTAAGAGACATTT-1,1,54,90,7954,7675 CACGTTTCGTACACAC-1,1,55,91,8074,7743 CATCCTCTCAAAGATC-1,1,54,92,7954,7812 GATTAACCGAAAGCCC-1,1,55,93,8074,7881 AACGCTGTTGCTGAAA-1,1,54,94,7954,7950 TTCAGCTGGCGTGCCC-1,1,55,95,8074,8019 TAGAGGTTCTACTTGT-1,1,54,96,7954,8088 GTATCAAACGTTAGCT-1,1,55,97,8074,8156 AATCATGTAAAGACTC-1,1,54,98,7954,8225 AGTCACTAGCTCTCGA-1,1,55,99,8074,8294 GCGAAACGATCGGGAG-1,1,54,100,7954,8363 CATGGATTGTCTTCCG-1,1,55,101,8074,8432 ATTGGATTACAGCGTA-1,1,54,102,7954,8501 CACACACGCTAACGAG-1,1,55,103,8074,8569 CTATGGGAAGCGGAAT-1,1,54,104,7954,8638 TGCCTAATTGAAGATT-1,1,55,105,8074,8707 TGGCAATGGGACGGCG-1,1,54,106,7954,8776 ACCTCCGCCCTCGCTG-1,1,55,107,8074,8845 CGGCAATAAGATCGCC-1,1,54,108,7954,8914 CCTTGACCACTTTATT-1,1,55,109,8074,8982 TCATTTAGAAGTGTGA-1,1,54,110,7954,9051 CAGAATAACACACGGA-1,1,55,111,8074,9120 TCTGAGCAATTGACTG-1,1,54,112,7954,9189 CAACGTGGTGGAGTCT-1,1,55,113,8074,9258 CACAGTTCGCTTCCCA-1,1,54,114,7954,9327 TCTCTCGCCGCACATA-1,0,55,115,8074,9395 TGGGCAGGCCACCGCA-1,0,54,116,7954,9464 CTCCTGTTCAAGGCAG-1,0,55,117,8074,9533 TATTTGATTTGCACAG-1,0,54,118,7954,9602 GCCACTCCTTACGGTA-1,0,55,119,8074,9671 ACCGATATTTAATCAT-1,0,54,120,7954,9739 TTATTATCTGGAAGGC-1,0,55,121,8074,9808 TACATGCCGGAATTGT-1,0,54,122,7954,9877 GGCACTCAGCCGACCC-1,0,55,123,8074,9946 AAACCGGAAATGTTAA-1,0,54,124,7954,10015 AGACAGGCATCTCAGC-1,0,55,125,8074,10084 GTGGTGATGGTTTGTG-1,0,54,126,7954,10152 CCGATATGACGTAAGG-1,0,55,127,8074,10221 CATACACAAAGTCAGC-1,1,56,0,8194,1480 TGCCGTTCTTAATCGG-1,1,57,1,8314,1549 TCGAGTCTACGATTCG-1,1,56,2,8194,1618 ACTTCCAGTGGAAGCT-1,1,57,3,8314,1687 
CAATTGGGCCGCACTC-1,1,56,4,8194,1755 AAATCGCGGAAGGAGT-1,1,57,5,8314,1824 ATATCAATTCCAGCCT-1,1,56,6,8194,1893 CTTTGTCGAATGCTCC-1,1,57,7,8314,1962 TGAGGCATGTACTGTG-1,1,56,8,8194,2031 TTCGCCGCTCGCGCTA-1,1,57,9,8314,2099 GACTCCTTCCAATTGA-1,1,56,10,8194,2168 ACCGAAGAGTCTGGTT-1,1,57,11,8314,2237 TGGCATGAAGTTTGGG-1,1,56,12,8194,2306 ACGAGGTTTACAACGT-1,1,57,13,8314,2375 AACCCATCCCATGATC-1,1,56,14,8194,2444 TCGACAACTGAACCCG-1,1,57,15,8314,2512 TATCTTGCAATACAAC-1,1,56,16,8194,2581 TATTATGTTTGCCTGC-1,1,57,17,8314,2650 GTCAAAGAAGTGGTGT-1,1,56,18,8194,2719 GGGACAGAGTTACTCC-1,1,57,19,8314,2788 AGCAACCGAAAGTAAT-1,1,56,20,8194,2857 CTCTCTAACTGCCTAG-1,1,57,21,8314,2925 ATACGGAACGTCGTTT-1,1,56,22,8194,2994 TCGGTCCCGACAATAG-1,1,57,23,8314,3063 CGCGCAAATGTCCAGA-1,1,56,24,8194,3132 TCTAATACTGCCTCAG-1,1,57,25,8314,3201 CTCGGTTGTCGGCCCT-1,1,56,26,8194,3270 GGTAACCGGGAGGATA-1,1,57,27,8314,3338 CAGGCGCACGGTGGTC-1,1,56,28,8194,3407 TCATCGATGGTCCCAA-1,1,57,29,8314,3476 CAAAGATTATTGGGCC-1,1,56,30,8194,3545 ACAATGAATACGGAGA-1,1,57,31,8314,3614 GCTCCTGACATACTGG-1,1,56,32,8194,3683 GATGACAAGTAGGGCA-1,1,57,33,8314,3751 TACGCCGAGGGTACCC-1,1,56,34,8194,3820 AAGCGTCCCTCATCGA-1,1,57,35,8314,3889 CACTCCTATGTAAGAT-1,1,56,36,8194,3958 TCGAGCCAGGCAGGCC-1,0,57,37,8314,4027 TCGTCAAGTACGCGCA-1,1,56,38,8194,4096 CCCGTAGCTGGGAAGA-1,1,57,39,8314,4164 ACCCTATGCCATATCG-1,1,56,40,8194,4233 GTGGTATAGTCTGCCG-1,1,57,41,8314,4302 CCGGTTTGTAATTGTG-1,1,56,42,8194,4371 GTCGCCGTTGTGTGTT-1,1,57,43,8314,4440 TACTGAACAGATTTAG-1,1,56,44,8194,4508 TATTAACCTGACCGCG-1,1,57,45,8314,4577 CTGTAGCCATCTCACT-1,1,56,46,8194,4646 TACTATGGTTCCTCAG-1,1,57,47,8314,4715 GAGTCAGACCAGAATC-1,1,56,48,8194,4784 TAAGGCATAACATCAA-1,1,57,49,8314,4853 CTTCCGCTCCGTGAAG-1,1,56,50,8194,4921 GGGCTATGATCGATGG-1,1,57,51,8314,4990 GGTCGGTCGTCCACAG-1,1,56,52,8194,5059 GGGACCCGTATATCTT-1,1,57,53,8314,5128 GAAAGCAGTGCACTTT-1,1,56,54,8194,5197 AGCGCATAATGAATCG-1,1,57,55,8314,5266 GCTGCTAAGTAGTCGA-1,1,56,56,8194,5334 AATAGTCCGTCCCGAC-1,1,57,57,8314,5403 AGCAGCCAGATGAATA-1,1,56,58,8194,5472 CCTCGCGCTGTGCGAT-1,1,57,59,8314,5541 TTGTGTATGCCACCAA-1,1,56,60,8194,5610 TGCTCGGCGAAACCCA-1,1,57,61,8314,5679 CGATTAAATATCTCCT-1,1,56,62,8194,5747 TGCCGTGGATCGTCCT-1,1,57,63,8314,5816 GTGGACGCATTTGTCC-1,1,56,64,8194,5885 ACCCGGAAACTCCCAG-1,1,57,65,8314,5954 CCGCATGTGGTACGAT-1,1,56,66,8194,6023 GCAAACCTTGGCCATA-1,1,57,67,8314,6092 ATTCACTGATGTTGGA-1,1,56,68,8194,6160 ACTCCCTAATGCTAAA-1,1,57,69,8314,6229 TCCTAAATTGGGAAGC-1,1,56,70,8194,6298 GAACAGATTACTAAAT-1,1,57,71,8314,6367 ATACAGGCCCTCCAAT-1,1,56,72,8194,6436 CGATTCGCCTGGCTGC-1,1,57,73,8314,6505 TCCGCCTGTCTACAAG-1,1,56,74,8194,6573 CACATCTCACCGACGA-1,1,57,75,8314,6642 CGTCGTCCTTCGCGAA-1,1,56,76,8194,6711 CGTTGAATACCGCGCT-1,1,57,77,8314,6780 ACCAATATGCAAGTTA-1,1,56,78,8194,6849 ACGGATGGTGCGGATA-1,1,57,79,8314,6918 CACCCTAACAAGATCT-1,1,56,80,8194,6986 AAGCTCTTTCATGGTG-1,1,57,81,8314,7055 CATGGGTATGCCTTAT-1,1,56,82,8194,7124 GCGCGTCATTGGTACA-1,1,57,83,8314,7193 TCTGAAGCACGTGGTC-1,1,56,84,8194,7262 AAGTTCACTCCAAGCT-1,1,57,85,8314,7330 TTGGACCTATAACAGT-1,1,56,86,8194,7399 ATCAGCCTCATGCTGC-1,1,57,87,8314,7468 AGTACTCTTATGCCCA-1,1,56,88,8194,7537 GCGCTTAAATAATTGG-1,1,57,89,8314,7606 TTGTGAACCTAATCCG-1,1,56,90,8194,7675 AGTCGTCGACCACCAA-1,1,57,91,8314,7743 TCCTTTAAATCCGCTT-1,1,56,92,8194,7812 ACTGAAACGCCGTTAG-1,1,57,93,8314,7881 GCACACACTGGTAGCC-1,1,56,94,8194,7950 TAAGGGCCTGTCCGAT-1,1,57,95,8314,8019 GCTAGAGTAGAGATGT-1,1,56,96,8194,8088 CACAGGGCCATATAGT-1,1,57,97,8314,8156 CCATGCCCTAGATTTC-1,1,56,98,8194,8225 TCGCGTAGCAGTGTCC-1,1,57,99,8314,8294 
GACGAGGCTAATAAAC-1,1,56,100,8194,8363 CTACACTCGCAGATGG-1,1,57,101,8314,8432 ACGCATACGTTTACTA-1,1,56,102,8194,8501 GGAGCAACATTTCAAG-1,1,57,103,8314,8569 CGAGAGGGTAGCCGCG-1,1,56,104,8194,8638 TCGTGTTCGACCACAA-1,1,57,105,8314,8707 TCTACCCAATAGAGAG-1,1,56,106,8194,8776 ATAAACGTTGCACCAC-1,1,57,107,8314,8845 TCAACTGCAGAGTCAG-1,1,56,108,8194,8914 AGCTCCATATATGTTC-1,1,57,109,8314,8982 GTAACATCTAAGATAA-1,1,56,110,8194,9051 CCGACAATAGGCCGCC-1,1,57,111,8314,9120 ATGCTTAGGAGTTGAT-1,1,56,112,8194,9189 AACCCGAGCAGAATCG-1,1,57,113,8314,9258 CGGGTTTGTTAGGGCT-1,1,56,114,8194,9327 AGGCACGTGACTGTCC-1,0,57,115,8314,9395 GACAAGAGATGAGATT-1,0,56,116,8194,9464 AGCCCTTGGACATCCC-1,0,57,117,8314,9533 GCAGTTCGATCCGAGC-1,0,56,118,8194,9602 TCGAGGGCAACAGACG-1,0,57,119,8314,9671 ACTTTGTCGACGCACT-1,0,56,120,8194,9740 GAGTATCAAAGTTACA-1,0,57,121,8314,9808 CCAGGCTGGCGTCTGA-1,0,56,122,8194,9877 GGACTCTTCCGGTTGA-1,0,57,123,8314,9946 TCCCAGCACACGACAT-1,0,56,124,8194,10015 GCTCCATGAGTGCAGA-1,0,57,125,8314,10084 GCAGTGCGGGCGGATG-1,0,56,126,8194,10152 GCGTTCTGACTAAGCG-1,0,57,127,8314,10221 ATTTGGAGATTGCGGT-1,1,58,0,8434,1480 AAGGGTTTGATTTCAG-1,1,59,1,8553,1549 CCGGCCGCGAGCATAT-1,1,58,2,8434,1618 GAGTTCTGTGGGTGCT-1,1,59,3,8553,1687 AAACGAAGATGGAGTA-1,1,58,4,8434,1755 AATTACGAGACCCATC-1,1,59,5,8553,1824 CCTTTGAATTATGGCT-1,1,58,6,8434,1893 CATGGTATTAGTTTGT-1,1,59,7,8553,1962 AACGAAAGTCGTCCCA-1,1,58,8,8434,2031 ACATAAGTCGTGGTGA-1,1,59,9,8553,2099 GCACAACCTCGGGCGT-1,1,58,10,8434,2168 ATGCACGCGCTGTTCA-1,1,59,11,8553,2237 CGATGTTGTTATCTAC-1,1,58,12,8434,2306 GTCTCCTGCCAGTATG-1,1,59,13,8553,2375 GATGCCTTCTGCGGCA-1,1,58,14,8434,2444 ACCGGTCAGGTACACC-1,1,59,15,8553,2512 GTGGAGCGTTTACCGA-1,1,58,16,8434,2581 AGGCGGTTTGTCCCGC-1,1,59,17,8553,2650 TCCCTGGCGTATTAAC-1,1,58,18,8434,2719 AAACACCAATAACTGC-1,1,59,19,8553,2788 TGGACGCAATCCAGCC-1,1,58,20,8434,2857 GGAACCTTGACTCTGC-1,1,59,21,8553,2925 GTATCTCAGTCTTGAC-1,1,58,22,8434,2994 GCTGAATCTTCCAATC-1,1,59,23,8553,3063 AGATGCAAGACGTGCA-1,1,58,24,8434,3132 TTAAGGCCCGTACTTT-1,1,59,25,8553,3201 AGTATGCTGGAGACCA-1,1,58,26,8434,3270 GCGGCAAAGTATTGCC-1,1,59,27,8553,3338 GCGCAAATATATTCAA-1,1,58,28,8434,3407 GAAGAAACGATATTGT-1,1,59,29,8553,3476 GGACCTCTAGGCCGCC-1,1,58,30,8434,3545 AACAGCTGTGTGGCAA-1,0,59,31,8553,3614 TCGAATATCCCGCAGG-1,1,58,32,8434,3683 GCGACCCAACCATCTG-1,0,59,33,8553,3751 AGAGAAGGAGTACAAT-1,1,58,34,8434,3820 GGCTCCTCCACCTGTT-1,0,59,35,8553,3889 TCAGTGTATACGTCAT-1,1,58,36,8434,3958 ATTATTATGTCCGTCA-1,1,59,37,8553,4027 TTAGGTCATAACCGAC-1,0,58,38,8434,4096 AAGATGGCACCGGACC-1,1,59,39,8553,4164 CTACAAGAAATAACCC-1,0,58,40,8434,4233 ACTATTTCCGGGCCCA-1,1,59,41,8553,4302 TTGTTTCACATCCAGG-1,1,58,42,8434,4371 CGCTGTGTGGATGTTG-1,1,59,43,8553,4440 TCAACATAGCGCCCTA-1,1,58,44,8434,4509 GGCCGTTTGGGTTTCA-1,1,59,45,8553,4577 TACTCTTTCGTCTTCA-1,1,58,46,8434,4646 TTCGCGCGCCATACGA-1,1,59,47,8553,4715 CGGAAAGAATCAAACG-1,1,58,48,8434,4784 AGGCAGGGAGCGTACT-1,1,59,49,8553,4853 CCTTTAAGGGAGCACT-1,1,58,50,8434,4921 CGCCCAGCACGCCTAG-1,1,59,51,8553,4990 AGTATTTGGCACGACC-1,1,58,52,8434,5059 TATCCGCACCGTCGGG-1,1,59,53,8553,5128 ACCCGTGTCATCAGTA-1,1,58,54,8434,5197 CAACTGCTCATCCGAT-1,1,59,55,8553,5266 CCGGAGCGTACTTTCT-1,1,58,56,8434,5334 TACCGTAGGTTAACTA-1,1,59,57,8553,5403 TGAGGAGTGCCAGCTT-1,1,58,58,8434,5472 CCAGTCTTGTCATAGA-1,1,59,59,8553,5541 GGGCAGAGCAATCGTT-1,1,58,60,8434,5610 TACCAAATAGCCCAGA-1,1,59,61,8553,5679 TACCTACTCCCAGTAT-1,1,58,62,8434,5747 TGTGAGACTAGCCCAA-1,1,59,63,8553,5816 ATACCTAACCAAGAAA-1,1,58,64,8434,5885 GTCGGGAAGCAGAAAC-1,1,59,65,8553,5954 AATTAACGGATTTCCA-1,1,58,66,8433,6023 
GAACCCTCTGTGTTCT-1,1,59,67,8553,6092 ATATAAAGCGCTCGTG-1,1,58,68,8433,6160 CCTGGTCGAATGTGGG-1,1,59,69,8553,6229 CTAGGTTCGGACGTGA-1,1,58,70,8433,6298 TCGGGAGACAGCGTAC-1,1,59,71,8553,6367 CATAAGAAGCTTGGCT-1,1,58,72,8433,6436 AAGCACCCTGCGTATC-1,1,59,73,8553,6505 AGTTTGCACCTGCCTC-1,1,58,74,8433,6573 GACTGCAAATCGAGCT-1,1,59,75,8553,6642 GTGTATATCAGCGGGC-1,1,58,76,8433,6711 CTTCAACTCCACTTGG-1,1,59,77,8553,6780 ACTTACGCATCCACGC-1,1,58,78,8433,6849 CATGAGATGCACTCTC-1,1,59,79,8553,6918 GAGCACCTGTGTCCAG-1,1,58,80,8433,6986 CGCGACCGCGACAGAT-1,1,59,81,8553,7055 GTCCCGCGACGTTATG-1,1,58,82,8433,7124 GCAGTGTGGCTATAGG-1,1,59,83,8553,7193 ATACTGCCTTACACCG-1,1,58,84,8433,7262 TACAGAAACGGTGGGC-1,1,59,85,8553,7330 GGCGCGGAGATCTTTC-1,1,58,86,8433,7399 CGTTAATGTCCCGACG-1,1,59,87,8553,7468 GCTAACTGAAGTCTGA-1,1,58,88,8433,7537 AGTGATAACCTGCGCG-1,1,59,89,8553,7606 TATGATCTTCTCTTTA-1,1,58,90,8433,7675 TGTACCTACACGAGGG-1,1,59,91,8553,7743 ATATCGTTCCTCGAAC-1,1,58,92,8433,7812 CGGCGCCATCAATCCC-1,1,59,93,8553,7881 CCTGTTTGAAGACACG-1,1,58,94,8433,7950 GCCACTCAGAGCGCGA-1,1,59,95,8553,8019 TCGGCTTGTATCGACG-1,1,58,96,8433,8088 TAGATGGTTCCTTACT-1,1,59,97,8553,8156 ATCGTTAGCTAGCGGA-1,1,58,98,8433,8225 ACAGCGACATTCTCAT-1,1,59,99,8553,8294 ATTCAACCATTTAAGG-1,1,58,100,8433,8363 CACTCTTCTGCTAGCC-1,1,59,101,8553,8432 CTAACTGATAATCGCC-1,1,58,102,8433,8501 TGATGTCAATTAAGTG-1,1,59,103,8553,8569 CGCCGTTCAGCATAGT-1,1,58,104,8433,8638 ATACTACCCGTACCAC-1,1,59,105,8553,8707 CCACTGTTTGGATTAA-1,1,58,106,8433,8776 CACTACGGGAGCTGCC-1,1,59,107,8553,8845 GACCAAACGTTGACTG-1,1,58,108,8433,8914 AAGAGGCCCTTTGGAA-1,1,59,109,8553,8982 ACACGTAGGCCACAAG-1,1,58,110,8433,9051 TACTCTTACTTTACTG-1,1,59,111,8553,9120 AGAGCCGCCGAGATTT-1,1,58,112,8433,9189 GGCCTGCTTCTCCCGA-1,1,59,113,8553,9258 TCAAGCGCGGACGGTA-1,1,58,114,8433,9327 ACACTGGGACAGTCGT-1,0,59,115,8553,9395 TGTGTAGTAGCACGTG-1,0,58,116,8433,9464 CTAGCCACAGGCGAGC-1,0,59,117,8553,9533 CTATCGCGTAGAGAAC-1,0,58,118,8433,9602 GTTCCCGTAAACATAT-1,0,59,119,8553,9671 TCTTTCCTTCGAGATA-1,0,58,120,8433,9740 ATATTGGAGAGGCCTT-1,0,59,121,8553,9808 AGGCGATAACTGGCGT-1,0,58,122,8433,9877 ACCCGGAGCACCACAA-1,0,59,123,8553,9946 TGAGCTCAACTGTATA-1,0,58,124,8433,10015 ACGACCCATGAGTTGC-1,0,59,125,8553,10084 CGGTCAGTCCATATTT-1,0,58,126,8433,10152 TCCGGCTGTCGGGTCG-1,0,59,127,8553,10221 ATTCCCGAAGGTACAG-1,1,60,0,8673,1480 CTAACAGCACAATAAC-1,1,61,1,8793,1549 TTGTGCGGAAGCGGAT-1,1,60,2,8673,1618 GGGCACTATTGACCAT-1,1,61,3,8793,1687 CCATAGAGGCTGCCAG-1,1,60,4,8673,1755 CCGAAGCATTGACCAA-1,1,61,5,8793,1824 TACTGAGGGAAGAAAG-1,1,60,6,8673,1893 TATGTAGAAACCCGGC-1,1,61,7,8793,1962 TGGTTAACTTACATTT-1,1,60,8,8673,2031 AGAACCCTCAATTGGG-1,1,61,9,8793,2099 CCTATATCGTGTCACG-1,1,60,10,8673,2168 ACGGCTGGATGTAGAA-1,1,61,11,8793,2237 CTCGCATTGCATAGCC-1,1,60,12,8673,2306 ACTTTGGTCGTGCTCC-1,1,61,13,8793,2375 GCTGGTTTAGGCCATA-1,1,60,14,8673,2444 TGTCGTTATCACATAT-1,1,61,15,8793,2512 ACACATGATCAAATCT-1,1,60,16,8673,2581 TATCCATCTCGGTTAG-1,1,61,17,8793,2650 CTCGTCGAGGGCTCAT-1,1,60,18,8673,2719 ATCAATGCCGTGGCTG-1,1,61,19,8793,2788 GAAACATAGGAAACAG-1,1,60,20,8673,2857 TAATAGGTCACCAGAA-1,1,61,21,8793,2925 CACCAGTCAGCATGCA-1,1,60,22,8673,2994 GAGCGCTGTTAGGTAA-1,1,61,23,8793,3063 CCCTATGTAGAGCAGA-1,1,60,24,8673,3132 GATCGCTATATCTCAG-1,1,61,25,8793,3201 AAGGGACTATGCATTC-1,1,60,26,8673,3270 CTCGGGTTCTCTGGCC-1,0,61,27,8793,3338 TATATATCGAGAAATG-1,1,60,28,8673,3407 ACCCGAGCGAAATTAC-1,0,61,29,8793,3476 TTGTTTCATTAGTCTA-1,0,60,30,8673,3545 ATGGGACCTGCTGAAC-1,0,61,31,8793,3614 TGCGACGGCCGAACGT-1,0,60,32,8673,3683 AGCTAACAAGCAATGT-1,0,61,33,8793,3751 
CGTGTCTCGTTACGAC-1,0,60,34,8673,3820 CTAAATCCGGTGTACA-1,0,61,35,8793,3889 GAATAGTGCTCGATTA-1,0,60,36,8673,3958 AACAATACATTGTCGA-1,0,61,37,8793,4027 CACATCGTGCACGCGC-1,0,60,38,8673,4096 ACCGTGACCACGTGGG-1,0,61,39,8793,4164 CCCAATGAGATTTGCA-1,0,60,40,8673,4233 TCAGCAGTAGGCCCTG-1,0,61,41,8793,4302 CTACTCAAGGTATAGT-1,1,60,42,8673,4371 TAGTAGCTTATACCAG-1,1,61,43,8793,4440 ACGTTCGTTCAGGAAA-1,1,60,44,8673,4509 TTAGAATAAGGGTCGG-1,1,61,45,8793,4577 TTCTAGAAAGTCTTAT-1,1,60,46,8673,4646 CCTTCAGTTAAAGTGA-1,1,61,47,8793,4715 TAGCTAGTGATGATGG-1,1,60,48,8673,4784 AGCCCGGTAGCCTGTA-1,1,61,49,8793,4853 TACCTCAGTTGTCTGT-1,1,60,50,8673,4921 GACGGGTTGGCCCGTA-1,1,61,51,8793,4990 GAAACCTATACAAATG-1,1,60,52,8673,5059 TCCACCTCTAGCCTTT-1,1,61,53,8793,5128 AAGACCCAACTGAACA-1,1,60,54,8673,5197 CATACACGGTTCCCAC-1,1,61,55,8793,5266 ACTACGCGTTAGAATT-1,1,60,56,8673,5334 CTTGGCCAAGCTGGGA-1,1,61,57,8793,5403 TTACCCTAGGGATTGG-1,1,60,58,8673,5472 TTCGCACTGTACGACA-1,1,61,59,8793,5541 TAGGTGTTCCACAGAT-1,1,60,60,8673,5610 GAAGCCTGCACATTCC-1,1,61,61,8793,5679 GGCTTTCAATAAGGGT-1,1,60,62,8673,5747 TCCAGGGTATATACGA-1,1,61,63,8793,5816 CTCGCCGAATGTAGGG-1,1,60,64,8673,5885 AGGAAGCTGTCCGCCG-1,1,61,65,8793,5954 GTCTCAAGGCCCGGCT-1,1,60,66,8673,6023 ACCGACACATCTCCCA-1,1,61,67,8793,6092 TATACGCGTCATCACT-1,1,60,68,8673,6160 ATCTGGGCTGTTCTTG-1,1,61,69,8793,6229 TCAAATTTGAGACTCA-1,1,60,70,8673,6298 GTTAAGGGTGCGATGT-1,1,61,71,8793,6367 AGCAAAGGCCGCTAGT-1,1,60,72,8673,6436 TTATAGGTAATTGTCT-1,1,61,73,8793,6505 TTGTGCAGCCACGTCA-1,1,60,74,8673,6573 ACGGACTCTCAAAGCG-1,1,61,75,8793,6642 ATGCTCTGGCGCGGTA-1,1,60,76,8673,6711 ATAATCTTGGAGAACC-1,1,61,77,8793,6780 CTCATTGCTCTAACAA-1,1,60,78,8673,6849 TAGGTGACGATAACCT-1,1,61,79,8793,6918 CGGATCCTCAAGGACT-1,1,60,80,8673,6986 GGGCGTGGTTTCCCAG-1,1,61,81,8793,7055 TGGCTATGTGACATAC-1,1,60,82,8673,7124 CTTAGTGTAGTAGCAT-1,1,61,83,8793,7193 GGATTGCTGTGACTCC-1,1,60,84,8673,7262 ACTTCCATGCGGGACA-1,1,61,85,8793,7331 ACTCAAGTGCAAGGCT-1,1,60,86,8673,7399 ACAAGGATGCTTTAGG-1,1,61,87,8793,7468 TTGAACGACGTGCTGA-1,1,60,88,8673,7537 ATTCATCGTTGAGGCA-1,1,61,89,8793,7606 TGCAACCCATCTGCGG-1,1,60,90,8673,7675 AGTGCTAAACACAGCA-1,1,61,91,8793,7743 AACTCCAGAGCGTGTT-1,1,60,92,8673,7812 TCTCCACAAGTTGAAT-1,1,61,93,8793,7881 CTTTGCATCGCTCTTG-1,1,60,94,8673,7950 TCACAAACCGAGGTAC-1,1,61,95,8793,8019 CCGTAGGAAATCCCTG-1,1,60,96,8673,8088 AAACATTTCCCGGATT-1,1,61,97,8793,8156 GCCTCCGACAATTCAC-1,1,60,98,8673,8225 TCTGCATACCTTGCTT-1,1,61,99,8793,8294 ACCCATTTGTCCCTCT-1,1,60,100,8673,8363 CTGCAGAGAATCAGAG-1,1,61,101,8793,8432 GGAAACTAAATGGGCC-1,1,60,102,8673,8501 CAACCTACCGAGCAGT-1,1,61,103,8793,8569 CCGTAGGGTTGTTTAC-1,1,60,104,8673,8638 GGTTTGTGACCTGAGG-1,1,61,105,8793,8707 TAACCTAGGGAGTCCA-1,1,60,106,8673,8776 TTCCATCGACAGCGTG-1,1,61,107,8793,8845 ATCTCCCACGGAATAT-1,1,60,108,8673,8914 GTTTCAAACGAGTTGT-1,1,61,109,8793,8982 GGAACTTTGGCGATTA-1,1,60,110,8673,9051 CTGCAAGCACGTTCCG-1,1,61,111,8793,9120 CACAGTCCCGCTTCGC-1,1,60,112,8673,9189 TACGCTCGGTATTGGA-1,1,61,113,8793,9258 CTGGTAAAGTGTGGGC-1,0,60,114,8673,9327 CAACCAGTGGCCTACC-1,0,61,115,8793,9395 TCTTCGCGGTGAGAGG-1,0,60,116,8673,9464 TTCGGGTTGCCACGGG-1,0,61,117,8793,9533 ACCGATAGGCATAACC-1,0,60,118,8673,9602 CTTATAGATGGCTGTT-1,0,61,119,8793,9671 AGACGCCCACTTCGCC-1,0,60,120,8673,9740 AGGTCGCCTCCCAACA-1,0,61,121,8793,9808 CCTCACGTCAGCTAAT-1,0,60,122,8673,9877 CCACCTGTATGGAATA-1,0,61,123,8793,9946 GGACTCGAAGCTTTCA-1,0,60,124,8673,10015 AAGCCGGAGAGCAGGA-1,0,61,125,8793,10084 GCCTATCAGGTAAGAT-1,0,60,126,8673,10152 TCAGAGTGTGAGCATG-1,0,61,127,8793,10221 AAACATGGTGAGAGGA-1,1,62,0,8913,1480 
ATCATTGTACCGCATT-1,1,63,1,9032,1549 TCAGCTTGAGCTTTCG-1,1,62,2,8913,1618 CCGCGCAAGATACCCA-1,1,63,3,9032,1687 CTATACTTAAAGCGAG-1,1,62,4,8913,1755 ACCGTCCACTGGGCCC-1,1,63,5,9032,1824 AAGACCAAATAACTCA-1,1,62,6,8913,1893 TGTTTCTGAAGCGTGC-1,1,63,7,9032,1962 TTCTCTTACAGGTGAT-1,1,62,8,8913,2031 AATTCATTGTCATGCA-1,1,63,9,9032,2100 GCCCGAGAGTCTAAAT-1,1,62,10,8913,2168 ATATAGAGTATTGGTC-1,1,63,11,9032,2237 TTGAAGGATGGGCGCC-1,1,62,12,8913,2306 TAGTACCACAACTTTC-1,1,63,13,9032,2375 ATCCCATCCACAGCGC-1,1,62,14,8913,2444 CTTAGGTATAGACCAG-1,1,63,15,9032,2512 TGACTCCGAATCATAC-1,1,62,16,8913,2581 ACTACATCCCGACAAG-1,1,63,17,9032,2650 TTGCCGCAGACCTACA-1,1,62,18,8913,2719 TAAGAGGGACAGGGAC-1,1,63,19,9032,2788 CGAATGAAGTCATTGC-1,1,62,20,8913,2857 AAGGGAACGACTGGCT-1,1,63,21,9032,2925 CGAATCTGCTCGACGC-1,1,62,22,8913,2994 ATCGTGGAAAGTCTGG-1,0,63,23,9032,3063 CCTATGGCTCCTAGTG-1,1,62,24,8913,3132 CGTCGCTTGGTTATAC-1,0,63,25,9032,3201 TGCTTAGAGAGAATGC-1,0,62,26,8913,3270 GATTGCTCCAGTTGCA-1,0,63,27,9032,3338 CGTGCTTCTACCTAAA-1,0,62,28,8913,3407 ACTCCAATATCATCAT-1,0,63,29,9032,3476 GCCGACCCACGACTGC-1,0,62,30,8913,3545 GAGGGTAGTAACAAAG-1,0,63,31,9032,3614 GGCCGAGACTCTGGTG-1,0,62,32,8913,3683 TATATAGGGCTTTACG-1,0,63,33,9032,3751 TGCCGGATGTACGAGC-1,0,62,34,8913,3820 AGGTAGATCGAGATAT-1,0,63,35,9032,3889 GATCAACATAAAGGGA-1,0,62,36,8913,3958 TAACAGCGTTTGTGCT-1,0,63,37,9032,4027 CTATAGGCGTTGATGT-1,0,62,38,8913,4096 GGTTTAATTACCATCG-1,0,63,39,9032,4164 CTCTTCTGGAAGTTAG-1,0,62,40,8913,4233 ATCTGACATGGAAGGA-1,0,63,41,9032,4302 TACGTACTAGTGCTGA-1,0,62,42,8913,4371 ATGACTATCAGCTGTG-1,0,63,43,9032,4440 CTTTGGGATTGTTGCA-1,0,62,44,8913,4509 AGACTGTTACCGGGTC-1,0,63,45,9032,4577 TCCTAACCGTCGGGCA-1,1,62,46,8913,4646 ACGCAAACTAATAGAT-1,1,63,47,9032,4715 AAGTGAGTCGGGTTTA-1,1,62,48,8913,4784 TAAAGCGGTATTTCCA-1,1,63,49,9032,4853 CTCCTCCAGCTCACAC-1,1,62,50,8913,4921 CGGTCCGTCGCAAGCC-1,1,63,51,9032,4990 CCTATATTTGTCCTGG-1,1,62,52,8913,5059 TCCAATAAAGGCTACC-1,1,63,53,9032,5128 AAAGGCTACGGACCAT-1,1,62,54,8913,5197 CGTGACCAGTCCTCTG-1,1,63,55,9032,5266 GATCTGCTATCTAAGG-1,1,62,56,8913,5334 CGAATGACGCATAATG-1,1,63,57,9032,5403 GCTCAATGTAATACCG-1,1,62,58,8913,5472 CTATGCCCGAATGCAA-1,1,63,59,9032,5541 CACAGGGCCGTTGTCA-1,1,62,60,8913,5610 TCAATACGCCGTCATG-1,1,63,61,9032,5679 AGCGGACACTTCGTAG-1,1,62,62,8913,5747 ATACGTTATGCACGGA-1,1,63,63,9032,5816 GAGTAGATACTAGTTG-1,1,62,64,8913,5885 CCAGCTCGAACGCATT-1,1,63,65,9032,5954 TGCTTCCCAAGCAGTA-1,1,62,66,8913,6023 GTAGACACGCCTGACT-1,1,63,67,9032,6092 CTCACATTTACTAAAT-1,1,62,68,8913,6160 GAATGCGAATCGGTTC-1,1,63,69,9032,6229 TAAGGCCCGTCACCCT-1,1,62,70,8913,6298 GAGTATGCGCGTGCAT-1,1,63,71,9032,6367 ATCCACATCGACAGAA-1,1,62,72,8913,6436 TAGACGAAACGCCAAT-1,1,63,73,9032,6505 CAGCAGTCTGTGCTGC-1,1,62,74,8913,6573 CTCACTGTGATACTTA-1,1,63,75,9032,6642 TTAATTGCTTTGGGTG-1,1,62,76,8913,6711 GTAGTCTACGATATTG-1,1,63,77,9032,6780 TTGCCGCTTTCTAGTA-1,1,62,78,8913,6849 GAACCTTTAACGATCC-1,1,63,79,9032,6918 TGGACCAATCTAAGAT-1,1,62,80,8913,6986 CTAGTAGAAAGGGATT-1,1,63,81,9032,7055 AACCTCGCTTTAGCCC-1,1,62,82,8913,7124 GAGTGTCAACCAGAAA-1,1,63,83,9032,7193 GTGATCCTTGTCATGA-1,1,62,84,8913,7262 CTCAACTAACCCGGAT-1,1,63,85,9032,7331 AGTGGCGTCTGAAGGT-1,1,62,86,8912,7399 GGGAAGGGCTTTCTCA-1,1,63,87,9032,7468 AGGCCTGAGAATCTCG-1,1,62,88,8912,7537 AGCCACAGGTTACCCG-1,1,63,89,9032,7606 TGCAATCTAACACGGT-1,1,62,90,8912,7675 GTTTGGGTTTCGCCCG-1,1,63,91,9032,7743 CCTATGTCCACTCCAC-1,1,62,92,8912,7812 CTGCCTTTCTAGTAAA-1,1,63,93,9032,7881 TTAGTAAACCTGCTCT-1,1,62,94,8912,7950 TTGTGGTGGTACTAAG-1,1,63,95,9032,8019 CCTCCTGTTGTGTCGT-1,1,62,96,8912,8088 
ACAAAGCATGACCTAG-1,1,63,97,9032,8156 ATAGCAACTAGGGAAG-1,1,62,98,8912,8225 CCGCTCTTCCGAACTA-1,1,63,99,9032,8294 TAGAATAGCCGATGAA-1,1,62,100,8912,8363 GGTCTCCGTCCAGGTT-1,1,63,101,9032,8432 TAATTACGTCAGTAGA-1,1,62,102,8912,8501 TACGTAAAGCGGAGTG-1,1,63,103,9032,8569 GCTACAATCGAGGATA-1,1,62,104,8912,8638 TCAGCCAATCCGTAAA-1,1,63,105,9032,8707 ATCATAGCCCTATGTA-1,1,62,106,8912,8776 GAGGCCTGTTGATACA-1,1,63,107,9032,8845 ATATACGCTCGTGACG-1,1,62,108,8912,8914 CCCTTTAATGGAGTTC-1,1,63,109,9032,8982 TTACACGATCTGCGAC-1,1,62,110,8912,9051 TCGCATAAAGGGCGCA-1,1,63,111,9032,9120 TGTAGCCATCCCATTC-1,1,62,112,8912,9189 GGGTCTATCGCTTTCC-1,1,63,113,9032,9258 GGAGGGTCAAGTAAGA-1,0,62,114,8912,9327 TACGGCATGGACGCTA-1,0,63,115,9032,9395 CACGTCAATCAATGGA-1,0,62,116,8912,9464 CGCTTGGACGGATAGA-1,0,63,117,9032,9533 CGCGGTACGGTATACA-1,0,62,118,8912,9602 CCTACTCAACACGATT-1,0,63,119,9032,9671 CGTTTGGTGTTGTGGG-1,0,62,120,8912,9740 GTTGTTGCAAGATGGC-1,0,63,121,9032,9808 CATTCGTCGTAGCGGT-1,0,62,122,8912,9877 CAGCCTCGATAGCGGT-1,0,63,123,9032,9946 TCATAAGTCCAAGAAG-1,0,62,124,8912,10015 AAAGTGCCATCAATTA-1,0,63,125,9032,10084 GTTGCAGTCGACAACA-1,0,62,126,8912,10153 TAAATGGGCTACTGAG-1,0,63,127,9032,10221 GGACAAGCCATGATCG-1,0,64,0,9152,1480 TCCAATGCGTCGCCGC-1,0,65,1,9272,1549 AAAGAATGACCTTAGA-1,0,64,2,9152,1618 CCTATTATTCCGGCCG-1,0,65,3,9272,1687 CAGTAAGGGACGTCTC-1,1,64,4,9152,1755 GGGATGGACCCGCGTC-1,0,65,5,9272,1824 GACGCATACCCGTCGG-1,1,64,6,9152,1893 GGGCAGGATTTCTGTG-1,0,65,7,9272,1962 TATCTGAGCCGATATT-1,1,64,8,9152,2031 AATGTATGGCACTGTA-1,0,65,9,9272,2100 GCCTTTGTCAGTGGAC-1,1,64,10,9152,2168 AACCATAGGGTTGAAC-1,0,65,11,9272,2237 AAACTTAATTGCACGC-1,1,64,12,9152,2306 TAGGCGGCTGCAAAGA-1,0,65,13,9272,2375 TCCATCAATACTAATC-1,1,64,14,9152,2444 ACCTCGAACTTATGCT-1,0,65,15,9272,2512 ACAAACCATGCGTCCT-1,1,64,16,9152,2581 ACACCGGTCTGACCGC-1,0,65,17,9272,2650 ATGGCAGCATTACGAT-1,1,64,18,9152,2719 CTGGTGAATGGGCCTA-1,0,65,19,9272,2788 CACAGAGACGAGGACG-1,0,64,20,9152,2857 GCACAAACCCTAGATG-1,0,65,21,9272,2925 ACTATGTCCAGCTGCC-1,0,64,22,9152,2994 AAGCCGCCGTGAAATC-1,0,65,23,9272,3063 ATGGGTGTATACCTCC-1,0,64,24,9152,3132 TAAGCGCGAATCAAAT-1,0,65,25,9272,3201 AGCCCGGCATTAGAGG-1,0,64,26,9152,3270 GTGATCAAGCGTGCAC-1,0,65,27,9272,3338 CTGCCTTTAATACCTT-1,0,64,28,9152,3407 TACCTCCACACCAATG-1,0,65,29,9272,3476 GAGCATCAACAACTTG-1,0,64,30,9152,3545 CATACGAACTAGCTGG-1,0,65,31,9272,3614 GGGAACGGTTTCAGAT-1,0,64,32,9152,3683 AGGGTGCCGTTCTTTA-1,0,65,33,9272,3751 ACGACTGGTCATACTC-1,0,64,34,9152,3820 GGATCTTACTGCCCTT-1,0,65,35,9272,3889 TACTCCTCTAGTTGAC-1,0,64,36,9152,3958 GTAGCTCCGGGAGGCT-1,0,65,37,9272,4027 GGACTCGTGAGTGGTC-1,0,64,38,9152,4096 GTAGTTCGAAGGCGAA-1,0,65,39,9272,4164 CTCAAACCACTGCCCG-1,0,64,40,9152,4233 GAGGGAGTCAGATCGC-1,0,65,41,9272,4302 GCGTCCCTAAGACATA-1,0,64,42,9152,4371 TCTCTTACCGCGAACC-1,0,65,43,9272,4440 GGCTACTATACACTCC-1,0,64,44,9152,4509 CGTTGCCCGCGTGGGA-1,0,65,45,9272,4577 TATAGAGTCGCTTGAA-1,0,64,46,9152,4646 ATGTTGTAGTCTGTTT-1,0,65,47,9272,4715 AGTATACACAGCGACA-1,0,64,48,9152,4784 TGTGGTTGCTAAAGCT-1,0,65,49,9272,4853 TGATTCCCGGTTACCT-1,1,64,50,9152,4922 CACTGACGATTGTGGA-1,1,65,51,9272,4990 AACATTGTGACTCGAG-1,1,64,52,9152,5059 GTCCAATATTTAGCCT-1,1,65,53,9272,5128 GCTCTTTCCGCTAGTG-1,1,64,54,9152,5197 GGGTCAGGAGCTAGAT-1,1,65,55,9272,5266 AGGTGGTGACCTTCGC-1,1,64,56,9152,5334 CCCGGGTCGTTCAGGG-1,1,65,57,9272,5403 AGCCAAGCTTTGTGTC-1,1,64,58,9152,5472 TGTGGCGGGCTTCTGG-1,1,65,59,9272,5541 CGAGGGTATCCAGGTG-1,1,64,60,9152,5610 CGTTCAGACCCGCGAA-1,1,65,61,9272,5679 TCATATGAGCTTTGTT-1,1,64,62,9152,5747 AGTTGACGGTCCTTGC-1,1,65,63,9272,5816 
ATAATTAGCTAAGTAG-1,1,64,64,9152,5885 GGTCTTGAGCGCTCTT-1,1,65,65,9272,5954 CGTTGTCGGCAATTGA-1,1,64,66,9152,6023 TCTTGCTCCCGATACT-1,1,65,67,9272,6092 AGCAGAAGGAGAAAGA-1,1,64,68,9152,6160 GAAGCCACTGATTATG-1,1,65,69,9272,6229 TGCTCTTGAGAGTTTG-1,1,64,70,9152,6298 GACATCCGTCGAACTG-1,1,65,71,9272,6367 AGATATAATACGACTA-1,1,64,72,9152,6436 TCGCTACTGGCTTTGA-1,1,65,73,9272,6505 GTGACAGCTTCCCACT-1,1,64,74,9152,6573 GTGACGCAGGTTTCAT-1,1,65,75,9272,6642 GGGAAGACGGTCTGTC-1,1,64,76,9152,6711 CGTCGCATGTGAGCCA-1,1,65,77,9272,6780 AGAAGGTTGTAGGTCG-1,1,64,78,9152,6849 GGCCGGCGTCTGCTAT-1,1,65,79,9272,6918 TTGACCAGGAACAACT-1,1,64,80,9152,6986 AGTCGTATAAAGCAGA-1,1,65,81,9272,7055 GGCGTAGGGAAAGCTG-1,1,64,82,9152,7124 AAACCTAAGCAGCCGG-1,1,65,83,9272,7193 ATTGCCTTTATGTTTG-1,1,64,84,9152,7262 CAGGCAGTCTTACCAG-1,1,65,85,9272,7331 AGCTAAGTACGCAGGC-1,1,64,86,9152,7399 AAATCCGATACACGCC-1,1,65,87,9272,7468 CGAGAGCTTTCACTAC-1,1,64,88,9152,7537 CGCCCAGCGTTTCACG-1,1,65,89,9272,7606 TTAAGTATTGTTATCC-1,1,64,90,9152,7675 AAACGGGCGTACGGGT-1,1,65,91,9272,7744 GTATCCTTTGGTAACC-1,1,64,92,9152,7812 CATAGTCAAATACATA-1,1,65,93,9272,7881 TCTATGCTATAACGAC-1,1,64,94,9152,7950 ACGTTCTGTACAAGTC-1,1,65,95,9272,8019 CGCGAAGTGGCATACT-1,1,64,96,9152,8088 ACCAACCGCACTCCAC-1,1,65,97,9272,8156 GTCATGCGCGAGGGCT-1,1,64,98,9152,8225 TAGGCTAAAGTGGCAC-1,1,65,99,9272,8294 TGGCTACACTCTACCT-1,1,64,100,9152,8363 TGTCCACGGCTCAACT-1,1,65,101,9272,8432 AATTGCAGCAATCGAC-1,1,64,102,9152,8501 GTCAACCAGGCCTATA-1,1,65,103,9272,8569 ATCTTATCGCACACCC-1,1,64,104,9152,8638 GTCTATTGCATGCTCG-1,1,65,105,9272,8707 GCACGTGGTTTACTTA-1,1,64,106,9152,8776 ATTCGCGCCTTGAGAG-1,1,65,107,9272,8845 AGCGATGCGCCTAATA-1,1,64,108,9152,8914 TCTATAGGTGGGTAAT-1,1,65,109,9272,8982 AGATACCAATAGAACC-1,1,64,110,9152,9051 CGTGTATGGGAGCTGA-1,1,65,111,9272,9120 ATGTACATGCGGTGAG-1,1,64,112,9152,9189 TTGGCTCAATATGTGT-1,1,65,113,9272,9258 GTTCGGAGCACTCAAC-1,0,64,114,9152,9327 TCCCTCTTCTCAAGGG-1,0,65,115,9272,9395 TTACGTATCTATGACA-1,0,64,116,9152,9464 CCCATATAGGTCGATT-1,0,65,117,9272,9533 GTCCACGTCGCATTCG-1,0,64,118,9152,9602 AATTGTGGTTGCCAAA-1,0,65,119,9272,9671 AGGAACGAACGACTTC-1,0,64,120,9152,9740 TAATTCCAATGCTTCA-1,0,65,121,9272,9808 AGGGCCCTAATGTTCT-1,0,64,122,9152,9877 AGGTGTTGCCGACCAC-1,0,65,123,9272,9946 CTGAGAGTAGAAATAC-1,0,64,124,9152,10015 TAGAAGAAGGGTTACA-1,0,65,125,9272,10084 GGTGACTGATAGAGAG-1,0,64,126,9152,10153 TACCCTCGGTAACCCT-1,0,65,127,9272,10221 TGTAAGACTGATAAGA-1,0,66,0,9392,1480 GTCACTCATGAGCGAT-1,0,67,1,9512,1549 CGGTTTACTGAACATT-1,0,66,2,9392,1618 GAGATCGATCTTACTC-1,0,67,3,9512,1687 TCTTTCATCCGTCCTT-1,0,66,4,9392,1755 TCGGATCTGGATGACC-1,0,67,5,9512,1824 GTCCTAGGATACCTTA-1,0,66,6,9392,1893 TGTACCCGACCCTAAT-1,0,67,7,9512,1962 AGGTTCTCCTTTCCGG-1,0,66,8,9392,2031 AATTGCGTGGATTACA-1,0,67,9,9512,2100 CGATAGCGATACAGTG-1,0,66,10,9392,2168 CGCCCTAATTGTTCAA-1,0,67,11,9511,2237 ATTATGAGACCCAATT-1,0,66,12,9392,2306 CCTCCTAGCTAGAGTC-1,0,67,13,9511,2375 CGAAGGGTTTCAGATT-1,0,66,14,9392,2444 CACAAGCTAAGAAAGG-1,0,67,15,9511,2512 CACCGGTAGAGACATT-1,0,66,16,9392,2581 ACTCATGGCAGCCTTC-1,0,67,17,9511,2650 CATCCAATATAGTTTG-1,0,66,18,9392,2719 CAGATATGAAGATGAC-1,0,67,19,9511,2788 GCAAAGGGCGTTAGCC-1,0,66,20,9392,2857 CTGTTGTTCAGTCGTA-1,0,67,21,9511,2925 AAAGGCCCTATAATAC-1,0,66,22,9392,2994 AACATTGAAGTTGATC-1,0,67,23,9511,3063 GCTAGCGATAGGTCTT-1,0,66,24,9392,3132 ATTGCCTATTAGACCG-1,0,67,25,9511,3201 GGGCGTTTACATTCAT-1,0,66,26,9392,3270 CCTCTACAAGGATTGG-1,0,67,27,9511,3338 AGTGACTGTGACACAA-1,0,66,28,9392,3407 AGCCCTCCCTGGTGGC-1,0,67,29,9511,3476 CGGTTGAGTATCCTTC-1,0,66,30,9392,3545 
CATTATCCCATTAACG-1,0,67,31,9511,3614 AATGTGCTAATCTGAG-1,0,66,32,9392,3683 CTTAGTTGAGGAATCG-1,0,67,33,9511,3751 CTTCTGGGCGTACCTA-1,0,66,34,9392,3820 AGTCAAATGATGTGAT-1,0,67,35,9511,3889 TGAACAAGCAGGGACT-1,0,66,36,9392,3958 AATACCGGAGGGCTGT-1,0,67,37,9511,4027 GTTTGGTGATCGGTGC-1,0,66,38,9392,4096 GACTGCTGGTGAGAAA-1,0,67,39,9511,4164 CGATCCTCGCAACATA-1,0,66,40,9392,4233 CCAACCTTATGTAACT-1,0,67,41,9511,4302 CGTGGTACCCAAAGGC-1,0,66,42,9392,4371 AACCCGACAACCCGTG-1,0,67,43,9511,4440 TATGTAAAGTGCTTAA-1,0,66,44,9392,4509 GCAAGTGTAAAGCATG-1,0,67,45,9511,4577 GTAACTGCCCAAGGAG-1,0,66,46,9392,4646 GGTGCATAAATGATTA-1,0,67,47,9511,4715 TAGTCCCGGAGACCAC-1,0,66,48,9392,4784 CGGCCAGAGCGACCAT-1,0,67,49,9511,4853 TGAATGAGTGTTTCCC-1,0,66,50,9392,4922 TTCACTTCCTAGAACG-1,0,67,51,9511,4990 CGCTTAGTATTGATAC-1,0,66,52,9392,5059 AACGCGACCTTGGGCG-1,0,67,53,9511,5128 GTGCACCAGCTTCAAC-1,1,66,54,9392,5197 TTCTATTAAACGCAGC-1,1,67,55,9511,5266 TGGCAAGCACAAGTCG-1,1,66,56,9392,5334 ATCTGCTGTTATCGCC-1,1,67,57,9511,5403 AATTCCAAGCATGTAC-1,1,66,58,9392,5472 AAACGGTTGCGAACTG-1,1,67,59,9511,5541 GAAGGAGTCGAGTGCG-1,1,66,60,9392,5610 GGCAAGCCCATAGTGG-1,1,67,61,9511,5679 TGACCAAATCTTAAAC-1,1,66,62,9392,5747 TTCGGTACTGTAGAGG-1,1,67,63,9511,5816 ATGTACGATGACGTCG-1,1,66,64,9392,5885 ATCTAGCTTGTGAGGG-1,1,67,65,9511,5954 AGCGGGTCTGACACTC-1,1,66,66,9392,6023 GAGGTACGCGTGTCCC-1,1,67,67,9511,6092 GCACTGCCTACCTTTA-1,1,66,68,9392,6160 GGAGCGAGGCCTACTT-1,1,67,69,9511,6229 AAACTCGGTTCGCAAT-1,1,66,70,9392,6298 GGCGCTTCATTCCCTG-1,1,67,71,9511,6367 TGCTCGGTGGGTCACC-1,1,66,72,9392,6436 AAGCGCAGGGCTTTGA-1,1,67,73,9511,6505 GACCTTCCACGTCTAC-1,1,66,74,9392,6573 GTTAGCCCATGACATC-1,1,67,75,9511,6642 AATTCGATTCGAGGAT-1,1,66,76,9392,6711 AGTCTTTAAAGTGTCC-1,1,67,77,9511,6780 GACTAGGCCGTTAGGT-1,1,66,78,9392,6849 TCCAAGCCTAGACACA-1,1,67,79,9511,6918 GATTGGGAAAGGTTGT-1,1,66,80,9392,6986 GATGAGGAACCTTCGG-1,1,67,81,9511,7055 TGAATACCGACGCGTA-1,1,66,82,9392,7124 AATCGTGAGCCGAGCA-1,1,67,83,9511,7193 GCGAAGAATCTGACGG-1,1,66,84,9392,7262 GAACAACTGGGATGAA-1,1,67,85,9511,7331 TCGCCGACATATTCGC-1,1,66,86,9392,7399 CGTAGAGAGTAATTAT-1,1,67,87,9511,7468 CGATAGTCGTACTGCA-1,1,66,88,9392,7537 CGCATGCCGAATGCGT-1,1,67,89,9511,7606 CTCTAGCCCTCGGAAA-1,1,66,90,9392,7675 TCCAACTCAGCTATCT-1,1,67,91,9511,7744 GTCTAGTGAGCCGCTT-1,1,66,92,9392,7812 TACCGAATAATTGTAA-1,1,67,93,9511,7881 ATATAACACGGGCGCA-1,1,66,94,9392,7950 GCATGCTAATAACGAT-1,1,67,95,9511,8019 TCCGGGCTTGACGGGA-1,1,66,96,9392,8088 ATTATTCAGAGTCACT-1,1,67,97,9511,8156 TAGCAGTATGACTAAA-1,1,66,98,9392,8225 TATGAAGAATTAAGGT-1,1,67,99,9511,8294 CAAGGCCAGTGGTGCA-1,1,66,100,9392,8363 TGCGTACGGCTAATTG-1,1,67,101,9511,8432 CAGATAATGGGCGGGT-1,1,66,102,9392,8501 GAGAACGGTTCTGACT-1,1,67,103,9511,8569 CGCTTCGGTCTAAGAC-1,1,66,104,9392,8638 AGCATTACGAGGCAAG-1,1,67,105,9511,8707 AAGTTGTGATGTTATA-1,1,66,106,9391,8776 TTGCTCATAGTACGTG-1,1,67,107,9511,8845 TACTTAAACATGTACA-1,1,66,108,9391,8914 AGTGAGTCGAATTAAC-1,1,67,109,9511,8982 TAGGCCTATATAGTCT-1,1,66,110,9391,9051 ACTCGCCGTTCGATAA-1,1,67,111,9511,9120 GGTACGTTGCGGCCGG-1,1,66,112,9391,9189 TATTGCCGGGCTTGTA-1,1,67,113,9511,9258 GGTAAACTCTGCGCTG-1,0,66,114,9391,9327 TCCCTTGTCTGAAACT-1,0,67,115,9511,9395 CATGATCGCTTTGAGA-1,0,66,116,9391,9464 GGACGGGCGACCAACC-1,0,67,117,9511,9533 GAGAGGCCTATGTGTA-1,0,66,118,9391,9602 TCGTAACTCCCAAGAC-1,0,67,119,9511,9671 CGTATCTAGAACTAAG-1,0,66,120,9391,9740 GCACCTTCCCGAAGGT-1,0,67,121,9511,9808 TACCTCTTTACCATCC-1,0,66,122,9391,9877 TGACTCTAACTGGTAA-1,0,67,123,9511,9946 GAGAAACTGGATCCCA-1,0,66,124,9391,10015 ATCCGGACCAGCCTGA-1,0,67,125,9511,10084 
TTATTTAGGTTCCTTA-1,0,66,126,9391,10153 CGAATTCCCGGTTCAA-1,0,67,127,9511,10221 TAGTTTGATCGGTCGC-1,0,68,0,9631,1480 AGTCGAAACGATTCAG-1,0,69,1,9751,1549 ACGTCGGGCAACTCGG-1,0,68,2,9631,1618 ACTTGCTCTATCTACC-1,0,69,3,9751,1687 ATTACGGGCTACGGTT-1,0,68,4,9631,1755 GCAAACGTAAGCGACC-1,0,69,5,9751,1824 TTCGGACTGGGCATGG-1,0,68,6,9631,1893 GGGCCATTCGTGCTGG-1,0,69,7,9751,1962 GAGTTGTCACCAGTCT-1,0,68,8,9631,2031 AGTAATTTGCAAGCGT-1,0,69,9,9751,2100 GTGGCCGGTTTCTCGG-1,0,68,10,9631,2168 ATCTTAGGGCATTAAT-1,0,69,11,9751,2237 CTACATATCGCGGGAC-1,0,68,12,9631,2306 ATGGTATTTACTGATT-1,0,69,13,9751,2375 CCGTCAACCTCTGGCG-1,0,68,14,9631,2444 GCTTCGACGTTCAATC-1,0,69,15,9751,2513 GACCGATTAAATATGT-1,0,68,16,9631,2581 GTCGAATTTGGGCGCT-1,0,69,17,9751,2650 TCACGGCCCAAGAGAG-1,0,68,18,9631,2719 CTACGATCCTATCCTA-1,0,69,19,9751,2788 TGCAGTGGTAGGGAAC-1,0,68,20,9631,2857 TTGCCTAATCCAAAGT-1,0,69,21,9751,2925 AGCGAGACGTGAAGGC-1,0,68,22,9631,2994 TTCTACCTCAATCGGT-1,0,69,23,9751,3063 TACGCTGCTGTGTTAA-1,0,68,24,9631,3132 TAGTCCTGCACTAAGC-1,0,69,25,9751,3201 AGCACACGTTTAGACT-1,0,68,26,9631,3270 GTTCGACAATTGTATA-1,0,69,27,9751,3338 CCATGTTCATCTATAT-1,0,68,28,9631,3407 CCCTCGATAATACACA-1,0,69,29,9751,3476 ACTGCCCGCCATTCTC-1,0,68,30,9631,3545 CTATATCCAGCCTGGC-1,0,69,31,9751,3614 CGCACGTCTGTTTATG-1,0,68,32,9631,3683 AATCAGGTTTCATTTA-1,0,69,33,9751,3751 AGAGAAACACCAGAAA-1,0,68,34,9631,3820 ACCCGATTGGTTCCGA-1,0,69,35,9751,3889 TACCAGGAATCCCGTC-1,0,68,36,9631,3958 AAGTGCTTCTCTATTG-1,0,69,37,9751,4027 TGCATGTGACCCATAG-1,0,68,38,9631,4096 TAGATTCAAAGTGCGG-1,0,69,39,9751,4164 AGGGTCAGAGCACTCG-1,0,68,40,9631,4233 GTGAGATAACCTTATA-1,0,69,41,9751,4302 GTGTCAGTGTACGTGG-1,0,68,42,9631,4371 CTGTAGTGAGGATCGA-1,0,69,43,9751,4440 GATAGTGCGAGTAAGT-1,0,68,44,9631,4509 AGGGTTCAGACGGTCC-1,0,69,45,9751,4577 GGACTCGACAGCGCAT-1,0,68,46,9631,4646 CACTCTCAAGCATCGA-1,0,69,47,9751,4715 GAGCGGAATGCGGTGT-1,0,68,48,9631,4784 CAGTTCGAGGACCCGA-1,0,69,49,9751,4853 CGTAACTTCGACACTT-1,0,68,50,9631,4922 TGATCACCACACTGAC-1,0,69,51,9751,4990 CATACTTCTTTCTCCG-1,0,68,52,9631,5059 CAAGTAAGTGATAGAC-1,0,69,53,9751,5128 AGCCCGCAACAAGCAG-1,0,68,54,9631,5197 GCTCCATGCAAAGCAA-1,0,69,55,9751,5266 TCGTACCGACGTCAAG-1,1,68,56,9631,5334 ATTATCGGAATGTACG-1,0,69,57,9751,5403 ACCTACTATAAATCTA-1,1,68,58,9631,5472 TATCCTATCAACTGGT-1,0,69,59,9751,5541 GCGCGGTCTAGTAACT-1,1,68,60,9631,5610 CCTTCGTATAGAATCC-1,1,69,61,9751,5679 TGGGCGATACAATAAG-1,1,68,62,9631,5747 TGACATGTAACGTGAC-1,1,69,63,9751,5816 TTGGGAAGACGAGCCG-1,1,68,64,9631,5885 GGTGTAAATCGATTGT-1,1,69,65,9751,5954 GAACTGTGGAGAGACA-1,1,68,66,9631,6023 TCCACATCGTATATTG-1,1,69,67,9751,6092 CCTATGGTCAAAGCTG-1,1,68,68,9631,6160 TCCTTGTCCTTTAATT-1,1,69,69,9751,6229 AAGTTTATGGGCCCAA-1,1,68,70,9631,6298 GTCCGGGTTCACATTA-1,1,69,71,9751,6367 ATGTAGCGCGCGTAGG-1,1,68,72,9631,6436 TACGGAAGCCAAACCA-1,1,69,73,9751,6505 GCCCTGAGGATGGGCT-1,1,68,74,9631,6573 CGGGCGATGGATCACG-1,1,69,75,9751,6642 TGCGGACTTGACTCCG-1,1,68,76,9631,6711 TAATACTAGAACAGAC-1,1,69,77,9751,6780 TCAAATTGTTGTGCCG-1,1,68,78,9631,6849 GGGCAGTCAACGCCAA-1,1,69,79,9751,6918 TCGCTGCCAATGCTGT-1,1,68,80,9631,6986 TACGTGGGCCCAGGGC-1,1,69,81,9751,7055 GCCTACGTTCTGTGCA-1,1,68,82,9631,7124 TTGCGTGTGTAGGCAT-1,1,69,83,9751,7193 TCCTAAAGATTCAGAC-1,1,68,84,9631,7262 TGGGTGCACAAGCCAT-1,1,69,85,9751,7331 GGGAGCGACCGTAGTG-1,1,68,86,9631,7399 CATAGAGGAGATACTA-1,1,69,87,9751,7468 CTTCGATTGCGCAAGC-1,1,68,88,9631,7537 GTGCCTGAGACCAAAC-1,1,69,89,9751,7606 TCATCCTCAGCTGCTT-1,1,68,90,9631,7675 TGCAACTACTGGTTGA-1,1,69,91,9751,7744 ATGTTGATTAGAGACT-1,1,68,92,9631,7812 GTTTCTAGAGGCGCGG-1,1,69,93,9751,7881 
TACAAGGGCTTCTTTA-1,1,68,94,9631,7950 ACAGGCACGGATCCTT-1,1,69,95,9751,8019 CCCAAGTCATTACACT-1,1,68,96,9631,8088 ACTCTGACCTAATAGA-1,1,69,97,9751,8156 GGATGGCTTGAAGTAT-1,1,68,98,9631,8225 ACCCTCCCGTCAGGGC-1,1,69,99,9751,8294 AGCTTCTTCTCGAGCA-1,1,68,100,9631,8363 TTGCGCTTGATCAATA-1,1,69,101,9751,8432 TGGGTGTAATAGATTT-1,1,68,102,9631,8501 CATGGTTTATTAATCA-1,1,69,103,9751,8569 TTGCCTTCTCGCCGGG-1,1,68,104,9631,8638 TTAAAGTAAGTCGCCA-1,1,69,105,9751,8707 AGTTGGCAAGGCTAGA-1,1,68,106,9631,8776 CCGCACTTGCAATGAC-1,1,69,107,9751,8845 AATGTTGTCGTGAGAC-1,1,68,108,9631,8914 CCAAGGTTGCCCTTTC-1,1,69,109,9751,8982 TGTGACTAGAGTTTGC-1,1,68,110,9631,9051 TTGTGATCTGTTCAGT-1,1,69,111,9751,9120 GGAGTTGATTCTGTGT-1,1,68,112,9631,9189 GTAGTGAGCAACCTCA-1,1,69,113,9751,9258 AAGACATACGTGGTTT-1,1,68,114,9631,9327 CATCCCGAGATTCATA-1,0,69,115,9751,9395 GGCCGCAGGAACCGCA-1,0,68,116,9631,9464 CCAGAAACTGATGCGA-1,0,69,117,9751,9533 ACCCGTAGTCTAGTTG-1,0,68,118,9631,9602 AGGGTCTGGACGCAGT-1,0,69,119,9751,9671 TTGAAACCCTCATTCC-1,0,68,120,9631,9740 TCAATCCCGCGCCAAA-1,0,69,121,9751,9808 AGGACATCGGCACACT-1,0,68,122,9631,9877 GACAAGACGCCCGTGC-1,0,69,123,9751,9946 GAGTAAATTAAGAACC-1,0,68,124,9631,10015 TAAGTCGGTGAGCTAG-1,0,69,125,9751,10084 CTTACTGACTCCTCTG-1,0,68,126,9631,10153 TTAACTGATCGTTTGG-1,0,69,127,9751,10221 GGGCGATCCATAGGCC-1,0,70,0,9871,1480 CGTATTGTTTGGCGCC-1,0,71,1,9991,1549 CGCGTGGGCCTGTGTT-1,0,70,2,9871,1618 AAATCTGCCCGCGTCC-1,0,71,3,9991,1687 GCTGAAGGGTTCTTGG-1,0,70,4,9871,1755 TAATTGCGCTGATTAC-1,0,71,5,9991,1824 TCGCGCGTTTACATGA-1,0,70,6,9871,1893 TAACTGAAATACGCCT-1,0,71,7,9991,1962 CCTGTCGTGTATGAAG-1,0,70,8,9871,2031 CGGGAACGCCCTGCAT-1,0,71,9,9991,2100 AGTTGCGGTCCTCAAC-1,0,70,10,9871,2168 GGGTCCTTGGAAGAAG-1,0,71,11,9991,2237 CGGGAAGTACCGTGGC-1,0,70,12,9871,2306 ACGGTCACCGAGAACA-1,0,71,13,9991,2375 TAGACTCAGTTGGCCT-1,0,70,14,9871,2444 CCTTCCGCAACGCTGC-1,0,71,15,9991,2513 CAGTGTTAATCTCTCA-1,0,70,16,9871,2581 CAGGCCAGTACCACCT-1,0,71,17,9991,2650 GATCGCTGTGGTGCGT-1,0,70,18,9871,2719 GTATCTCGGGCGCTTT-1,0,71,19,9991,2788 CCGGGCTAAGAATTTC-1,0,70,20,9871,2857 CTTCACGCCCTGGTAC-1,0,71,21,9991,2925 CGGATGAATGCTGTGA-1,0,70,22,9871,2994 CTCTGCAGGCATTCTT-1,0,71,23,9991,3063 TCAAAGTCACGGCGTC-1,0,70,24,9871,3132 TGGGTTTCGGGCGTAC-1,0,71,25,9991,3201 TGCTATGGCAAAGGGA-1,0,70,26,9871,3270 TTCACGGTCGTCACGT-1,0,71,27,9991,3338 CGTAAAGCAAGAAATC-1,0,70,28,9871,3407 CTGATCCCTTTATGCA-1,0,71,29,9991,3476 GAGTATACCCTAATCA-1,0,70,30,9871,3545 CCACGGCAGGTGTAGG-1,0,71,31,9990,3614 ACGTACTTTGGCACGG-1,0,70,32,9871,3683 CCTGAGAATAAATGCA-1,0,71,33,9990,3751 TTGATGCCGCTCGTCG-1,0,70,34,9871,3820 GCCAGGAAAGAACACT-1,0,71,35,9990,3889 TCCGACCGCTAATCAA-1,0,70,36,9871,3958 TATATCAAAGTGATCT-1,0,71,37,9990,4027 CCTCGGTTTCCTTGCC-1,0,70,38,9871,4096 TAAATTTAGTAACACC-1,0,71,39,9990,4164 CTCGTTACGGCTACCA-1,0,70,40,9871,4233 GCCGTCGGTTTCGGGC-1,0,71,41,9990,4302 GTTCCGTCCGCCTGCA-1,0,70,42,9871,4371 CTAGGGATAGGGACAA-1,0,71,43,9990,4440 GTCCAGGCACGTGTGC-1,0,70,44,9871,4509 GAGGAATATCTCTTTG-1,0,71,45,9990,4577 CGTTCAAGGAAACGGA-1,0,70,46,9871,4646 GTGGGAACAAACCGGG-1,0,71,47,9990,4715 GGGCACGTAGTACTGT-1,0,70,48,9871,4784 ACTACAAAGAGAGGTG-1,0,71,49,9990,4853 GGTGTAGGTAAGTAAA-1,0,70,50,9871,4922 CCTGTTCAACCTCGGG-1,0,71,51,9990,4990 TGAGGTGTGTGGCGGA-1,0,70,52,9871,5059 ATTGTCTGTTTCATGT-1,0,71,53,9990,5128 GCCGGTCGTATCTCTC-1,0,70,54,9871,5197 ATATGGGATAGCAACT-1,0,71,55,9990,5266 GATAGAACCCGCTAGG-1,0,70,56,9871,5335 ATCCCATTTCCGTGGG-1,0,71,57,9990,5403 GATGCTACCTTCGATG-1,0,70,58,9871,5472 TGCAAAGTTCGTCTGT-1,0,71,59,9990,5541 GTTCAGTCGCCAAATG-1,0,70,60,9871,5610 
GTCTCCGCCTCAATAC-1,0,71,61,9990,5679 AGTAGAAGGCGCCTCA-1,0,70,62,9871,5747 GTTGTAGATTTATGAG-1,0,71,63,9990,5816 CTATTTGCTTGGAGGA-1,1,70,64,9871,5885 AGCCATATAGTATGTG-1,0,71,65,9990,5954 TGTTCCGGCCTGAGCT-1,1,70,66,9871,6023 TGCCGTGGGACCCAAT-1,1,71,67,9990,6092 TTGCAAGAAGACTCCT-1,1,70,68,9871,6160 CCGCTCCAGGGCGATC-1,1,71,69,9990,6229 CAAACCCTCCGGCGGG-1,1,70,70,9871,6298 CGGACCTTTACGTCCC-1,1,71,71,9990,6367 ACGTAGGAGAGTCGCT-1,1,70,72,9871,6436 CTGCGACCTCGCCGAA-1,1,71,73,9990,6505 GGCCCAGCTGGTTTGC-1,1,70,74,9871,6573 GGATTAATCATGGACC-1,1,71,75,9990,6642 GAGATGGCTTTAATCA-1,1,70,76,9871,6711 AGTACAGAAGCTTATA-1,1,71,77,9990,6780 GCGGCTTTAGCAAGTT-1,1,70,78,9871,6849 ACCGAGTCTCCTTATT-1,1,71,79,9990,6918 GACCACACTTCCCTTT-1,1,70,80,9871,6986 TGTAGCCAATTCCGTT-1,1,71,81,9990,7055 CTCGTCTGTGCCTTCG-1,1,70,82,9871,7124 GGACTCTTTGACTAAG-1,1,71,83,9990,7193 TCATCGACGACCGTCG-1,1,70,84,9871,7262 TACTACGTGCAATGCG-1,1,71,85,9990,7331 CCCTACTTGAACAATG-1,1,70,86,9871,7399 GCAGCTGTCAACGCAT-1,1,71,87,9990,7468 ATCTACCATCTGCTCC-1,1,70,88,9871,7537 TCTCAAATCAATCGGG-1,1,71,89,9990,7606 ACTCCGGCCGACCACT-1,1,70,90,9871,7675 TACACCTCTTCGAATC-1,1,71,91,9990,7744 GTGGCTGTTTCTGTTC-1,1,70,92,9871,7812 ACGACTCTAGGGCCGA-1,1,71,93,9990,7881 TCGTTTACGCGACCCT-1,1,70,94,9871,7950 AGGATATCCGACTGCA-1,1,71,95,9990,8019 GACACTTCCAATTACC-1,1,70,96,9871,8088 ACGGTACAGTTCAATG-1,1,71,97,9990,8157 CTTGATGACCATCCAG-1,1,70,98,9871,8225 CTGGTAAAGACTTACA-1,1,71,99,9990,8294 CTTGCCCACCCACGCA-1,1,70,100,9871,8363 CATTACGCAGGAAGGG-1,1,71,101,9990,8432 GGACAACCATGAAGCC-1,1,70,102,9871,8501 GTTATCAAGCTATCGA-1,1,71,103,9990,8569 AGGTGCACGTCCACAT-1,1,70,104,9871,8638 AAAGAATGTGGACTAA-1,1,71,105,9990,8707 ACCAAGTCATCGGCAG-1,1,70,106,9871,8776 AGTAACTATAGCAGCC-1,1,71,107,9990,8845 GGGAAAGAATGCCAAC-1,1,70,108,9871,8914 AATCGGTATAGCCCTC-1,1,71,109,9990,8982 GGCACTGCGGTGGTTT-1,1,70,110,9871,9051 CGTCCTCATCGCGTGC-1,1,71,111,9990,9120 CTTAACTTACAGTATA-1,1,70,112,9871,9189 GTGAGTCGACTAATAG-1,1,71,113,9990,9258 TGTACTTCCGGGCATG-1,1,70,114,9871,9327 CACAAACCGCAGAACT-1,0,71,115,9990,9395 GCTTTACACAACTGGG-1,0,70,116,9871,9464 CACCGATGATGGGTAC-1,0,71,117,9990,9533 TCAATATACAGGAGGC-1,0,70,118,9871,9602 AACGACCTCCTAGCCG-1,0,71,119,9990,9671 GTTACCAAGGCGTACG-1,0,70,120,9871,9740 TAGCTCACTGTGTTTG-1,0,71,121,9990,9808 TCGGCAGGGTTAAGGG-1,0,70,122,9871,9877 TGTTAACAAAGTGACT-1,0,71,123,9990,9946 CAGAGCGATGGATGCT-1,0,70,124,9871,10015 CATAGTGGGCACGCCT-1,0,71,125,9990,10084 AAACGCTGGGCACGAC-1,0,70,126,9870,10153 TGTTCTCATACTATAG-1,0,71,127,9990,10221 TCCGCTGGGTCGATCG-1,0,72,0,10110,1480 TTAGCATCCCTCACGT-1,0,73,1,10230,1549 AGCCTTGTCACTGATA-1,0,72,2,10110,1618 GTTTCTTGTTAGAGCT-1,0,73,3,10230,1687 TCGGAGTCCTGGTTGC-1,0,72,4,10110,1755 ACGGGTTGTGACCTGT-1,0,73,5,10230,1824 TGACAGAAATCTTGCT-1,0,72,6,10110,1893 TAGCCATGATTGCCTA-1,0,73,7,10230,1962 CGAAGCCACAGCATGG-1,0,72,8,10110,2031 ACCCGTAGCAGAGAAT-1,0,73,9,10230,2100 AGCTTGATCAGGGTAG-1,0,72,10,10110,2168 CTATAAGTAGGGTTTG-1,0,73,11,10230,2237 TATAGTTAGGTGTACT-1,0,72,12,10110,2306 TACGCTGATAGTTGTA-1,0,73,13,10230,2375 TTCTCAATTGCTACAA-1,0,72,14,10110,2444 GTTGCCCTAACGGGTG-1,0,73,15,10230,2513 GACATTTCGCCCAGCC-1,0,72,16,10110,2581 GTAACTACGTAGACCT-1,0,73,17,10230,2650 CCAGTGTACAGACCGA-1,0,72,18,10110,2719 GCCCGATGCCCAGTTC-1,0,73,19,10230,2788 CGAACCTCTTTCCTAG-1,0,72,20,10110,2857 CATGCACATGAGAGGC-1,0,73,21,10230,2925 CCAAATCAAAGGGCAA-1,0,72,22,10110,2994 GTAGTTTAAGCACACG-1,0,73,23,10230,3063 TAAGTGAATAGTCTAC-1,0,72,24,10110,3132 GGTCCACGTCTATTTG-1,0,73,25,10230,3201 CCCTCAGATCGAGAAC-1,0,72,26,10110,3270 
ACATTCGCGCGGAATA-1,0,73,27,10230,3338 CTATGAACACCTTGCC-1,0,72,28,10110,3407 CGAATGGTAGGTCGTC-1,0,73,29,10230,3476 TCGACATAGCGTAGCG-1,0,72,30,10110,3545 GACGTTGCTCGGCGGC-1,0,73,31,10230,3614 CTCGAGGCAAGTTTCA-1,0,72,32,10110,3683 GCTGGGAGCGCGTCAA-1,0,73,33,10230,3751 GACGATATCACTGGGT-1,0,72,34,10110,3820 CAGCTCTGGGCTCACT-1,0,73,35,10230,3889 TAGTCTGCGGCACATT-1,0,72,36,10110,3958 TGGTCGATATACCTCT-1,0,73,37,10230,4027 TTCAGTTCAAGAGGAG-1,0,72,38,10110,4096 GTCTGTAGGTTGAACA-1,0,73,39,10230,4164 GAGAGTCTCGGGAGAG-1,0,72,40,10110,4233 TTGTTTGTATTACACG-1,0,73,41,10230,4302 GCACAGCACGGGCCGA-1,0,72,42,10110,4371 AAACAGTGTTCCTGGG-1,0,73,43,10230,4440 TGGAGAATAATCGTCC-1,0,72,44,10110,4509 CGCCGCGTTCTGAACG-1,0,73,45,10230,4577 CATGCGTTGAGAGGAG-1,0,72,46,10110,4646 TACGCTCCTAGAACTG-1,0,73,47,10230,4715 CCTTGTGAACGTGGTT-1,0,72,48,10110,4784 TTAAAGGCGATGCTCG-1,0,73,49,10230,4853 TAAAGTGCACGTCTCG-1,0,72,50,10110,4922 TGTCCGCAAACAATTC-1,0,73,51,10230,4990 AGGCCTATCATACCAA-1,0,72,52,10110,5059 ACTGGGATGCCAGTGC-1,0,73,53,10230,5128 CGATCCACCATTGTTG-1,0,72,54,10110,5197 GGGCATGCATGTCGAG-1,0,73,55,10230,5266 TCCAGGCAGGACGATC-1,0,72,56,10110,5335 CAATGACCCTTAATTT-1,0,73,57,10230,5403 ATCAAGATCCCAGGAC-1,0,72,58,10110,5472 CTCAGAGCTAATGTCG-1,0,73,59,10230,5541 CTGCGATTTCGAGATT-1,0,72,60,10110,5610 ATGGTATTGGGAACCG-1,0,73,61,10230,5679 TAGGTTCTGCTGAGAA-1,0,72,62,10110,5747 GCCTGTCCCGGTGCAT-1,0,73,63,10230,5816 GGCCTGCTCTGATGTT-1,0,72,64,10110,5885 TAGAGGGAGTTTATCT-1,0,73,65,10230,5954 CTCTACATCCTGCGTG-1,0,72,66,10110,6023 ACACATTTCCGTAGAC-1,0,73,67,10230,6092 TGGGTGTTAAGTAGAA-1,0,72,68,10110,6160 GAGCTCAACATGAGCG-1,0,73,69,10230,6229 GCACACAGCTATTACC-1,0,72,70,10110,6298 GCGACGGTAGTCTCCT-1,0,73,71,10230,6367 GTATCAAGGTACTTCC-1,0,72,72,10110,6436 GGAAAGGGAATTGAGC-1,0,73,73,10230,6505 TACACAGCCGTGGTGC-1,1,72,74,10110,6573 CGGTTGGGCAGGGTCC-1,0,73,75,10230,6642 GCACGCCGATTCCCGC-1,1,72,76,10110,6711 CGCTTTCCGCCAAGGT-1,0,73,77,10230,6780 TGCCCGATAGTTAGAA-1,1,72,78,10110,6849 TCAGAACGGCGGTAAT-1,0,73,79,10230,6918 CCACTCAGATCCGCAA-1,1,72,80,10110,6986 GAAGGGTCATTAAGAC-1,0,73,81,10230,7055 GCGGTCTTGCTTTCAC-1,1,72,82,10110,7124 GGAAGGACACCGTATA-1,0,73,83,10230,7193 CGCGGGAATTCCTTTC-1,1,72,84,10110,7262 TCTATCCGATTGCACA-1,0,73,85,10230,7331 TACTCGTTTGAATCAA-1,1,72,86,10110,7399 GTCTGCCGACTCGACG-1,0,73,87,10230,7468 TACGGGTAATAACATA-1,1,72,88,10110,7537 CCAGTTCGGTAACTCA-1,1,73,89,10230,7606 CGTGCACACCACTGTA-1,1,72,90,10110,7675 CTTCAGTTGGACAACG-1,1,73,91,10230,7744 TGATCTATCACACTCT-1,1,72,92,10110,7812 TAGCAGATACTTAGGG-1,1,73,93,10230,7881 CACCGGGCATCACAAG-1,1,72,94,10110,7950 AGTGGCCCGCAAATGG-1,1,73,95,10230,8019 CTCGCTAGGTAAGCGA-1,1,72,96,10110,8088 GATAGATAGTACAGTC-1,1,73,97,10230,8157 CGAGACTACTGCTGCT-1,1,72,98,10110,8225 ACATTTGAAACCTAAC-1,1,73,99,10230,8294 GACACAGCCGGGACTG-1,1,72,100,10110,8363 ACGGAACACGAGTGCC-1,1,73,101,10230,8432 ACGCCAGTGCGTTTGC-1,1,72,102,10110,8501 GCACCTAGGCGAGTCC-1,1,73,103,10230,8569 TCACTATCCCTTCGGT-1,1,72,104,10110,8638 TGAGTGTAACAACGGG-1,1,73,105,10230,8707 ATTGGTTGTGCATTAC-1,1,72,106,10110,8776 CTGGCGATTTACATGT-1,1,73,107,10230,8845 TCCACCAAGACATAGG-1,1,72,108,10110,8914 AACGTCGCTGCACTTC-1,1,73,109,10230,8982 GTCTCCCGAGTCCCGT-1,1,72,110,10110,9051 AGCCCGCACTACAATG-1,1,73,111,10230,9120 GGCACGCTGCTACAGT-1,1,72,112,10110,9189 GCTTGAGTGACCTCTG-1,1,73,113,10230,9258 ATAGCCATAACAGTCA-1,1,72,114,10110,9327 TAGGAGGCTCGAGAAC-1,1,73,115,10230,9395 GTCGGGTGAAGTACCG-1,0,72,116,10110,9464 CACCCGCGTTTGACAC-1,0,73,117,10230,9533 TCTCAGGCTACTCGCT-1,0,72,118,10110,9602 TAGATTCTCTAGCAAA-1,0,73,119,10230,9671 
CCCATTATTGTATCCT-1,0,72,120,10110,9740 GTTAATAGCGTCATTA-1,0,73,121,10230,9808 TACTCATTGACGCATC-1,0,72,122,10110,9877 TAGTGACAAGCTCTAC-1,0,73,123,10230,9946 ATCGCCGTGGTTCATG-1,0,72,124,10110,10015 CTGCTCTCAACACACC-1,0,73,125,10230,10084 GCATGACACAAAGGAA-1,0,72,126,10110,10153 CAACGATCGATCCAAT-1,0,73,127,10230,10221 ATACAGCGTCCACTGA-1,0,74,0,10350,1480 TCACGATTAATACGTT-1,0,75,1,10470,1549 CGTGAACTGACCCGAT-1,0,74,2,10350,1618 TCGATGTTACGGCCGT-1,0,75,3,10470,1687 GACCGGTGATACTCTC-1,0,74,4,10350,1755 TCTGTTTAGATTGTTC-1,0,75,5,10470,1824 GCTACTCGGACGCAGA-1,0,74,6,10350,1893 AAGAAAGTTTGATGGG-1,0,75,7,10470,1962 CGCCAGTAGTACCTTG-1,0,74,8,10350,2031 CTCCCTTGTATCAAGG-1,0,75,9,10470,2100 ACTTTATACACCACTT-1,0,74,10,10350,2168 AGATCTGGAGAGGATA-1,0,75,11,10470,2237 TCATCCATCTGATCAC-1,0,74,12,10350,2306 AGAACACGGCGATGGT-1,0,75,13,10470,2375 CCGCTCCGGATAAGCT-1,0,74,14,10350,2444 TGGTCGGGTACAGGGC-1,0,75,15,10470,2513 ACGGACGCAGCGACAA-1,0,74,16,10350,2581 ACATGCTTACGGCAGC-1,0,75,17,10470,2650 CAGGGAGATAGGCCAG-1,0,74,18,10350,2719 TCGCCACCCGGATTAC-1,0,75,19,10470,2788 TCTTGGTCAATGATAC-1,0,74,20,10350,2857 CATAGGGACACTTGTG-1,0,75,21,10470,2926 CACGAGCAAACCAGAC-1,0,74,22,10350,2994 GGGATATTGATCGCCA-1,0,75,23,10470,3063 TAATGTCGGTTCATGG-1,0,74,24,10350,3132 CGATTTGTCATTAATG-1,0,75,25,10470,3201 TTAAGATACCCAGAGA-1,0,74,26,10350,3270 CGACCGTTGGTATTCG-1,0,75,27,10470,3338 ACTACCATCCGAGGGC-1,0,74,28,10350,3407 TCTACGCACGATCTCC-1,0,75,29,10470,3476 AAGCCACTTGCAGGTA-1,0,74,30,10350,3545 ACGTTGTCGTTGAAAG-1,0,75,31,10470,3614 ACCGCTAGTCATTGGT-1,0,74,32,10350,3683 CATGAGTCCATCACGG-1,0,75,33,10470,3751 ATGTCTTGTTTGACTC-1,0,74,34,10350,3820 TCCCAAACATCCTCTA-1,0,75,35,10470,3889 CAACGCGATGAGCCAA-1,0,74,36,10350,3958 GACACCCAAAGACGCG-1,0,75,37,10470,4027 ACGTTCACTATGCCGC-1,0,74,38,10350,4096 ATGGACTGCTTAGTTG-1,0,75,39,10470,4164 CCATGGCAAACGCTCA-1,0,74,40,10350,4233 TTGTCTCGGCAAGATG-1,0,75,41,10470,4302 TTCGAACGAAACATGC-1,0,74,42,10350,4371 GGAAAGTCTTGATTGT-1,0,75,43,10470,4440 ATCATAGATCGACGAG-1,0,74,44,10350,4509 GATTAGAAACAAGCGT-1,0,75,45,10470,4577 GCTGCGAAGAATTATT-1,0,74,46,10350,4646 TTCCAGTGGGTTTCGT-1,0,75,47,10470,4715 GCTCATTGATCATATC-1,0,74,48,10350,4784 ATACGAGGTTTGTAAG-1,0,75,49,10470,4853 GTCTATACACGCATGG-1,0,74,50,10350,4922 GATTTGCGCTAACACC-1,0,75,51,10469,4990 TGGTGCCCTGCCTTAC-1,0,74,52,10350,5059 TCGAATCGCAGGGTAG-1,0,75,53,10469,5128 GGTAGGCCAATATCAC-1,0,74,54,10350,5197 CGTAAATAACAAAGGG-1,0,75,55,10469,5266 GACTCTAGAGTTCCAA-1,0,74,56,10350,5335 ACTTGTTACCGGATCA-1,0,75,57,10469,5403 GGCATTGAACATCTCA-1,0,74,58,10350,5472 AACCCGCTGTATTCCA-1,0,75,59,10469,5541 TAACAGGTTCCCTTAG-1,0,74,60,10350,5610 GTTCTTGTAACTCAAT-1,0,75,61,10469,5679 GATTCTGTTAATGAGT-1,0,74,62,10350,5747 GGACACCTCGGTGTTG-1,0,75,63,10469,5816 CGATCGAGAAGCACCA-1,0,74,64,10350,5885 CTGCTCTGACGGCAAA-1,0,75,65,10469,5954 CCCAGGAAGAATTCGA-1,0,74,66,10350,6023 TGTCGTGGGTATAGGC-1,0,75,67,10469,6092 ATATGTCTAGAGCGTG-1,0,74,68,10350,6160 GCGGGCAGACGGGTGA-1,0,75,69,10469,6229 CGAAGATCAGTTTCAT-1,0,74,70,10350,6298 ACGCCACTCGAAACAG-1,0,75,71,10469,6367 CTTAGATGTTTCATCC-1,0,74,72,10350,6436 GCGAATGGACTAGCGA-1,0,75,73,10469,6505 TAAATTGTGGGTAAAG-1,0,74,74,10350,6573 ATTTATACTGGTAAAG-1,0,75,75,10469,6642 GCGTCGTAACATGGTC-1,0,74,76,10350,6711 CGGTATGGGCACTCTG-1,0,75,77,10469,6780 CTATCACAACGCTGGA-1,0,74,78,10350,6849 ACCACACGGTTGATGG-1,0,75,79,10469,6918 TCAGCGCACGCCGTTT-1,0,74,80,10350,6986 AATGTTAAGACCCTGA-1,0,75,81,10469,7055 CTTACATAGATTTCTT-1,0,74,82,10350,7124 AACATAGCGTGTATCG-1,0,75,83,10469,7193 AGCAGTCGAAGCATGC-1,0,74,84,10350,7262 
GTACTACGGCCTCGTT-1,0,75,85,10469,7331 TTGGCCAAATTGTATC-1,0,74,86,10350,7399 TCAGAGGACGCGTTAG-1,0,75,87,10469,7468 AGTAATCTAAGGGTGG-1,0,74,88,10350,7537 AATTATACCCAGCAAG-1,0,75,89,10469,7606 CCCTACCCACACCCAG-1,0,74,90,10350,7675 CTCTGTCCATGCACCA-1,0,75,91,10469,7744 TCGCCTCCTTCGGCTC-1,0,74,92,10350,7812 CCTGATTCGCGAAGAA-1,0,75,93,10469,7881 GACTTCAACGCATCAA-1,0,74,94,10350,7950 GCTTCCCGTAAGCTCC-1,0,75,95,10469,8019 AGAACTGTACTTTGTA-1,0,74,96,10350,8088 CCTTAAGTACGCAATT-1,0,75,97,10469,8157 GGCCAATTGTATAGAC-1,1,74,98,10350,8225 AGAATACAGGCTATCC-1,0,75,99,10469,8294 AATACCTGATGTGAAC-1,1,74,100,10350,8363 TAGAGTGTTCCGGGTA-1,0,75,101,10469,8432 GCTTCCATGTAACCGC-1,1,74,102,10350,8501 GGTTCGCATTTGCCGT-1,0,75,103,10469,8569 TCAATCCGGGAAGTTT-1,1,74,104,10350,8638 CCGGAAGTGCAATATG-1,0,75,105,10469,8707 CTCATAAATGTGTATA-1,1,74,106,10350,8776 GACGGGCATCGAATTT-1,0,75,107,10469,8845 GCTAAGCCCAGTATGC-1,1,74,108,10350,8914 GCTATAAGGGCCAGGA-1,0,75,109,10469,8982 GCGGATTACTTGTTCT-1,1,74,110,10350,9051 TACAACAGCGCATACA-1,1,75,111,10469,9120 TCTAACCTAGCCTGCG-1,1,74,112,10350,9189 TCTACCCGCATCATTT-1,1,75,113,10469,9258 GTCTTACCACGCCAAG-1,1,74,114,10350,9327 AATAACGTCGCGCCCA-1,0,75,115,10469,9395 CGTTTCGGTTATATGC-1,0,74,116,10350,9464 TACGCTGCACGGTCGT-1,0,75,117,10469,9533 CGTTAAATACGACCAG-1,0,74,118,10350,9602 TCGACTGACGATGGCT-1,0,75,119,10469,9671 ACGCTACTGAATGGGC-1,0,74,120,10350,9740 AAGGGTTAGCCATGCG-1,0,75,121,10469,9808 ACGTTTCGGTGCACTT-1,0,74,122,10350,9877 TTAACCCGAGGCGTGT-1,0,75,123,10469,9946 GCCCGTCAAGCCCAAT-1,0,74,124,10350,10015 ATTCGTCCCGAGGTTA-1,0,75,125,10469,10084 TCAAATTATGTTCGAC-1,0,74,126,10350,10153 AACGTTCTACCATTGT-1,0,75,127,10469,10221 CTGTCCTGCGCACTAC-1,0,76,0,10589,1480 GCCCAGCGACACAAAG-1,0,77,1,10709,1549 TATAGCTATTATCTCT-1,0,76,2,10589,1618 AAGCTATGGATTGACC-1,0,77,3,10709,1687 CCGAAACACGACCTCT-1,0,76,4,10589,1755 ATCAGAAGCTGGTTGC-1,0,77,5,10709,1824 TGAACCTGAATGTGAG-1,0,76,6,10589,1893 TCAGAGCATGTCAACG-1,0,77,7,10709,1962 ATACCCTCCCGGCCAA-1,0,76,8,10589,2031 GCCACCTTATTCGCGA-1,0,77,9,10709,2100 CAACGAGCTTATTATG-1,0,76,10,10589,2168 GATCCAACCTTTAAAC-1,0,77,11,10709,2237 TTGTGGAGACAGCCGG-1,0,76,12,10589,2306 AGATAATCACACCTAT-1,0,77,13,10709,2375 GGTTAGGGATGCTAAT-1,0,76,14,10589,2444 TCATGAAGCGCTGCAT-1,0,77,15,10709,2513 CCAGGCGAGATGGTCT-1,0,76,16,10589,2581 CGACTTGCCGGGAAAT-1,0,77,17,10709,2650 GTGCCAAACGTTTCGA-1,0,76,18,10589,2719 TGCTCAAAGGATGCAC-1,0,77,19,10709,2788 GCCTCAGGTACCGGTC-1,0,76,20,10589,2857 ACAAGTAATTGTAAGG-1,0,77,21,10709,2926 TTAGAGTATTGTCGAG-1,0,76,22,10589,2994 GTCATCCCAAACTCAC-1,0,77,23,10709,3063 CTCCTAGTAATCGTGA-1,0,76,24,10589,3132 CCCTCATCACAGAGTA-1,0,77,25,10709,3201 GGAAACAGAGCTTGGG-1,0,76,26,10589,3270 ACATCGCAATATTCGG-1,0,77,27,10709,3338 TTAGCCATAGGGCTCG-1,0,76,28,10589,3407 CGGATTCTGCCTTATG-1,0,77,29,10709,3476 TTGTTGGCAATGACTG-1,0,76,30,10589,3545 GGATCTACCGTTCGTC-1,0,77,31,10709,3614 GGTGGGATTAGGTCCC-1,0,76,32,10589,3683 AATAGGCACGACCCTT-1,0,77,33,10709,3751 GAGGTCCGTTCGCTGT-1,0,76,34,10589,3820 TGACAACTTAAAGGTG-1,0,77,35,10709,3889 AGCTAAGCTCCGTCCG-1,0,76,36,10589,3958 CTAGCCCGGGAGACGA-1,0,77,37,10709,4027 CCCAGATTCCCGTGAC-1,0,76,38,10589,4096 GCCCGTTCACACAATT-1,0,77,39,10709,4164 ACAAACTCCATCAGAG-1,0,76,40,10589,4233 GGTTCCGTACGACTAA-1,0,77,41,10709,4302 TTGCGGAAAGCTGCCC-1,0,76,42,10589,4371 GGAACTCGTGAATACG-1,0,77,43,10709,4440 GGTTACCGCTCCCTAC-1,0,76,44,10589,4509 AGCGACTTTGAAGACA-1,0,77,45,10709,4577 CCGTTTCCTTTCCGTG-1,0,76,46,10589,4646 TGGATGGCATCTTGGA-1,0,77,47,10709,4715 CAAGTCGTTGAAATCT-1,0,76,48,10589,4784 
TGCAAACGTACTAGTT-1,0,77,49,10709,4853 GGCTGTCCTACTGCGG-1,0,76,50,10589,4922 TACTGCATGATTAAAT-1,0,77,51,10709,4990 CGGACCTCTGTAGTTA-1,0,76,52,10589,5059 CCCTAGCTCTAAGGTC-1,0,77,53,10709,5128 GTAACAGGTTAACGGC-1,0,76,54,10589,5197 CCCGCAAATAATCATC-1,0,77,55,10709,5266 AATCCCGCTCAGAGCC-1,0,76,56,10589,5335 AACGTCATCCGGCTTG-1,0,77,57,10709,5403 CTCGTGGCACTGAAAG-1,0,76,58,10589,5472 CTTTATCCGACGCATG-1,0,77,59,10709,5541 AAATGACTGATCAAAC-1,0,76,60,10589,5610 TTCGTGCATGTTATAG-1,0,77,61,10709,5679 ATCAAAGAGCCGTGGT-1,0,76,62,10589,5748 ATGTCATAATAAACGA-1,0,77,63,10709,5816 CTTAATCGACTTAGTA-1,0,76,64,10589,5885 CTAAAGGATGAGATAC-1,0,77,65,10709,5954 GTGCTATCCAGCTGGA-1,0,76,66,10589,6023 GACAGAGGTCTTCAGT-1,0,77,67,10709,6092 TCGATTTACGAAACGA-1,0,76,68,10589,6160 AGGCTTAAGTTGCACA-1,0,77,69,10709,6229 TAGGATCTTAACCGCA-1,0,76,70,10589,6298 GACTTTCGAGCGGTTC-1,0,77,71,10709,6367 CCCTGGGAGGGATCCT-1,0,76,72,10589,6436 TAGTAGTTGCCGGACA-1,0,77,73,10709,6505 GCCCTAAGTGCAGGAT-1,0,76,74,10589,6573 ACTTGGGCTTTCGCCA-1,0,77,75,10709,6642 CATGATACGGTGAAAC-1,0,76,76,10589,6711 TCCTGCAGCCGCCAAT-1,0,77,77,10709,6780 GTCCGTTAGAGGGCCT-1,0,76,78,10589,6849 GCTTTATTAAGTTACC-1,0,77,79,10709,6918 AATCGAGGTCTCAAGG-1,0,76,80,10589,6986 GTGCTGTTAGAACATA-1,0,77,81,10709,7055 GGCTTCTCGTGGGTGG-1,0,76,82,10589,7124 CCTACAGTTGAGGGAG-1,0,77,83,10709,7193 TATCCCTCGATCTGCA-1,0,76,84,10589,7262 CCGGTATCTGGCGACT-1,0,77,85,10709,7331 CTCTAACACCGGCAGC-1,0,76,86,10589,7399 CTTCATCACCAGGGCT-1,0,77,87,10709,7468 GGCTGGCAATCCCACG-1,0,76,88,10589,7537 CAAACCATAAGCGTAT-1,0,77,89,10709,7606 CATGTGGGCTCATCAC-1,0,76,90,10589,7675 CCCGTGACAGTGCCTT-1,0,77,91,10709,7744 GATTATCTTGCATTAT-1,0,76,92,10589,7812 GAGTCCACCAGGTTTA-1,0,77,93,10709,7881 TGTACTATCGCTCGTT-1,0,76,94,10589,7950 TTAGCGAATAGATAGG-1,0,77,95,10709,8019 ACATACAATCAAGCGG-1,0,76,96,10589,8088 ATTCGTGTACCCATTC-1,0,77,97,10709,8157 GCTAGCAACGCACCTA-1,0,76,98,10589,8225 ACCTCAGCGAGGCGCA-1,0,77,99,10709,8294 TCCGGGCCACTAACGG-1,0,76,100,10589,8363 TCCCAATATCGACGAC-1,0,77,101,10709,8432 GGATTGAAGTAGCCTC-1,0,76,102,10589,8501 GGCGCACAGTTTACCT-1,0,77,103,10709,8570 CCCGGCACGTGTCAGG-1,0,76,104,10589,8638 AGTAAGGGACAGAATC-1,0,77,105,10709,8707 TTGCGCACAACCACGT-1,0,76,106,10589,8776 CCTGCTACAACCATAC-1,0,77,107,10709,8845 CTGGGCTACTGGAGAG-1,0,76,108,10589,8914 AGCTACGAATGGTGGT-1,0,77,109,10709,8982 ATTCGTTTATCGTATT-1,0,76,110,10589,9051 GGCATTCCCTCCCTCG-1,0,77,111,10709,9120 AGTTATTCAGACTGTG-1,0,76,112,10589,9189 CCACTTTCCTTCTAGG-1,0,77,113,10709,9258 CGCAGTTCTATCTTTC-1,0,76,114,10589,9327 AGGAGCGTTTATTATC-1,0,77,115,10709,9395 GACAGGTAATCCGTGT-1,0,76,116,10589,9464 TTCCCAAAGTACTGAT-1,0,77,117,10709,9533 CTGCAGGGTGACGCTC-1,0,76,118,10589,9602 TCATGGAGGCCTTTGT-1,0,77,119,10709,9671 ATGGCCCGAAAGGTTA-1,0,76,120,10589,9740 CGTAATATGGCCCTTG-1,0,77,121,10709,9808 AGAGTCTTAATGAAAG-1,0,76,122,10589,9877 GAACGTTTGTATCCAC-1,0,77,123,10709,9946 ATTGAATTCCCTGTAG-1,0,76,124,10589,10015 TACCTCACCAATTGTA-1,0,77,125,10709,10084 AGTCGAATTAGCGTAA-1,0,76,126,10589,10153 TTGAAGTGCATCTACA-1,0,77,127,10709,10221 Seurat/tests/testdata/visium/spatial/scalefactors_json.json0000644000176200001440000000024414525500037024037 0ustar liggesusers{"spot_diameter_fullres": 96.3607212981978, "tissue_hires_scalef": 0.17211704, "fiducial_diameter_fullres": 144.54108194729673, "tissue_lowres_scalef": 0.051635113}Seurat/tests/testdata/visium/filtered_feature_bc_matrix.h50000644000176200001440000070561314525500037023620 0ustar liggesusersHDF  `TREEHEAPXmatrixHHhTREE80HEAPXHbarcodesdataindicesindptrshapefeaturesSNOD Hh  deflate 
[binary payload of Seurat/tests/testdata/visium/filtered_feature_bc_matrix.h5 omitted: a deflate-compressed HDF5 file whose top-level "matrix" group contains the datasets barcodes, data, indices, indptr, shape, and features; the features group carries id (ENSMUSG gene identifiers), name (gene symbols), feature_type ("Gene Expression"), and genome ("mm10").]
deflateE%z]PTREEbd deflateHN%z]PTREEad deflateW%z]PTREEa_x^RQ/d%H"I@9#9$ ^u;U +z%DQNŸcMس{H=D 7ž=~ ?@0LP?)9üs̻=c?IB~f?G~YF~%_E|o·Bп)#_O)W#W: Ϙ_z-a= ߀7ތ- vxGy D+w^xx"?'a0G0o(""""""""""""""""""""""""""""+x^еVCQ-אۿ]-`jʙݞ8pJ_>xޅu8o O3,< "/+*p; np< <O|S >/K o[G ~_W ~?O8\I8b.rjN1\ p7p c_?umx^Ra@ϙ|i[ ,-v]gwB D!D_#?/+~g9~_W5~w=>/ox[6{x~x^x / «x=ހ7Mx3ނmx;ށw]x7ރ}x?>C0> #!D~g~o;~'>O,| | _ <s</K2  o g=x>\8· B.KR .+8\ W k:n&n[6;$w=p/ <#(<$<OG~gffffffffffffffffffffffffffff7Łsx^鮤5 a71]ґď$u{< Ɵfs 9V2>[mS_>H>}9uT7/ڔWF@oox MZv_}V͙-|ƥ۵[`W銥<%.O}_QZ}_>| 9ENmvmյm){Ҝ_Uҋ_uBLWy/!V1x[ViOcxԧ[9:ߊ+gΙv4Fٷ^qI'#ƪw+O<(o[7 1y+mATךFwИTϱww׿,l2WK^@kõ {hg\V\dgwۣS}x)_}гovlzvUkH.vX]{:q/]kXY}|2]kP1I_.7֕(zyvnnǭڤoߜWV;ևn`n۳`}7밺ѥc^iJ[o+;WWs(W^ѹ\GV/h>9;[md~*ϻoe}87iKs3r-Ms߱yߠهu6oک{o8_vW9u_;1aW_ykjO#/4cob[SkV^L74}wA]ͭ6bcW}7z߭}t}i19:۷1۾`n`eZul{SgrCR+wkm]:7^[UA'K/=l[="Nulǫ|xomt kؠ7}΋Vg=P"x^mE5("R ^L4a >~w/G|228{Q;OP]|_գ]yzN';_DX[*7x}Q:Fm)yk0_c׼7Gmrh +wѱNsLkm`dKw g?ykzؾ AXݭlvG>"/~_/6q0)Mk/gW/oLƸ<}zg{6beWƜ&W {.ةy&w伴nqJk;r6vca{I_z6_|y4]]gy{*ԗ @]o[gB|bәH# i{`fЏ틕wwNպ:=Ys*z_=z+W^ ]Qe{Hw6}s( m{<6@?]g< mT*4ÇN4gDwoo.oY뵾~;`cO~#X2oj]cjпճ>z]l-JƯ)e|=.6^߷K_gPWiv=|omyz'GS/VO|9Fk#p{vzg빻/~[/֨>lur#]Ko^r׾7o9xPo%%ۏ-ksй=6}^Zک tx=.K=|Rޞե,ǾWWeeS`swrl{gM{yXuK@׋[K~h/^=.M~u럯h{|}~T~s{BeF_k־i9~.;O=G/+WIulݿñkt_@+'UN .;>iQڥz̙h.]}o+|l|{Bȋv772# ":/cǑ(۷.{>RO+@93ƈLd(3kS7oo&\^'vW/UA\k8[ڑwqʧ=֫Q?S?DgOwޫ6ѹouc{: kx^TEFE %+9+|zSU{jӯǫoQܷX Nxi\צ4ϟyA~(M YkOY7׸9 5`czchBuihΤ5Niɣ|k2x.ZO+q P?74e٘ԥyk}G_Ap[SֵQ6u`T_m~uUk`ss'S6[k(кk9nohk9z~INh_9r\T<y`N{o}8gnNz9Q"6b2Ncwӯۼioy}y4to-YxկY+c,>v}zVߝ\ytx [߉f;yum㵹T?u>orc\9F(ӽx8õ +n<=7i.̻=\ROc߹6e))]1[ijtmm5յ_㳺k}3fm2hog㕯 qGN uO^ovܾKٯ{q1{ ^{v =)tʧ^y!k|S1|,ߥʣ-wG}sh~|Os\ߏ}Dgm~W[0MbV4Wܜߣжh`^]Wcfl[iqe孿rdjc\w6iwW4uְz|'`~&׷7@s+<};Yٮ9joam9WwgW:/lnn~wGm k}P9뷠Vo7IZݲguPۭ9;m'{~Z}?ٯҚщz4{>,ÞqwGoxmػS+Usl5'gl}'/|S_niܫ[~k:o-)?wZUZsOרmk\{/8VWuo|'^ơ-;8km c\}S]8K+Ou-YY:Z [q}{x^٪$G x; ࠾~RjR'||\>dJmo.wwK^o~=ח><dmLqn_>U<#'7K|Z_W~c/?s}pxo}`chG-/{xU_cV~eʿtǑgey~U4sZVO.PN]~oV5&Qys>)g~k_2Si}^_ګ~>jy'zg*yz>k~{3#']8ꟸ|K8wq\XƲ^p":;3.g.>+l KgϞ:[͇C}͹u}}ʰJ^vvI4s5fywW}8y58goq,o ..s}κX0N_cټ ^>lΚ=Bo}Cۧ8z{hoU{<4Ve߽=/@k/>?'ϮͳkG8;һrl}Ҟ::1]3CߖWhX`k=}L΋W_>+{[y=]Y{V7st͗v5ދ=9~Ӛ='}}9y/yޫGߪ˿)\]Tyk KO56OssqtYhھRs<ꅆ_=[}l .}q(< KՎXȣ#)K&1vmVoӳ-6Οӧ]gQ6~Mk׹="009ʯWwwtv-h%7Ь3yc~{S=9Zڥ{y{xs%rƷv]g Z _K'gy ^Z`Ζ9]+@K|ڗs\۪8s}Tw`t]<=?k}e5q|ch=g z'җ Aoi>iǚ\p.wz^6?;ޑDy%38wm9tdנ2;W({߬/FLkJ^QKẈskkko5ھB۳ վ0mu}E;yQ;<@w}sim{ֺ< mSyOK/W=zvSƦ/^ŭ]yx{EZ;K^l-mmv?6:wZ]>|kz&C}^/Gc{Կiges=W D?|hwsZ_șsչmcsC1mk{uQϫ[yZ z39妺]w+ּ{U=<~Rovi&y{{>{|'/kν7N{1^ǝpwR{ho40kz!!~,NaOp=l=k}9/88೜c^9;"uat?.u:YWocgzk[c[߯aϖPS5G=wlĠ.Ƚs/{7:m@O:x:/כ|f>ˍ߿3f\0=rWqn8dKmL/zF_{ 6zl[c݌fmwg@s/}āwÚ]]_-Yk>X?χ|ǽ~ܵqU Q7ϯ5OlƝ}#? 
D>WԀwt_s;Aڲ8n\Lk ;ֶw;tJOuofyFl[13qֳ_sZ܅gp>Zoso;{ܱpj6wWخkbTًϋʃ_o+|e/^ |<7F0U Q`zۛ[n˔ݿ|49ݯUw{sr}dgCF/u;^:H,'(^ǚ߲yֳoM2]?;gkmOw-^*㩿>~]ϋܶoh;)^g96Clõzc־]iַyXeԺp=<y=-Ğs~ {oɒ }<~5 1/~ݳFW[[+ ෎`ƻj\^11%;Zݡ.g{Լ)斷:};&oJۺad_c{4ifߠcm~h>5L3G[9oܽ8qSg?ǵzvAR;/~o;;f즆}ɏ>^+smj祏&v6 ɱ19O#xO>X=ϗM~[x^ْ]5`6ygp3"CtԭE|~{{ݫo1r+gNe Ww9+o9j3]rcg_@\~fy-.Y>|X'A7ם¼_;[3}}qL\qͱ]/ڲ'҆Q~oyqgiI+,G۞2ba%:_2/Z=cյ6u5m U=竖j pZkn*j3rNV{)Q<_3[*wk@\\]x7sU{6߻;-b~nZY}cSͻ6k]Ucc۽x\]gr'[|Stso}h*/~;:{pcs=~|ڨVWony; Ph>sԼm>u?g9wGYw,W6捜6wwr9:N9nh|+oc] ] 2x.y7s+kNxAY{mgvԥ5K~ͯ"5wL =5a׹ӽ?v>ao^꭯S;֦k]\qoe9v'\s|Svufl9|x;gWustSg&_ yfcg`k:|[K|_.a;ֳйyxw;zmϓk<] >-,w{5o)4&۾z8~!V%c[]7=ܼx^ٮ7 ,iR@R8\?Hp3>G_=g+kyHdmt9c#wo7_lI d>#?F&^?DqI`v9uoI_|~yQLycGs1[s17yosMAHm6^q/:<ԮXˍ^jzV ƟʠO5'͟>[??k0b9(Y.|ߜ8J^e~W>4V|sR;ՑϜ{E,h->*ޫ/b9VnācxQ{:93okw?==l-uo+_ݒ0NSgs|[_Oi1h>t﹤7:{mnl =/;x9/^ll>:+}69s}=7W`XvԞyTfLWjo*u:Żlns$_֯>*߽H=y)[΍>\]ߔ=SM>ˮKڮ{xԾ1yn~j-J^ߦ|(cW_+cGֵgu 9yn̟{/G=ߢi 73|YNcZRc~+Ƴakqٽr{Fq}]ȍ>3v1{Kvޯ-Y}4bTx$U~_ϟ( ]g7ŽlW{y/fso:Y}dn.=3=klP9Co9gKթ~mUAj|Λ*sۣ덳~16SGjx^׮dU s@%{@c7b\g]X^4tQkVr?3b<uz~f^X_Ʒ8L'z}Fs;9;Ε9=+21qNk6ONó>q0fs3v׺<Ϛ8WL?IXofC|Ӳ篼7lkqAba>7:y~1c:}60mCu47ڽ"_{-|uZq=x-FraxqWZ~vS<Ye/cmo0g]w&ctS[Y^[fFqvWegk/}ߥ&M`g2FxGZ+%yƮؾ<ߔE 迯[B*3f{CdwXw$|w^qunx4^siKݚ{FeGmZI#?}|^gp{^o,!W]>zϭ:İwRutBjK{ޓ #dҫ5e#G?c{{11ٟްmjxm.Z6B}m.uzۚW9?~ךx|n-c]tz12ZyFsǿ7.ǹZoF>̳5YaG=!dY|/-vy߳ͿE6e>2706X=aYh]@:Qk8kh(u=b=+/E}~{oKA?qfDy^H6p/321^;X:# {]-?-:'kObl]ax^ۮ7E!S+9K%-mr]˗y~zzz~ їy?soc>\^uhm>ګ c8uYٝyx5ՅOmX~7fKe[1wƬ\S u<=t}G͋!96[So!~xU% Q‡5 wɚGㅰ)g@ewf⼺+g'd;9LVpcSŠ~>V, {N;}83*ӳVםcy{yVkܽOS派ުbI;bb*SAתQsrq~h=רƧcgwy [#BvkdI{wX׿ܠomL:Nq^/7!ړz>K?˓[rqº{ŘIez&?bhϨ%fz_4K_ x^G1sin@ i !ѳLd 7mBs1Yr"́?فo7J=jw߃5xD8׽Y1.cMM ϺxÞ]_Uv6tVe'UL_-R[xXio}8l篧mj鬽Ϲ.[qmܺh>Η;wZcd|: ΰsG׃si O79r޶Kf41y+lc{j.b3m\yky#ڰ_=ٴc]C9}g W5 {tL{?;Ղ5j]l<5^u&Omп=vw6*W1/s|x6ݍ8>mO;˳>Mh;Q{5U_]o㩆mG^On9VZO6{:'Gkw3v!7:}w۶[e~nG~,s3"7KeO6ѻ'Nk>GUGcǃehW-?n? S״mWk K9G,]Pzq1a}' }{N6^ЯܑaoŮKa?Tnx5=ڟsϛ=^Fo},|wm{{9[~s#F'W]RvmN7ych=|h=>F}odl+~'cPt,TbN1tI{^Wq'6~7 h{p}O1W!6Ϡ=_>ޣږ{m\^gqnSߩz5Sb2V6Ίk/װ}i_^bȹos?^]k'7tbot֖o~kǚz6ק[WW/Yt6TS,خcfZ Aw8w~z/1zo<,]\,F5@ԝv;/27)o]v `osy֞rgo>ܸOj0wYףnÖv}@|.x^ɎA }pPVLv:]Uxi=x6ۿ|zoO>ٯ֔u{{99Ks?YnukccI^ݳпo7{4`^k92Llad^aVeq>zN*cnlܜq <[7t֮kD| kd}_j йػ|qB3ˀm>g_ͳ5-8VA~pf{txY\7GH,ۚ׾w ;8\ї u(bb1o:+0{o]} SAYbam};{ly~@p1w0 .gq^7ABۥywNt>/|b  xwѶNba؟j\&s2vټ~!FvW-،mu-ǜ ~A޿C[sFbx^ dE[]Qt:Td#}><Ϟyç=eW"t:ƴ GV1W߽O%ֵ#gbYco!~_6>0pL&G(r$.|cT׌ŵgxCbfOOm5N[7|N}_LiGlqOeVDګ^TR~ksŘ+]Ǯ׳y>sB>}}n=7>99Rlwk=jmv !/X^Iw\ƿqU1x׶(wGwS{t7%s˷pص/TfsVs_.ګή3J+ٵ[3˫MFߒjr K7ybP,Wvb_{5޻sp{֡vMm=נ.s/i |g۳K>{Q+ ko[D1Mr寽ݟ/i[O8u>+cCK* ݳ:W+_+wπE=p>ouU-6ƨzW7y6k1owt_9mkuWƔ]յ^']o?*'N΃m[5Dky;=ߺo>;ʷǹ{xMq;7}'kַ_re\oЅ7eֿ}nׯd17￧k x^ٮ7E$)',KYr_.%v vwxB/. 
5}?^ceὺƵ!~AK~$d 2\9Ɓߥ_l|3o \ƺꗑZ5g}s9^>udjA>jyn=ܿdk|.} |sU2wp-L s Y?q_} Qlk_ʯLey_LįA:泹#v䊯ώݨl뽵2_ؽZ.Y%';dmδ>[WV_E}}3~gQ,Re>b.7^X\WƼX\,E>sRrm/\[Mչ}ݥN׸?e9/ރث/sv70ijwGu;ڼSk3^\=N*5;6ޕO>}OgcglS_g}{Ρڬ-Zƻ$)i,=y"1w2-;כWD6/[q1)MHus}K&;_^bgsnUSn_b#m{>NkWFGcgjm4g7of1!U5dOr}^-?rލMvj}ևz<7]G;Vs[y*˼gvj0kcWJ򍃵yұkz|5ڬ/2/Gb1{ƺ=$﹋ƛ3Kx^W$7{7 l(5􎦺?^g?k>v g^OhXm|9ܾ}ymol.z_ Xp}j._~_Xym 5~ּ:m_pOQ[qٶsۈ]Sር4sTc}Pg}#51מ6.ѧ`?{x'w͹S2\{~k!B+/!>kOZgl[?sb}8E;zƺkW K|a|ͫc,v<ַ楧_8=t7IJ֕)8'e]wgQuhCu26몏[X}7_n;ؚmsqwuضq@fo<[}2p.ԁ33u#ϛ>o_^肎wֶǻ\ImFҌ7;?oy}h*b3e=|#Gulpg~3^_e_bc>9/fT->yһsdloz2wM:o^7ؼgstN3{]"_=z{<#= 追 ;b[dzӿfWl˾{~ހ>u8WL>gl U31w.y/\9y_ro|LbkvOvdb SPr,sٷ~pؼn>9ޱ4i]ճ>h̋hg+?۔\]1hэWUso[̇=p˵{ҷه˗ҽU C;[v>ͳ7 Bw7|'-۩>Ywc@`5+4 Lk\L7mr4n<;VO~7oN6A߻~#wwz{i٧aَm_gHcmkpN}f3LGk`9||&]ZpӶx^T7 J;C[t;V_̵[k1k$obkԖ/TL+0]c+9DgݮCǝ_;܃u7?=)gvI;H\s1gz;mbWoNS٫1Ծw~{ʳF'epyo.WlcbZwB u.o-z<'Ěډ)\Uw;wާUaiٝ L~9N7u(~9w_c|w^~[̟|ӷw=keݔmu16W\͍Vcs+}<ɫ)w 6<'o7ksoD.S {ヰs1U.S9&O}/v孟{]揄Χw%AxƬƮ%ʵzwЮosX[ksSL~/h Xi7lƧ+{'/!~G^3_vxQ?ڙ=/ku,fɳ~<{;^;.;S~ 5xFgpeg{̵>r7I)l}Od/Ɏ=o']sE98:tvmloo|um;gu=zg+b6N[Z;_K+/]ou37z8r{H3}u(v}kSx&CǾ6VʑuQCS]5?)ߖ=:n~x6,o8#kMo0Ofs7 "/nۯ  긏sRƥ\{Zػhmcm.;ƶ*x^n1 $ qSH$j6)J??><\a7Ǐ~ѷ_sF]l_j?v^CB>([_Wc}Os#T["_`:dA`xIj]C\ՃC.|6lGIkg!wX9\]6wS:x3;?Ƃ#lc78NZ ~<'q*?8''L؏lk^{4^7{'?`n=brz:'k5Zweql8~gl5jlqr`o;`m˜ݏw7G1vLyxpc[΍KS9Z0G>Ĺ-zlxw4ػ_[G:9c}:.l7 o9mψ} S?rQ>e^g}vڮMm=vO<]AzwL]g8|]g8_>~0zFXז9q0׺s;;1c;'훇 yq6|3`>K}xt0ξ51抮6 /8`+7oC`qǛ Y/I}2ta΁[B=O}7wo5\E8/?k:}p;͘8r1G̶տwIn݆_B'o,۝#w9r\N{e=+u¶Mv\x-63?>;+u0-ݩwʭ6BbSsu3뉽-\ZϝU|?LpPV377b%Qy~k7ϛsyۏζ3~0dC~j=Y+x6vC焜S'>;qm-6+l=>.rh;ui9q^-x^E`6 >XH\xW:S/}w_d/|YKc ^ó;*gU)eLE^ᓴOq=xn~Ѐo\usz^zc>笍Ch ҭI}8M=iq}Ur} ' v㶟d6Q]K!vmU+W~y 浲:= UCocy{ q>b7W|'>%OYڱ|\OXOݥU>͟ki=#樺k{su>:|䛰cNk8ܟl.{+q;9[fYgT>>@ֻbᓚ@:s M}ʛ9+W{=Kd;*w))N`<+¯.K,z{Ƶ͗8^oY#۾B9!3e^Og?S1sWhMckОi6Hxa5|y hܫwZAV Mwԟ7scmSsx{ԟ  YQwBgcyo:y⋼5O'?sl |^Pk=_t&VO_{T'ge_5%/Oco%+TuưzƿN3st~f]έۜ<[˕ 3>w='zکoySO2RGכj_pfnY'~"W9cjsV+Ϛмۼ;۳臧_e^_ ݽߪ*>7v~cߑg.gEkK*r]^e^^>{O^[[)@?W_m7ձ~-?qt.js[w xA_<I_}k|Gչܞ(tg%we[oƽ pKkO}OY}un;cIYgиgl8kO^pU}]3G{4Mx^G /K H%iJJ=7Vo^nwϼ|</ӫTS)57=gja|Nw_V}l0tpypWk\wuVz'v)>ktM9l-Øζsa'2Fh9=ۥo3,w~ZuwVu𯸠 I?Lã?s9V^j7s;xm=lA9C׉]B>,-kwF=:ó<;= qnN|>m:['='Dݫ;Յž=a):?gv׶/W]zkk9s5MLvsԎڲ_e:&Ȝ2|8M\:~頎z}!~2pvޑo\푓NӿW똺EMLKn4zOզ:V{{w:9xlu]4Y%C}ꛖ|un(޷93>W^={1x5O%7\~+Y7x/F;w`Aqgc3^~-Ns_9TO=]l]Y],k=;Z禎Nu\Cy7Q[̛7~q~oQ=w9;.t'||fybr:# <8~{wEuyvߜ7o!qf߭[/wmΌv膗VONq-sU}tݍ{> 4Oiw֜"\ssaᄉmw:kgG}ylkΟIXG݃z:^}X%}f,`?WyRvZ_з qkzמ޵fY}kͻ}9}Ͽ_/:N#ș0Ųl&boۑkM7ֳwKJK.=7{9ƾ56|xecY|'2&RYOqbkmeou2>|jy77ZR}u{66&gxgm9ad5CXqy |,8gks>#:Hw} ;q|/x2m$ⷭZ]ĦydZ+ox;7s Zs@<̾z+ƹ9'{?;_5GnZ][ZϮsv:lzWfpw_{}k_ 6gka`}s/d˅Y_f=}w?yrrZ;|ӑsgz\kCyVNߗs8z^cʶ!5nͽ~gŶo۵['sU۬o=n2|7ϝe;C#gpg\Xnkcy l;5'|s%ޟx{֪mcva6d{g]Nχmosmo-+lα̉5}plw v{N9{rxMF-<oy +2m ؎k+㜟ޞn?-2&͕}4r|ҵ֩N點jy%׹rh?>s~go楺{Veݭ3͍]ccϝYU>H>g铏h럵Fg^o#kѵ-W_{/"W9Xh<-.]7_ig>ڗ+_fÇ#sxSIlkd l68Nkgjݶ^p[#=W^'_.Q/Ζh\w}͜|bm Ď-5Fc}LL}wk4p<dCi|zՇ6hg9f12gܵNkD>ۖ/O>3>|fM.kNޝdev6Vϑ}\B:Fxxp吸&{x|ΫI+cl5UkxWLk-F|gzrZǶig\?b%?ָb3ȶ| l#s 7.Xmkz}M\{>r|u _Y=ث<˚؍zm{7}d'+/ɺc;ƙ/峮}yoZ_|>z=a_rdrYx+2摒j؍4J#gk9nbs[w93u;]eM{'m[{w. 
3"xsrglm$_m`o;a=?h[=,z]y!c^l>_`r8=OrWڗ{x{՟A׿ >zLzrݿ[g߲lϿY{z׃o,=Q*}=hzFR>w/5D}g76b\s丳sn{wzieG=[&|z\;jE-s0'/?9DZ[wxԮX]V' u_}d:y7y<^͌yExC5)n OE&bq.}>@[d<0߈gw~0wCwOUz꿭ۧ&֧8Pbjt{G=Y߽:j?#6gYi>0b |7&oBw7Ηm> m~hbb#([ X/Qx^V7@ !@%R¨[J;̏v/.Oӌw{=:\$z;-ig)7[-|q4Xĸ9QV>~YbgOCڂql̍K [o/$[ZAXWqYkm,:WY]r59r[`n^j;-Ky9\[ye/Ci׋q3n?@Y;O̱g )_,ͣ#R|1oއkԟgr%?[/ڻ򛳒͙v٧O1ŵxfc z:?w.ĝx¼vTjNe &qom;%3S|]^Vo ܯp䘛寍]K|)U;)8ۿ|GPrԾu+]1~|g#Z(v7ͣgT?wk8[ӕY[^uʳfϋy8ꨧ߸8<.Y_mTNwӓSx|;vOcT;ŹN;YߏSN[}@t^j<}:6s7ueij<>WNymṽ__ڳO\/.t/=Zylh<'FK;~eguż5BkUcҷ=\}*WnqVVO\70s=Gmo/@>wԓo,o͟o\}?߮e}1Sӹ>U=Ӌ%N6͉~9wͿֽ57/Q;c;պ{o3Tߕџfһ6֒9^{·wX̣5Z9oXaIhߴj]>/uֿ63g?;Zս>>>k~ μ򭁱gbp}+a_sB{gc[wyNw+[e}gg~͟m8W{vNds`~{ֻx低);l\+'wo|[t,ڲcy??i:-^sPycAlIYL}ڡ;|X^ww֗k{K˹[g2^}x.߬yOY'^qDƨ1lT]%U{ӚQjZy2IwY;~vjx^ɮ\E`0ر@wD 5TUe;}mIכ/uV=-7x,ZM胝̡o)vc|1E#eU gcw4A ;o0y_;}-nZjwTdy?(π|׹ Oqgd$Ϝwmn~YLW}`ncn (݀>n^K_r92ȑCW׮mCѹF~33 <.9Zx]?]Jp9fkhYf_66ȴ3rY;[%Zdo;|ٟvї[k>}Wx..zp?x]C^7w q ~bYon5{`y"y_Y726뵮˟8c]S{6CsEc{i3~~R?p>Ct7^gWl5v.>g~,owSjWn-'[ߓh|Z5O8Ww6܏':_yz= Bx^ n0 6om H!璢l'mnts'v"ytO{fk_7k 6mJeݾ&nlrנe%1ѷONyp6c:wmrdԘz87f?9fMm $&z_s3։}o k2o ~z?9C}{Wz޼geבQ7ĉmp:Wk$|]S]2}sDwɖy3sۺ~۱z{{޹7DMs6>9 񬌍{g??:7?ك}({bg]a ;pŹP/5:"t[zقS-!?)45O<5!1u_&VGyC#lK\:'רu?pы}ܵ=kn}b(X#3L?l"v=.sp<ӹ9F?};w ,Լ'l?Y,~OfCQ~g }xٓGq~!缌Y}˯8Lͻ39?q2;VKwLx#-97;㚄?)c?=0np5Xc5Xc5XcО x^X[UQ/(:,j$jA1냯Gx|.>3{Λ'||ΏKoǿ·oѱ~>&A{sAߠVxz{o.:'Aޞ+[9מLZx9S3+L 6 Uy4G|.p>Ԅ?ʛXpǽIP hWKy_[.;=VΞujz [gNjcno,ڬfzy_c5WH~ sVyf̜ޫq'\CjU\Ιz?}Ok~>{G }D,RmHƨ70%JJb1 /DvY#`1$?}Vܼnߝϑ{$1u k~2)Ο聪3寬JwF Wҝzo8>˚׺W 8KNމL޾9 /7qa+m>HiF@ ;kVd^Ws?{Lʿ!m56.r8~Ccl>94cŻ߉C]f}ݰ7?يGsgη?]^yj.䡝^ f~{Q?zvM > ̇zS{;}7?̊# 7u_б]Oqs9prfv%!&1{/G]#w8b$jK7KΦsVyW9%mxHbm8ji[ܷ5NYƪD>#z$meEx^Yۊ%EEQt ʲiчyk?gāz:UYu<d[~ojzn[/mm=zg[˶>Oe/!?o;:Gzcv1YYc?c~9nz^∭<8 v,rK# @;j6$'Ǭ%佊sk}3 *{#|asG ]>'=Ul~u%_+qnԩྫγ^\K3 8/QfYk1Tc/W7Sj(WF^L6\'q5y.XwQO5?uwƊFydUxMM}y "sjÈs{==>Sg!>z$Q~ǜn>COS>=0\CcN?g۵78h<(_gUsg+wac2ֻ;6~i 9uRGlyAr?A^>~^pAs<-~/> 9k_ʟde/33;{坚8^<̟goDw⊺i0Ct/bG:9KrTok\fɁx/W+b~'S]K,'z7F`UY%MgV{b>wf]U#>|&I1j؋>gCzxYE׸җ>-+i.d O4x^X[EQ/A(&BPNCQ}NSv9y 3.3~1oxs[3ޞތf|4ˌ/f|9_fw3x=/XnjR -Vb>AIX1o⓯r?'p~VsK-c_u?Ìk^;ɗXPkX9 lS}` #s]<}l{LlzKsINܪB3Enw}4>.Or93Tuo^$w؋\q^9q`?Y@'YOs_u3GWruyrƅ{❘|_aS|'X~~<tO+Ȃ-o=ge.:z哪}H͚`m=O/Y39NړkM4rՂ<#f:oMwa=zsw9q0G 1. gJ=xDzrCMKuFf"7znܥTonQYԷk|sLڪ{w71k8\OXz3|Z3{DؕG3Gܽz jN3g. лg|RтNR5kNXǿXsB{vXQMSoy_Jb y{#&5+||LWf7sw& ՙ5iW?Ps>}~z!k{@3{>fr1O- # CZ'}ooZy 6W`_%ϲ&םc8j&D=g\+9O͕#kz|wmNW7SqwT; 58.iuV[b+ϓF7s~gWX3{~˼?7o:_{X1rs7O)19kM|άvL[yP8a=k'W#yGLI_}͙wLbg<|A=gy;.W5 ~sZt=!\b|6AlR0ylx,s.>%{0u gůuv~%3n;ȟHdx^XۊdEEQ,*^PdUViQp|? 
N'*N֙q`驊̪ű^ձ^뽱XX?uױ~f?ƺoXox_Xߌx{_C 'ذ?sl1asW!r?ϩ8xVLٟqy]#b5HjPWz9nԕxb/1ExaU$k9J3nYuoU^p+/cw#K 3ơ]̸a&JۇXZ4;Ԛs5'\kUy]g}} lu׵CJץ!nn0kAʝ74K#$wW|ӌyrݧ\^\Uȃ˞O+<杳:3 {JGuyOγد]Jgȣ˜VyT:dj=/Up\L*5w2~I}gޥ~>SgTJ/{/u5'cdZ]PϮ[̜t4̽|]n0|=[*?{Ƀ՝9ӵO:wg0oXjF%lظj~QG=7i ޿lG{s\qR8QoaH N\Z u|ul|CW̵afuGPK%N/5b?vQoGH'μXȏyߣck*LYʳ7 YՓXVq6c;>Z~"Gbl9[Hܵr:jU|:vn^j>'>g꫎cvcݢ#׆sb_|^Ug~%wi~o]۬O/B{C]k14˻ɝܯMy)?3Es틇uhuV sɭf?og9gy8Zг+=4/c<OPψ5t0_~`Į HK}1Mjnf sWwfyטPSb{Z|+ZG>ޑGS;ϏC1zWx)7Ky,l6uNU#afW+L<1]ꠞ<)<]r๮sro<>V{x} S0Mecާ; S= :s,y&O0j 纪8){ɼQ]ϰ z bceV3Trr&7sQ{*WFȫ^%5pOϡ~j~_tO?'Hޟ0m}5 ڑ'qC'i"߯S\'0jrGH)uާ&5|ςGg %u;2OLϙLY ~ؚ qVF|z\a{5h5߃MLsp5|{OǮausw+ƙgCs2$.3WXKu!蝟QG9}if_735@-;%M7gX϶gUJϹ'=`uf~GVk8.wsއEhH|f,knw]ʏj{PabdT+^yĔw6zV3,_ҭ1xH{wڃ '=wkyp>"'JNUoKr5fU ;{{90ߕeLM565~=|g{=Gߑ<ƾջ(^gLipϚ>_LlO+ |'}|vT P?#sA}PV(ԄN+a/i<"^ϩK}<~|zyE&=BZiWX$2*K+eV+qOut33}gl_*/+;9X7b^cjwԡܐߟ?^n 1] 23ړ'԰(5ϳ}_}^H爇9ìEN/A'=7e }f׳3%p/Wy=S7-u7aU]gҼ%>F\|UZzjȌ{#m{Qm8޻/Z{zGG`x^X]%5EQ\?P4 ʲrP"]9T:s yIuuuu's~[?nm^Kze[omѶ>WzCq,5g{_uyq>|5X*?j(Uqs f?jV3=^|Qzo^էysd^sMLkor%ǧ1tug3_W^R[ϧ&<|9xCͩ55u}qsFdV GꂽN3??4T+sPs}*k'=k0=\Oȹ&~W_0ʱ+?k# YUayprUU3\)ן=p-y {N٬w}ċ}:->O3*~DZ/4w+m仪7TZx]35;=Ӷ!˿`o88b[jܛc,־:f9?5uUM|1Ovꛣ1a1.c7Kp]'[gt_8Y?; {-E{f9渚:K\g'JSᐗ޹uG1{<0ceoR;$f-σZg=Xc9ˤ{c8kXlhư:n=~ Z݊7u|AW` 9'cq=X% 8>c}T3qz#1jN>h3cqs~Y3<(~uMݟ2nc=:jg{wvs]uq-',~,/匽9_Z |{HɺHK(?5K!KvLiN,!H47v-{_>V7OLzD!z/==wM?a6voU 0ztx5?wPܻKmxe]x5^>95Ԗꂏ]=RG53(6~S^>9R_:'ԋN۵~^SATU=3'ae>y}R0ox߉˟uΓZs/+ߥv=$yznj8b6V:C1UwXs]G[άkfuȝ=]z{O>7QJ{FCsWx2779{/g?;~G1׋YԎ+,T_qٹԹf^!V~b^ݹwf ^\I}E9+u{k!LjM5_3̞tL&\M~Xͤ?\ }0]ZuYyY^%k-3>^[.׿u_j^eT^$3c0Wl|W9+fU: QCc'7s<Oz?=\fuH < Y8/]/<Dj^w]tKLo >}O|&i ֣:l\Uԅ.qn|]b?w~o9W=8x9Iiby{5\9:tqg.::fky Ͼ\q_y u3MxkmWö9,p1qi(3/ϛCqԢɛĬk:穙v [d|~qߤf:U1o0:;c`h"gݏc,=%QYﺬ!%Of]w'2jFfA}-Ņ#Ula&wsVI7yn6GϿ?䍻Y ]ٰrQګ}w1Z17Sޗwa?kUm*.u{r}w|+k b~Z;?2?cxm\ώ|F̗ʘU^Nv3Oru}̌zJ Wf?yw׫ƨޥ'0GUw:zŕwN}r:c5zk4x{y<\T+?{9ϓ?yg^Gӂz17Z幆]Y?ylo' mKϡaI,OͿ 'ڟGdW > * G3wAVK̏3뗆mgsZSzf<žvsv'NmmUƚ7)䑵"' 13U?yuw7糦wAru~aOOd|o㚞/pYʓ'ls[ w^zg ٷ>+m܇ĦFUǙ>w~&6qtV{GU\闳(Gq{%}Ͱg>}?\{x̖8g(E^)ky]ϑ3Ϲ'>tW}1خέ>WiJ_+Oɯ8gʝy'қo +?Q1ORJx^YMEE0(&?!BPTLTҍ{0[tMO&wNsoh}}}}}wl|}k}׃^[}ml|'}}g}}W}}qxg&&'=:X!ob GZծgXԕ0̰۾dž#ҮzX)FӰc\5>bsߪ5ɷνua {xb8}nOZfͬ ̇=${ݩgj$Oe3ݳԈgJyfaӰaWJ%G,JE&3\g]M>?_zR*mϴQ_+ٙUϥ oqaú6׉=* see8WxT礦g̤ci8N]rZxFr'{|w Oö{Re35{b6WsEmka9ԘX?x|>?{3ރGoew [zUZ%n|$w; 6z޼­g:9[͊}{UJj稉S2WD9y#oƮsS+ksJdzy'o&0ΠM<֤p 3?AY~⸎9TA8zא70U܃wg}ha߇'~yo{F3r&/ z9/S/U4RՕqw/!we Gj7Ik<>V:TۻUf|}W3ߥwZs612ܗrٜ}_d+U㊁I<)s_yp4 {5G9qyM3{遗z7ۧksu^"} 3&nwgw~{gZT\YSZSG`^TǩszYwL̅ϷƆ ٨cTp|3{;Gޔćo]y8w j`éwRG>D5\7i_z;]Zԣ0=ҏϊKzܓ}T oηg┹L/Ovld]zOPDWg\UߏrW&_jGg `ʻ:J/?`ጻwYUiO5ϲ]a/p++Wk|/]aynZz7zwzz֥O +k1?hCVL}cx?a++m#H,|kXK[p{+;X#l }:5fUǙpC=v0 { W^/Uq;lf;a [`3-O'vXӶ1&DGn仿|'D~Ö߱{'qt0Fsȋk}\y'{\K8,lr{639qlטj~_?Ƒn'gpx{4 g4Ol:LrMU}u?qnf>49͵3]xyGl>9GY4?z zq\L3gԼ 93Ι9]]=T~yFSU-r(󟽓F+n~kF~f_^`[/l|15#sB/9^;{-xӜ [|Ћ'Tu:{ajfCbr#[z\'>OݩaĔgTW_{s7D썰;ys0[#Un`S<%#/׾e7ڛZ'a>ġ%w<;Qw?0SU|=/ҥųk[+% V' T+}\ݢ[ 7lԽ[?QWH~z;أzF;?|:Rz*=Ts7[UHO Ƀ^_u#^z<^ng;=~忊 Kwo8^aQ9C{5?&#8}f}k\q89O2w~glHhNQ;uܦzyܢ֪ԻwK寬E/ظñ'Q r;F/#zwJU/2VϹK|x^X]\E(~EG+p1DY/H^ķũSg.C5=뇱X~뷱~`Wzm7z{z8'c}:VKlx֟8bzc}j¼u(GO 5U<#lg58\ʛqc\SZI<;Z [JDZa .#O\}zWk>&G1kF?}ձϛjݗKGMOVW%yV= ^#S-jn`tUsVNm3! 
t7ϻg㵔O}|~Y5u0 lJ;|p>Xy<{]ڴ^sP]Gꔵ{_ɜs.5lr=kǖg}]J<ޮ;=;y+Ls5E}ֱ+SqU ?;^z/jftL\Q|+iA,b|+ɿF?a+VV8>sN6ԾϹyaG^3LMU|98gjVJ{9LOAzeݫwTsT ͑ ;691_i{FoFs;>R5 ɿ%>uYSL;>晘9c*=,}şɩ~Ngհ_i\Tϳ~K S~P-#- ػU}#->QUvzix/gdUû9{νy /}9PyI<;3xySU GMcr0KycƊ4wԂzN|Uq<p*/IS4Ĺ@g-tkx;QsWqVҏ#gkք9ۮ oasM5WHW>ރeN'ﬓxzC Ͷ גXʵ}eg*mɹ]Ϫw@zy(bX }'՛P\DALjqk!=Q>ڼO[q,V.zgWu&cV<p_+-&޿s^ڰU5;}'Oy*.f>-:{hf<=#?^ z_{|y{==wcriشajz'U:xM佦Ygq6ުurO|>84*^oak8rr=*ϤQզA <[:vHwx^XK&E(D QWAAwI8ȂxoamPQY=@櫨~`Bzoxǻ=~g=Uo{[{WEw$~&y$[8pxx/s- 3&Ez#cq|KsɳP?ERSuL.k1:,0w*w>¹6wvwzk̘U*܊td+Ly抧jD1|Ua;_zotQ~k3~gv̺oyuQZqɅg|jJ憹6;q>xmXac9噜 ռ1{߆Qj<5SOy~Ҷꡬ5p.W;m0;ff{J^T5?s~׵ìrqU;CeW#sKO}䩪~q> Y}V8r%\|uWogp\oֿ뷘S>fgʼn|8՞g/ru^UՇ)^q~e鑆2% 5E0ɹaYsP/$rzxz<soVUᒛg:9S>e;|swp#{  Ꙝ>kZxQz"{LZ5H-ɵz%^OކzĽ j?z{|<{9;;sgyFkYQ"ű#rxI9-{$Mz\og1~P_~Ix^XU,0(J5J4(: `rQ\į8q~q8osǙy2|9η K,8bo&96/8V"G|<'Xqqj&I\#Xj3,ssK,'HCڇ#=-yI?rs}{nաyPPi>J;KYczь/1Xj2WWvilTURKrV=|NXTخ]5ݮbXr<i_7K%Lx7RyjQ0&5MO_+y-=@]Smc_G\'y7sv¾a3}Ͻ$?coXiHW}Q9JM#Lu3< t3Zrf? gl{Gwތntͻ$g_uw5W[y>^p0gεx*n&#䵣9뮍NҾf=ݮN#G{['NarzSP'hR;?i3~t׳L<'wqbĪc;T;޷X~O}Mb,-rbT)|^8^gs" ٧W0sܠw+Fz3yJsϻz5u]Ǫ H'׈4}zM@ؕ+!\očXwUCq8aoXOJ+1G3zo{vwih_^<#;~WV{:=V.bflH.9<y|Ҟ3i$O/n|o0HSmL|afl= CuU=GWwߙ;IΊjyyqNy]>;.9.gc^;ǥ&zc{҇wYuJW'Եi^󩹨4.BoWN=:L; tK=b7o|.^<18YvoW:&mP\Ww*U9a[ߨ>V|ŧs^֟F~eGqœk1 iAP;3L~Ws/VZWӻwzwگmUDx^Y˪%E(@G-/>AA/.VP78Yg{1z|O=~K'{<xǣ/xk=zAz|bFN\1r>&|b9xb ܉%',i"k]֞:OkM:ů%7i/FJl'9爫Zy8wrad=#gSk_Ɨ6wWxfzL}%|^ƞ#yVJ~1p;<+džf8wAS&Q}'wZT;{ݓ䜺 WVZkWZ'<.]wko8ΟLp٣[oXfnK{\{ʱ#=ƞg5y+U;3DXvK5s=M?gO/{:\}yԚμ{s9>Kќ!yL<: jN^Y3نkPCv/wLU qTu͑{=@lY.j'V驜\ g\q\BXCoWtgʫzþ+m}{-ǹq/a`c+ [ |y_z+[|^x7s,FoOCgᯇ9~^KNSouf{A|S|+lXckf3N+K*7~ozI{[jmMu]+g1ϩ\^ύ k6+v>U.*Mt~_UFPLќ{s =` P>|yN{Ϸ?Ix^[]EAKEA(Q6AɋN1~Z{9%Y'y\UOz|;w \aPjZ>ڲ]}Z/~OaPW7gܽqoț{x_jiz~L=%w^L+Ͷw;(v_.u o"9 Ay?kr_eU n>C#Sog%stRNj3;V;Zt0tu%5Cz?GUx^Xۊ%EQWU\EF x_YqE'#7:gz늌̬3OKΫvnvv>lvE;_S;k;O;^āy8>ŽI/q9ֆ3#^r &y~ v|i1Mq>1'v̟q:\Oi$T?j+U ̪'.c/8t!VaI[%Ǚ/ {'⨜B%/kcD}s>+\o}7i;x㞭tU^NS:If_֮>z3Gg-}|:~6*49ql?@rG+r5<9șwGKmϾWs|fu}KY yIu|sıQ;ew;b`>AzsqҳzJFzz,e#*6Gj`#FJ5i5k6k$h-8p }/n]HE^Vݳμ5wM}ڣ;gyt"weޮ(~#}kvxc/gwxG[ RiK f0w>3lq<ޠD_<ӥzWM%gb=}f:f|~i{K/ w)4VXޘqM^xzRM/jzGq-t־P3jr'F^9ub}ڐKUrW:TLXI\a-Z2FvXqkc6oE?{qO T\,pN1{{W1=(k[iW9Tkc2ciN'sN,W*sHu]qe8}{DN*ަ+{|.6y0]黡gnJ~z v^̙;{m8c㈟pGqN'UCy߰ۨO}~rjƜY?q*/Yc}yOS4G5x;sH fC<Ƃ#8qt_~ 跴em\['W!YF=ڛ,ȝ9-kor><ƙ{!ņK1/@ߢ-K,i5wڞμ5rg 6r E97.sOz?Ϫ~ jCsڵ7{Ś"cQzEx^X]]5bQXtS bbiTD냯c0fس'+9UȃޝZf&9>[_;}ݾ^;냾i_U_og}]zU?DšH ;g8X㬾?cװ~ƬNb?FpԚ}K8ܻח<~֯qvN>SY}|L3V #%~9&!/]x₵82}l4kap2qjgAC=p6qڞe ~0j¼3keCͅ49;7ii*V3&,tFJ7՝xby%b4w>3ٰ?)06^_/ekF/fcU_<_\7߬b]Ir"<GVE-5uÎe_Uv棞4: c^3e}=wͼhb&^ڪ33"sp]eʕ_\k3g׆[tuU5u~,sX^p$W[/=K -^iS|zhc6;翪)5븳ukr7{owwb(]ivӲUc߾}Ege ✽ؙyO/WoU4J]q5R|?Hx^X[ED F!DQPEE2/"yra.+6:UuNcX5X/z52c=뭱ñ>{Wc}=wXq믱ބO<}~ۗ_ΐ}#='YpCu>yvcMj%8bP?dž!NI A {ƫ“ W# ֤=/&?[\œ6qOr=٪.3kMlxɄ̑XUwdtǞFI̒Q'c&bKI҂Ǘ9Q͙Gjծxo^{?fe[Wz)]߄:g,= YQIGuAGcf&&qFϱ^>ȾԛqVי^[s_q^晩zIK|bwꞒ'񈡼y.[Q}zFw)}:WW3;Gؿa\yiF^> Ҝjܟdϲ}*9_\cvm{ͬߘp}CVu*?]=9 &kP^:\dŠouIIXyÞwyծ3PSy帝W'ӧ[;,{3['g{Z}4]*K3{xvUsv<^ɬtoYR/X#{1q'6qs6׶eW7wp@nV ,8pOQzߢ於X5 q2wg_Ϛɉ8(}u^#\]xWύ<|V ^#]`) ~5O8¸-D7ԙv;}Wo^~oxLg.JLbBg}g2q3uHx^X۪$EYQ\=*.D2(DV΋X3LWdfddTR_/J_z7Z:k_w}51a_6,b|p~G_Ɗ<K}6g(C'X+sc۾~^3ySΟ`+nX1j^O~Xw;kڽʋc5qo0ǖn^6l~ ?X+\d>(?<ӉtYs-,W9Sx^V=sݨߑ&WKMͼ'vUC;9q3_~Uz-ϱ_U3vy-U|x<}[s1K*_jjȡbs*üMl1ِƨj6חX$5}m,/\?hrQ3.?+1^2K=I[!5IL-=/7G:K}>6'~36lgC̳{׷X+omߋ2;zlwjys'>q”sP֝Ȝ̛'9V~==ay6Cª4!@'ͯ4|Gz(,1kfQĬĻz[UZsӣ󝵤G&vNO3c`CX0 7Xg wrK^='CrIeLNnS=~rը^[>kv>T:Йy#LzZp<rMϪ^q煝d.ڗ=Lf杞O|o'x:w]{1fP-gpC^WWL1ӘtmgKƞV<3yd'~fɘ)޳|.y?ϸ.>bU8[ݥf-nz N| ;H#JG[O㴾vV ?'+]]qN|z}<|Oqj_Y270/΄*z6?zNڗG`ߓjcU޵yyW{Fp>twٻ qե;_ziĽņ9W^Oůr=oO|͟c&1'WJ{K FNg^ګ9]0r\ Aϴ@?PladV'謞@LGFFDf ~~^a?a?U??k?}~~=Y?_c{gwlk?w~†.6^ c qWoM\~>†)670nk~]'=VcVOǾT9 5~j 
Y#$]'{1v}oQ}g̎|yHC͋X̡g//|!SS>+ {#0X=YkB@##略mݝŦup- WK=ϐ/k;ꋟ]?r_fB9WnņH#q̄Y>r>g{Pz9bջ3$mC+nwܷ<ƨ?rO6 3@Î^+kϾzf+ř7q,7;O[  -k^FlbǼV<87Y}W)ərU19aݜ;eS_= >3]y?ϝ!{^Un:٨+ =Or7h^f cM|h(ڃ ji54?<$O+I r9Ϧņ=U;B:^el&܏Q|?Ùi3sH#z_Z8ֹVw8=>էWTw~9_[8Đ/Ou7a7.8􃘼H+kűO:p WWwpt`3Fr_@nwwނcyg5L'_93һᨵgVoQrM<ǚqs_<5iru-?n|;?&{w= F.㌦!b2{1$];b$'ϴS|}`uZQK'3ZK窱j: Sy;K٪]Z[| |xe-> 7Vs}%;.1ԯgx+{%0qf~S#{` Q<}5Mq}{h]w%ey p݂jr*_uL>ޛy/ {^Y;s2<{b0bGwCĊ:Ӟ '7^l i65/LX݃ŗ;PzxV%^$yx;>U7DYk:Hx^XMdEâ(.*~W .* x"j? /^n.=UQ3|gk}k}ַk~Zf_u}?֚kX=l8< <`zezk;kևk}ǜs<<a UclGlq{ $'qT/{zF9M901nY3Ϝnqb+l=ng8szU+5PkHqYk9c ;ׅr}-ϯ{/=?j>&3I|hF;>W9OO9Y>S1fm[wAtg'v)iӽ0u$ לpSGz|7koK>7yfޕ6 \’4s/=*n>t8V޳΋JGœ8-uM<ϳ:|\5Ys~szϋƼyfkϷs>]kiW3c%y`s9szyuwk.g ŪbLԾM{{`#ԯϿkz^`{o*qrDu 9gž'1~8beO1Kj-ңUZSN%Wzqnq1.|wO1/E[9qή0Flzh5_=aש<5gI]=*KCS,\:$N=Fy=ν#MMAkZunъq?Yzi6e☃4zq!oqO}.j7UJZW98q^yg[",iOTogdb)E3v5C+f5K:޽M8cכ#gҕ׽Ÿq4>GkIkx^YYED<΃ċE1y>\SlD |3=_nxzWzz}}ק}}׏}}׎&k/cr?P^L_1:.o]c^DgqJ'*?b} 9J{'65/fG}#-Yשahus~q3 9d M/*;/ߗrQP0𬼘%?퓴M.\-:}o/`y 8+k,^~N`?qj\c(}PyJ(Imy55aͯz{Y{X'/3&tfL~}z,y9-\[r\yfMRSzz?o#I|Uw{6 Đ^ڟ~ks;ֳgYş|p iM>r+^Pqج97"r?j.K)=#w_F4̚ɹ^[CTsck(<|r GUٓ/gC%@+/z<'5q,̻ sUѼq4/ȍܳj87b7ICc%uiWyoùOG|sF1jUWlsO!ë^Cnܯ||Ú_3@;kQ|o_̿CWaq,wwΫY؞?nAuU\I*Gֆ`'=zù'59gn/^\GaKq޽ɸz7|s֗c}=ַc}7ӱ~k뷱~ϱͱX;c\b+;/qŎM>m&.y>IOQ9:uops;󶪝g9OOl0;ּuI:{j^T+q+5My-e/q6'8yscŒ?5%~vyy!X=arհ_;k?ϫcHm/N%VVSMSױςĽײ75$ߪ< Uu!{cbtj֘+Dِ^ LbUO) |8⹯VT]ל7G7HbIXqA7 V7XUx͙}7wM^/wu~Ȏ7vMa罂~Zki9JOJ&˜҃wͻ8zQ @1.LJ˜Ϫӳibm8zKyJ:lf[W񌊿^KL{yǺK?;S}ϓNOG}SJwgu5+#_ý }C|?kqn8~ǥ1q+Mjg^޺Gը'U.tu~Zg9y>*gfWU^U^kˌ9α;ns+ϭpr(&}s{M }BQ%GyaG>?WJ]P8{'?Vw. Ry!3_vRo?召**?~L/s^;z=@wy+yQ|h >QtI8q!OXAߠz63UYu:oc>֟c^륱^덱>X_Xߍ38/2vױ58pvOtϱ5֓~c_qYq;P G1yn?oy(Wʛ93Ʒc}#N^R^<1WܪzC$qp {|Si&ܲo{VOqH4_VV2٪`Cǹ?Q[k ;aUR:TTky.7~rsam8]Ñq֯p+ޣε,Lj[͓;vx3> X`'Mr~H+՜grlqUW$Os1ݗ^}g{vnǎC Ճ{Tی#fO9~Q:2=1=gz̟H=ܗ_XU]saT 㾾8cKÎ~x>z'{^ms8UV3X{ιW=(_k(9.m+K>I3w/ɭU9w޵{ g+^fj9bٛQ)?W@nb>3~5ˉKWws 1g]_H=lj" s+3Lp*^ GV7CΨp[Xҏyf~=I~% 鋫䛴W}Qk>T=֭zSf yk8Wx gf MD䆚{&uH /pѬLN<^UEv^y)71r&KPWCZjIOUH7}x>_譜ηʳ^J-WS*L 3pb=Y7jnP?JXn8dQujyu{ s7}}%َBΔ*8sq-f=T[8ɘ+crׇ^c~ы>\|3򃴨t=0&g̜'5u;k+3>1΁uS\G+~jNWoVt*|oJ߫yx_^}⽹z?8p'ቛޫ[sM=m8 qڈ'h?p|4U~Wɕ1O7gpVHx^WˊÃ(@6DQPVi= fPTLtuϮ*NFEFFfEčw3g<2g.Sl|RoDC룘|ƨ?j s3~駟~駟~駟~?"5x^-ux/-u Zצ.5Cr*5Bh )!0EJ+p:~|}= I^>]ֲ&MIYA1fSr 7(Fuo#"c;yDtAԤf< ,c3wɒ>^ hF;bqeK8]D`%99lR0:xd5[8JLAH#:1 氖1_5hN7$2e(Hyѝ1L+-OɝEO9Ӟ2U$soB}:3 a ;ynق!#zo氞p;9xԢ e,s.s4RJ #GRK2-Pm2ɤޥ gc:~W%WLQђHbg%H'Nq'doԤ1]&Ds4/ڛT!Lg853Oo oS4'ij#!c~9E)ҁLa [9@H~Sv#ֱ$cAT1]Nn~8xNBF]IH` ?r<#OaAeӖHF26\99!N+2q|jvsY7_BcD41g 9J΢AԦ9]I,YF"U,Js0V REPЖNds]"TlHaЖhbbD HWR~QJԢ3g+G8]Җ w(KujҀOa I*7I҄fM$'ʈ!4hd99++(DijҊn a:sYqT2S Mg15MA*Ҙd8&S%/SPZA|b9E9ԣ#t='d%KQ bRD2,%C\yPяQd-{H6TO\ybԠ-#VauG)ќO4!p |hI4\iK2M+VQsGh !]DbkIW3R&t/&r<%GGF3lԻGiҜp1lb=}(F{HX6qǤmfޤe1n& #^b*Y9MҴ0QPZх^e4d;GlioИ`l(OrI *Sc`ǸNZS2҈f,sYnR/iÂ5Ptg8,c;ǹJ6)F%҉>0$r lm6ŨI4f8;\,)C}ӋYA2Gs^VRD1yecr3Bu>+CbYxBmhuInsLG~Ҁp3%l`+G92ttǼCY@&2e`7gOEI?q.JRpֳ3"m'uEQjӎ.$'s=(LyсLd`?sdQJ?1|A_\.W PtfSXsTtT)QđrvrKs˼A~S6D16I~A{hJgF25dxHr|Иpz2e[8nGUhM3,b+Ǹ3 F{Iސ"d&sYͷ !V|DƲDqd)E(-`0$sIrwQf8%TD0x^wubTB!GI 51gQ\24'vB1\f=b\ʽ R-JCD|<~ׇ߯!ԝBcڑƑ"J8 ĵ9!412CT  z5]f N3WMygI#L<ֱ#@!JcJ_3N Y.r!ԧ O҉^ c}O;cgQdѐ'j*q[^iEd1%5UM'@2ɥ-;.S>xA:o3U|A%Rk%ya2sY.*!ɣ'CGKYvsjbWp-Ha Y{Z垈=(O1[89:xeSYrQI5khiO/).Ps=AsО[Le)a3#1yiEgz3Lal+Pc/&X@!|I%Ps9 I >d0\)UbvI)0,f=Tq:!ҕ> &l>}lp4'ƑEcPɍ41%L&elfs MI+|J){8i.Ss3i m)`;Wʝ) $ӓt2G,a9OMSڒB:ɧRvsјDR;d>+/`x^A 0{s^%m_tx^]4v[mm۶m7m۶mvdKX>/p_'o;~w=?O3{>~G1~g9~_W5ރ}x?>C0>c8>OS4>s#(>$>O3,> "/+*&o;.!~')~%~_7-~#?/+'q Aq0!q(aq8'oGđpdGptıplp|ND81Nd89NST85Nt8=΀3L83΂l8;΁s\87΃|8?. B0Eq1\%q)\eq9\Wq%\WUq5\5q-\uq=\7 q#7Mq3-q+mq;wq'w]q7=q/}q? 
Seurat/tests/testthat.R0000644000176200001440000000043714525500056014650 0ustar liggesusers
library(testthat)
library(Seurat)
#
# # Run tests for 'v3'
# message('Run tests for v3 assay')
# options(Seurat.object.assay.version = 'v3')
# test_check("Seurat")

# Run tests for 'v5'
message('Run tests for v5 assay')
options(Seurat.object.assay.version = 'v5')
test_check("Seurat")
Seurat/src/0000755000176200001440000000000014525500232012302 5ustar liggesusers
Seurat/src/stats.cpp0000644000176200001440000000227714525500037014157 0ustar liggesusers
#include <Rcpp.h>
using namespace Rcpp;

// the following code in-parts taken from sparseMatrixStats (http://www.bioconductor.org/packages/release/bioc/html/sparseMatrixStats.html).

// [[Rcpp::export]]
NumericVector row_sum_dgcmatrix(NumericVector &x, IntegerVector &i, int rows, int cols) {
  NumericVector rowsum(rows, 0.0);
  int x_length = x.length();
  // The include target above and the body of this loop were lost in extraction; x holds the
  // non-zero values of the dgCMatrix and i the matching 0-based row indices, so the row sums
  // are reconstructed on that assumption.
  for (int k = 0; k < x_length; k++) {
    rowsum[i[k]] += x[k];
  }
  return rowsum;
}
// [the remaining functions of stats.cpp were lost in extraction]

Seurat/src/ModularityOptimizer.cpp
// (the original include targets were stripped in extraction; the headers below are inferred)
#include "ModularityOptimizer.h"
#include <algorithm>
#include <chrono>
#include <numeric>
#include <sstream>
using namespace ModularityOptimizer;
using namespace std::chrono;

JavaRandom::JavaRandom(uint64_t seed) {
  setSeed(seed);
}

void JavaRandom::setSeed(uint64_t seed) {
  this->seed = (seed ^ uint64_t(0x5DEECE66D)) & ((uint64_t(1) << 48) - 1);
}

int JavaRandom::next(int bits) {
  // Only 31 bits ever used.
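  // The state update that follows reproduces java.util.Random's 48-bit linear congruential
  // generator (state <- state * 0x5DEECE66D + 0xB, modulo 2^48), returning the top `bits`
  // bits. Using the same constants lets a given seed reproduce the draw sequence of Java's
  // Random, and hence (presumably) the clusterings of the original Java ModularityOptimizer.
  // A minimal usage sketch, illustrative only and not part of the upstream sources:
  //
  //   JavaRandom rng(42);           // same seed as a Java run
  //   int draw = rng.nextInt(100);  // draws follow the same sequence as java.util.Random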
seed = (seed * uint64_t(0x5DEECE66D) + uint64_t(0xB)) & ((uint64_t(1) << 48) - 1); return static_cast(seed >> (48 - bits)); } int JavaRandom::nextInt(int n) { if (n <= 0) throw std::out_of_range("n must be positive"); if ((n & -n) == n) // i.e., n is a power of 2 return static_cast((static_cast(n) * static_cast(next(31))) >> 31); int bits, val; do { bits = next(31); val = bits % n; } while (bits - val + (n - 1) < 0); return val; } IVector Arrays2::generateRandomPermutation(int nElements, JavaRandom& random) { IVector permutation(nElements, 0); for (int i = 0; i < nElements; i++) permutation[i] = i; for (int i = 0; i < nElements; i++) { int j = random.nextInt(nElements); int k = permutation[i]; permutation[i] = permutation[j]; permutation[j] = k; } return permutation; } Clustering::Clustering(int nNodes): nNodes(nNodes), nClusters(1), cluster(nNodes) {}; Clustering::Clustering(IVector cluster) : nNodes(cluster.size()), cluster(cluster.cbegin(), cluster.cend()) { nClusters = *std::max_element(cluster.cbegin(), cluster.cend()) + 1; } IVector Clustering::getNNodesPerCluster() const { IVector nNodesPerCluster(nClusters, 0); for(const int& clust: cluster) { nNodesPerCluster.at(clust)++; } return nNodesPerCluster; } std::vector Clustering::getNodesPerCluster() const { std::vector nodePerCluster(nClusters); IVector nNodesPerCluster = getNNodesPerCluster(); for(int i =0; i < nClusters; i++) { const int cnt = nNodesPerCluster.at(i); nodePerCluster.at(i).reserve(cnt); } for(int i=0; i< nNodes; i++) { nodePerCluster.at(cluster.at(i)).push_back(i); } return nodePerCluster; } void Clustering::setCluster(int node, int cluster) { this->cluster.at(node) = cluster; nClusters = std::max(nClusters, cluster+1); } void Clustering::initSingletonClusters() { for(int i=0; i < nNodes; i++) { cluster.at(i) = i; } nClusters = nNodes; } void Clustering::orderClustersByNNodes() { typedef std::pair ipair; // holds numNodes, cluster std::vector clusterNNodes; clusterNNodes.reserve(nClusters); IVector nNodesPerCluster = getNNodesPerCluster(); for(int i=0; i&a, const std::pair& b) { return b.first < a.first; }); //std::greater()); // now make a map from old to new names IVector newCluster(nClusters, 0); int i=0; do { newCluster[clusterNNodes[i].second] = i; i++; } while (i < nClusters && clusterNNodes[i].first > 0); nClusters = i; for(int i=0; icbegin(), edgeWeight->cend(), this->edgeWeight.begin()); if (nodeWeight != nullptr) { std::copy(nodeWeight->cbegin(), nodeWeight->cend(), this->nodeWeight.begin()); } else { this->nodeWeight = getTotalEdgeWeightPerNode(); } } Network::Network(int nNodes, DVector* nodeWeight, std::vector& edge, DVector* edgeWeight) : nNodes(nNodes), nEdges(0), nodeWeight(), firstNeighborIndex(nNodes + 1, 0), neighbor(), edgeWeight(), totalEdgeWeightSelfLinks(0) { if(edge.size() != 2 || edge[0].size() != edge[1].size()) { throw std::length_error("Edge was supposed to be an array with 2 columns of equal size."); } IVector neighbor(edge.at(0).size(), 0); DVector edgeWeight2(edge.at(0).size(), 0.0); int i = 1; for (size_t j = 0; j < edge[0].size(); j++) if (edge[0][j] != edge[1][j]) { if (edge[0][j] >= i) for (; i <= edge[0][j]; i++) firstNeighborIndex.at(i) = nEdges; neighbor[nEdges] = edge[1][j]; edgeWeight2[nEdges] = (edgeWeight != nullptr) ? (*edgeWeight)[j] : 1.0; nEdges++; } else totalEdgeWeightSelfLinks += (edgeWeight != nullptr) ? 
(*edgeWeight)[j] : 1; for (; i <= nNodes; i++) firstNeighborIndex.at(i) = nEdges; this->neighbor.resize(nEdges); std::copy(neighbor.begin(), neighbor.begin() + nEdges, this->neighbor.begin()); this->edgeWeight.resize(nEdges); std::copy(edgeWeight2.begin(), edgeWeight2.begin() + nEdges, this->edgeWeight.begin()); if(nodeWeight == nullptr) { this->nodeWeight = getTotalEdgeWeightPerNode(); } else { this->nodeWeight = *nodeWeight; } } double Network::getTotalNodeWeight() { return std::accumulate(nodeWeight.cbegin(), nodeWeight.cend(), 0.0); } DVector Network::getNodeWeights() { return nodeWeight; } IVector Network::getNEdgesPerNode() { IVector nEdgesPerNode(nNodes, 0); for(int i=0; i< nNodes; i++) { nEdgesPerNode.at(i) = firstNeighborIndex.at(i + 1) - firstNeighborIndex.at(i); } return nEdgesPerNode; } std::vector Network::getEdges() { std::vector edge(2); edge[0].resize(nEdges); for(int i=0; i < nNodes; i++) { std::fill(edge[0].begin() + firstNeighborIndex.at(i), edge[0].begin() + firstNeighborIndex.at(i + 1), i); } edge.at(1) = neighbor; return edge; } IVector Network::getEdges(int node) { return IVector(neighbor.begin() + firstNeighborIndex.at(node), neighbor.begin() + firstNeighborIndex.at(node + 1)); } std::vector Network::getEdgesPerNode() { std::vector edgePerNode(nNodes); for (int i = 0; i < nNodes; i++) { edgePerNode[i] = IVector(neighbor.begin() + firstNeighborIndex.at(i), neighbor.begin() + firstNeighborIndex.at(i + 1)); } return edgePerNode; } double Network::getTotalEdgeWeight() { return std::accumulate(edgeWeight.cbegin(), edgeWeight.cend(), 0.0) / 2.0; } double Network::getTotalEdgeWeight(int node) { return std::accumulate(edgeWeight.cbegin() + firstNeighborIndex.at(node), edgeWeight.cbegin() + firstNeighborIndex.at(node + 1), 0.0); } DVector Network::getTotalEdgeWeightPerNode() { DVector totalEdgeWeightPerNode(nNodes, 0.0); for (int i = 0; i < nNodes; i++) { totalEdgeWeightPerNode[i] = getTotalEdgeWeight(i); } return totalEdgeWeightPerNode; } DVector Network::getEdgeWeights(int node) { return DVector(edgeWeight.cbegin() + firstNeighborIndex.at(node), edgeWeight.cbegin() + firstNeighborIndex.at(node+1)); } std::vector Network::getEdgeWeightsPerNode() { std::vector edgeWeightPerNode(nNodes); for (int i = 0; i < nNodes; i++) edgeWeightPerNode[i] = getEdgeWeights(i); return edgeWeightPerNode; } // Skipping unused Network creators // Network createNetworkWithoutNodeWeights() // Network createNetworkWithoutEdgeWeights() // Network createNetworkWithoutNodeAndEdgeWeights() // Network createNormalizedNetwork1() // Network createNormalizedNetwork2() // Network createPrunedNetwork(int nEdges) // Network createPrunedNetwork(int nEdges, Random random) // Network createSubnetwork(int[] node) // Network createSubnetwork(boolean[] nodeInSubnetwork) // Network createSubnetwork(Clustering clustering, int cluster) std::vector Network::createSubnetworks(Clustering clustering) const { std::vector subnetwork(clustering.nClusters); auto nodePerCluster = clustering.getNodesPerCluster(); IVector subnetworkNode(nNodes); IVector subnetworkNeighbor(nEdges); DVector subnetworkEdgeWeight(nEdges); for (int i = 0; i < clustering.nClusters; i++) subnetwork[i] = createSubnetwork(clustering, i, nodePerCluster[i], subnetworkNode, subnetworkNeighbor, subnetworkEdgeWeight); return subnetwork; } // Network createSubnetworkLargestComponent() // Network createReducedNetwork(Clustering clustering) Network Network::createReducedNetwork(const Clustering& clustering) const { Network reducedNetwork; reducedNetwork.nNodes = 
clustering.nClusters; reducedNetwork.nEdges = 0; reducedNetwork.nodeWeight = DVector(clustering.nClusters); reducedNetwork.firstNeighborIndex = IVector(clustering.nClusters + 1); reducedNetwork.totalEdgeWeightSelfLinks = totalEdgeWeightSelfLinks; IVector reducedNetworkNeighbor1(nEdges); DVector reducedNetworkEdgeWeight1(nEdges); IVector reducedNetworkNeighbor2(clustering.nClusters - 1); DVector reducedNetworkEdgeWeight2(clustering.nClusters); std::vector nodePerCluster = clustering.getNodesPerCluster(); for (int i = 0; i < clustering.nClusters; i++) { int j = 0; for (size_t k = 0; k < nodePerCluster[i].size(); k++) { int l = nodePerCluster[i][k]; reducedNetwork.nodeWeight[i] += nodeWeight[l]; for (int m = firstNeighborIndex[l]; m < firstNeighborIndex[l + 1]; m++) { int n = clustering.cluster[neighbor[m]]; if (n != i) { if (reducedNetworkEdgeWeight2[n] == 0) { reducedNetworkNeighbor2[j] = n; j++; } reducedNetworkEdgeWeight2[n] += edgeWeight[m]; } else reducedNetwork.totalEdgeWeightSelfLinks += edgeWeight[m]; } } for (int k = 0; k < j; k++) { reducedNetworkNeighbor1[reducedNetwork.nEdges + k] = reducedNetworkNeighbor2[k]; reducedNetworkEdgeWeight1[reducedNetwork.nEdges + k] = reducedNetworkEdgeWeight2[reducedNetworkNeighbor2[k]]; reducedNetworkEdgeWeight2[reducedNetworkNeighbor2[k]] = 0; } reducedNetwork.nEdges += j; reducedNetwork.firstNeighborIndex[i + 1] = reducedNetwork.nEdges; } reducedNetwork.neighbor = IVector(reducedNetworkNeighbor1.cbegin(), reducedNetworkNeighbor1.cbegin() + reducedNetwork.nEdges); reducedNetwork.edgeWeight = DVector(reducedNetworkEdgeWeight1.cbegin(), reducedNetworkEdgeWeight1.cbegin() + reducedNetwork.nEdges); return reducedNetwork; } Clustering Network::identifyComponents() { std::vector nodeVisited(nNodes, false); IVector node(nNodes); Clustering clustering(nNodes); clustering.nClusters = 0; for (int i = 0; i < nNodes; i++) if (!nodeVisited[i]) { clustering.cluster[i] = clustering.nClusters; nodeVisited[i] = true; node[0] = i; int j = 1; int k = 0; do { for (int l = firstNeighborIndex[node[k]]; l < firstNeighborIndex[node[k] + 1]; l++) if (!nodeVisited[neighbor[l]]) { clustering.cluster[neighbor[l]] = clustering.nClusters; nodeVisited[neighbor[l]] = true; node[j] = neighbor[l]; j++; } k++; } while (k < j); clustering.nClusters++; } clustering.orderClustersByNNodes(); return clustering; } // private: // double generateRandomNumber(int node1, int node2, const IVector& nodePermutation); Network Network::createSubnetwork(const Clustering& clustering, int cluster, IVector& node, IVector& subnetworkNode, IVector& subnetworkNeighbor, DVector& subnetworkEdgeWeight) const { Network subnetwork; subnetwork.nNodes = node.size(); if (subnetwork.nNodes == 1) { subnetwork.nEdges = 0; subnetwork.nodeWeight = DVector(1, nodeWeight[node[0]]); subnetwork.firstNeighborIndex = IVector(2); subnetwork.neighbor = IVector(0); subnetwork.edgeWeight = DVector(0); } else { for (size_t i = 0; i < node.size(); i++) subnetworkNode[node[i]] = i; subnetwork.nEdges = 0; subnetwork.nodeWeight = DVector(subnetwork.nNodes, 0); subnetwork.firstNeighborIndex = IVector(subnetwork.nNodes + 1); for (int i = 0; i < subnetwork.nNodes; i++) { int j = node[i]; subnetwork.nodeWeight[i] = nodeWeight[j]; for (int k = firstNeighborIndex[j]; k < firstNeighborIndex[j + 1]; k++) if (clustering.cluster[neighbor[k]] == cluster) { subnetworkNeighbor[subnetwork.nEdges] = subnetworkNode[neighbor[k]]; subnetworkEdgeWeight[subnetwork.nEdges] = edgeWeight[k]; subnetwork.nEdges++; } subnetwork.firstNeighborIndex[i + 1] = 
subnetwork.nEdges; } subnetwork.neighbor = IVector(subnetworkNeighbor.cbegin(), subnetworkNeighbor.cbegin() + subnetwork.nEdges); subnetwork.edgeWeight = DVector(subnetworkEdgeWeight.cbegin(), subnetworkEdgeWeight.cbegin() + subnetwork.nEdges); } subnetwork.totalEdgeWeightSelfLinks = 0; return subnetwork; } VOSClusteringTechnique::VOSClusteringTechnique(std::shared_ptr network, double resolution) : network(network), resolution(resolution) { clustering = std::make_shared(network->getNNodes()); clustering->initSingletonClusters(); }; VOSClusteringTechnique::VOSClusteringTechnique(std::shared_ptr network, std::shared_ptr clustering, double resolution) : network(network), clustering(clustering), resolution(resolution){}; double VOSClusteringTechnique::calcQualityFunction() { double qualityFunction = 0.0; for (int i = 0; i < network->getNNodes(); i++) { int j = clustering->cluster[i]; for (int k = network->getFirstNeighborIndexValue(i); k < network->getFirstNeighborIndexValue(i + 1); k++) if (clustering->cluster[network->getNeighborValue(k)] == j) qualityFunction += network->edgeWeight[k]; } qualityFunction += network->totalEdgeWeightSelfLinks; DVector clusterWeight(clustering->nClusters); for (int i = 0; i < network->nNodes; i++) clusterWeight[clustering->cluster[i]] += network->nodeWeight[i]; for (int i = 0; i < clustering->nClusters; i++) qualityFunction -= clusterWeight[i] * clusterWeight[i] * resolution; qualityFunction /= 2 * network->getTotalEdgeWeight() + network->totalEdgeWeightSelfLinks; return qualityFunction; } bool VOSClusteringTechnique::runLocalMovingAlgorithm(JavaRandom& random){ bool update = false; double maxQualityFunction, qualityFunction; DVector clusterWeight(network->getNNodes(), 0); IVector nNodesPerCluster(network->getNNodes(), 0); int bestCluster, j, k, l, nNeighboringClusters, nStableNodes; if (network->getNNodes() == 1) return false; for (int i = 0; i < network->getNNodes(); i++) { clusterWeight[clustering->cluster[i]] += network->nodeWeight[i]; nNodesPerCluster[clustering->cluster[i]]++; } int nUnusedClusters = 0; IVector unusedCluster(network->getNNodes(), 0); for (int i = 0; i < network->getNNodes(); i++) { if (nNodesPerCluster[i] == 0) { unusedCluster[nUnusedClusters] = i; nUnusedClusters++; } } IVector nodePermutation = Arrays2::generateRandomPermutation(network->nNodes, random); DVector edgeWeightPerCluster(network->getNNodes(), 0.0); IVector neighboringCluster(network->getNNodes() - 1, 0); nStableNodes = 0; int i = 0; do { j = nodePermutation[i]; nNeighboringClusters = 0; for (k = network->firstNeighborIndex.at(j); k < network->firstNeighborIndex.at(j + 1); k++) { l = clustering->cluster[network->neighbor[k]]; if (edgeWeightPerCluster[l] == 0) { neighboringCluster[nNeighboringClusters] = l; nNeighboringClusters++; } edgeWeightPerCluster[l] += network->edgeWeight[k]; } clusterWeight[clustering->cluster[j]] -= network->nodeWeight[j]; nNodesPerCluster[clustering->cluster[j]]--; if (nNodesPerCluster[clustering->cluster[j]] == 0) { unusedCluster[nUnusedClusters] = clustering->cluster[j]; nUnusedClusters++; } bestCluster = -1; maxQualityFunction = 0; for (k = 0; k < nNeighboringClusters; k++) { l = neighboringCluster[k]; qualityFunction = edgeWeightPerCluster[l] - network->nodeWeight[j] * clusterWeight[l] * resolution; if ((qualityFunction > maxQualityFunction) || ((qualityFunction == maxQualityFunction) && (l < bestCluster))) { bestCluster = l; maxQualityFunction = qualityFunction; } edgeWeightPerCluster[l] = 0; } if (maxQualityFunction == 0) { bestCluster = 
unusedCluster[nUnusedClusters - 1]; nUnusedClusters--; } clusterWeight[bestCluster] += network->nodeWeight[j]; nNodesPerCluster[bestCluster]++; if (bestCluster == clustering->cluster[j]) nStableNodes++; else { clustering->cluster[j] = bestCluster; nStableNodes = 1; update = true; } i = (i < network->nNodes - 1) ? (i + 1) : 0; } while (nStableNodes < network->nNodes); IVector newCluster(network->getNNodes()); clustering->nClusters = 0; for (i = 0; i < network->nNodes; i++) if (nNodesPerCluster[i] > 0) { newCluster[i] = clustering->nClusters; clustering->nClusters++; } for (i = 0; i < network->nNodes; i++) clustering->cluster[i] = newCluster[clustering->cluster[i]]; return update; } bool VOSClusteringTechnique::runLouvainAlgorithm(JavaRandom& random) { if (network->nNodes == 1) return false; bool update = runLocalMovingAlgorithm(random); if (clustering->nClusters < network->nNodes) { VOSClusteringTechnique vosClusteringTechnique(std::make_shared(network->createReducedNetwork(*clustering)), resolution); bool update2 = vosClusteringTechnique.runLouvainAlgorithm(random); if (update2) { update = true; clustering->mergeClusters(*vosClusteringTechnique.clustering); } } return update; } bool VOSClusteringTechnique::runIteratedLouvainAlgorithm(int maxNIterations, JavaRandom& random) { bool update; int i = 0; do { update = runLouvainAlgorithm(random); i++; } while ((i < maxNIterations) && update); return ((i > 1) || update); } bool VOSClusteringTechnique::runLouvainAlgorithmWithMultilevelRefinement(JavaRandom& random) { if (network->nNodes == 1) return false; bool update = runLocalMovingAlgorithm(random); if (clustering->nClusters < network->nNodes) { VOSClusteringTechnique vosClusteringTechnique(std::make_shared(network->createReducedNetwork(*clustering)), resolution); bool update2 = vosClusteringTechnique.runLouvainAlgorithmWithMultilevelRefinement(random); if (update2) { update = true; clustering->mergeClusters(*vosClusteringTechnique.clustering); runLocalMovingAlgorithm(random); } } return update;} bool VOSClusteringTechnique::runIteratedLouvainAlgorithmWithMultilevelRefinement(int maxNIterations, JavaRandom& random) { bool update; int i = 0; do { update = runLouvainAlgorithmWithMultilevelRefinement(random); i++; } while ((i < maxNIterations) && update); return ((i > 1) || update); } bool VOSClusteringTechnique::runSmartLocalMovingAlgorithm(JavaRandom& random) { if (network->nNodes == 1) return false; bool update = runLocalMovingAlgorithm(random); if (clustering->nClusters < network->nNodes) { std::vector subnetwork = network->createSubnetworks(*clustering); auto nodePerCluster = clustering->getNodesPerCluster(); clustering->nClusters = 0; IVector nNodesPerClusterReducedNetwork(subnetwork.size()); for (size_t i = 0; i < subnetwork.size(); i++) { VOSClusteringTechnique vosClusteringTechnique(std::make_shared(subnetwork[i]), resolution); vosClusteringTechnique.runLocalMovingAlgorithm(random); for (int j = 0; j < subnetwork[i].nNodes; j++) clustering->cluster[nodePerCluster[i][j]] = clustering->nClusters + vosClusteringTechnique.clustering->cluster[j]; clustering->nClusters += vosClusteringTechnique.clustering->nClusters; nNodesPerClusterReducedNetwork[i] = vosClusteringTechnique.clustering->nClusters; } VOSClusteringTechnique vosClusteringTechnique2(std::make_shared(network->createReducedNetwork(*clustering)), resolution); int i = 0; for (size_t j = 0; j < nNodesPerClusterReducedNetwork.size(); j++) { for (int k = 0; k < nNodesPerClusterReducedNetwork[j]; k++) { 
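        // Seed the reduced network's clustering from the smart-local-moving result: every
        // cluster found inside subnetwork j is assigned to reduced-network cluster j here,
        // so the recursive call below refines the current partition rather than restarting
        // from singleton clusters.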
vosClusteringTechnique2.clustering->cluster[i] = static_cast(j); i++; } } vosClusteringTechnique2.clustering->nClusters = nNodesPerClusterReducedNetwork.size(); update |= vosClusteringTechnique2.runSmartLocalMovingAlgorithm(random); clustering->mergeClusters(*vosClusteringTechnique2.clustering); } return update; } bool VOSClusteringTechnique::runIteratedSmartLocalMovingAlgorithm(int nIterations, JavaRandom& random) { bool update = false; for (int i = 0; i < nIterations; i++) update |= runSmartLocalMovingAlgorithm(random); return update; } int VOSClusteringTechnique::removeCluster(int cluster) { DVector clusterWeight(clustering->nClusters); DVector totalEdgeWeightPerCluster(clustering->nClusters); for (int i = 0; i < network->nNodes; i++) { clusterWeight[clustering->cluster[i]] += network->nodeWeight[i]; if (clustering->cluster[i] == cluster) for (int j = network->firstNeighborIndex[i]; j < network->firstNeighborIndex[i + 1]; j++) totalEdgeWeightPerCluster[clustering->cluster[network->neighbor[j]]] += network->edgeWeight[j]; } int i = -1; double maxQualityFunction = 0; for (int j = 0; j < clustering->nClusters; j++) if ((j != cluster) && (clusterWeight[j] > 0)) { double qualityFunction = totalEdgeWeightPerCluster[j] / clusterWeight[j]; if (qualityFunction > maxQualityFunction) { i = j; maxQualityFunction = qualityFunction; } } if (i >= 0) { for (int j = 0; j < network->nNodes; j++) if (clustering->cluster[j] == cluster) clustering->cluster[j] = i; if (cluster == clustering->nClusters - 1) clustering->nClusters = *std::max_element(clustering->cluster.cbegin(), clustering->cluster.cend()) + 1; } return i; } void VOSClusteringTechnique::removeSmallClusters(int minNNodesPerCluster) { VOSClusteringTechnique vosClusteringTechnique(std::make_shared(network->createReducedNetwork(*clustering)), resolution); auto nNodesPerCluster = clustering->getNNodesPerCluster(); int i; do { i = -1; int j = minNNodesPerCluster; for (int k = 0; k < vosClusteringTechnique.clustering->nClusters; k++) if ((nNodesPerCluster[k] > 0) && (nNodesPerCluster[k] < j)) { i = k; j = nNodesPerCluster[k]; } if (i >= 0) { j = vosClusteringTechnique.removeCluster(i); if (j >= 0) nNodesPerCluster[j] += nNodesPerCluster[i]; nNodesPerCluster[i] = 0; } } while (i >= 0); clustering->mergeClusters(*vosClusteringTechnique.clustering); } std::shared_ptr ModularityOptimizer::matrixToNetwork(IVector& node1, IVector& node2, DVector& edgeWeight1, int modularityFunction, int nNodes) { int n1_max = *std::max_element(node1.cbegin(), node1.cend()); int n2_max = *std::max_element(node2.cbegin(), node2.cend()); IVector nNeighbors(nNodes); for (size_t i = 0; i < node1.size(); i++) if (node1[i] < node2[i]) { nNeighbors[node1[i]]++; nNeighbors[node2[i]]++; } IVector firstNeighborIndex(nNodes + 1); int nEdges = 0; for (int i = 0; i < nNodes; i++) { firstNeighborIndex[i] = nEdges; nEdges += nNeighbors[i]; } firstNeighborIndex[nNodes] = nEdges; IVector neighbor(nEdges); DVector edgeWeight2(nEdges); std::fill(nNeighbors.begin(), nNeighbors.end(), 0); for (size_t i = 0; i < node1.size(); i++) if (node1[i] < node2[i]) { int j = firstNeighborIndex[node1[i]] + nNeighbors[node1[i]]; neighbor[j] = node2[i]; edgeWeight2[j] = edgeWeight1[i]; nNeighbors[node1[i]]++; j = firstNeighborIndex[node2[i]] + nNeighbors[node2[i]]; neighbor[j] = node1[i]; edgeWeight2[j] = edgeWeight1[i]; nNeighbors[node2[i]]++; } if (modularityFunction == 1) return std::make_shared(nNodes, firstNeighborIndex, neighbor, &edgeWeight2); else { DVector nodeWeight(nNodes, 1.0); return 
std::make_shared(nNodes, &nodeWeight, firstNeighborIndex, neighbor, &edgeWeight2); } } std::shared_ptr ModularityOptimizer::readInputFile(std::string fname, int modularityFunction) { std::ifstream f; f.open(fname, std::ios::in); if(!f) { throw std::runtime_error("File could not be opened."); } std::string line; int nLines = 0; while(std::getline(f, line)) { nLines++; } f.clear(); f.seekg(0, std::ios::beg); IVector node1(nLines); IVector node2(nLines); DVector edgeWeight1(nLines, 1.0); for (int j = 0; j < nLines; j++) { std::getline(f, line); auto splittedLine = split(line, '\t'); node1[j] =std::stoi(splittedLine[0]); node2[j] = std::stoi(splittedLine[1]); if(splittedLine.size() > 2) { edgeWeight1[j] = std::stod(splittedLine[2]); } } int n1_max = *std::max_element(node1.cbegin(), node1.cend()); int n2_max = *std::max_element(node2.cbegin(), node2.cend()); int nNodes = std::max(n1_max, n2_max) + 1; return matrixToNetwork(node1, node2, edgeWeight1, modularityFunction, nNodes); } std::vector ModularityOptimizer::split(const std::string& s, char delimiter) { std::vector tokens; std::string token; std::istringstream tokenStream(s); while (std::getline(tokenStream, token, delimiter)) { tokens.push_back(token); } return tokens; } #ifdef STANDALONE void writeOutputFile(std::string fname, Clustering& clustering) { int nNodes = clustering.getNNodes(); clustering.orderClustersByNNodes(); std::ofstream f(fname, std::ios::out); for(int i=0; i < nNodes; i++) f << clustering.getCluster(i) << std::endl; f.close(); } template void input(std::string msg, T& value) { std::cout << msg << std::endl << std::endl; std::cin >> value; } int main(int argc, char* argv[]) { std::string msg = "Modularity Optimizer version 1.3.0 by Ludo Waltman and Nees Jan van Eck"; std::vector args; std::string inputFileName, outputFileName; bool printOutput, update; double modularity, maxModularity, resolution, resolution2; int algorithm, i, j, modularityFunction, nIterations, nRandomStarts; unsigned long long int randomSeed; for(int i=0; i 0); if (printOutput) { std::cout << msg << std::endl << std::endl; } } else { std::cout << msg << std::endl << std::endl; input("Input file name: ", inputFileName); input("Output file name: ", outputFileName); input("Modularity function (1 = standard; 2 = alternative): ", modularityFunction); input("Resolution parameter (e.g., 1.0): ", resolution); input("Algorithm (1 = Louvain; 2 = Louvain with multilevel refinement; 3 = smart local moving): ", algorithm); input("Number of random starts (e.g., 10): ", nRandomStarts); input("Number of iterations (e.g., 10): ",nIterations); input("Random seed (e.g., 0): ", randomSeed); int tmp; input("Print output (0 = no; 1 = yes): ",tmp); printOutput = tmp > 0; std::cout << std::endl; } if (printOutput) { std::cout << "Reading input file..." << std::endl << std::endl; } std::shared_ptr network = readInputFile(inputFileName, modularityFunction); if (printOutput) { std::printf("Number of nodes: %d\n", network->getNNodes()); std::printf("Number of edges: %d\n", network->getNEdges()); std::cout << std::endl; std::cout << "Running " << ((algorithm == 1) ? "Louvain algorithm" : ((algorithm == 2) ? "Louvain algorithm with multilevel refinement" : "smart local moving algorithm")) << "..."; std::cout << std::endl; } resolution2 = ((modularityFunction == 1) ? 
(resolution / (2 * network->getTotalEdgeWeight() + network->getTotalEdgeWeightSelfLinks())) : resolution); auto beginTime = duration_cast(system_clock::now().time_since_epoch()); std::shared_ptr clustering; maxModularity = -std::numeric_limits::infinity(); JavaRandom random(randomSeed); for (i = 0; i < nRandomStarts; i++) { if (printOutput && (nRandomStarts > 1)) std::printf("Random start: %d\n", i + 1); VOSClusteringTechnique vosClusteringTechnique(network, resolution2); j = 0; update = true; do { if (printOutput && (nIterations > 1)) std::printf("Iteration: %d\n", j + 1); if (algorithm == 1) update = vosClusteringTechnique.runLouvainAlgorithm(random); else if (algorithm == 2) update = vosClusteringTechnique.runLouvainAlgorithmWithMultilevelRefinement(random); else if (algorithm == 3) vosClusteringTechnique.runSmartLocalMovingAlgorithm(random); j++; modularity = vosClusteringTechnique.calcQualityFunction(); if (printOutput && (nIterations > 1)) std::printf("Modularity: %.4f\n", modularity); } while ((j < nIterations) && update); if (modularity > maxModularity) { clustering = vosClusteringTechnique.getClustering(); maxModularity = modularity; } if (printOutput && (nRandomStarts > 1)) { if (nIterations == 1) std::printf("Modularity: %.4f\n", modularity); std::cout << std::endl; } } auto endTime = duration_cast(system_clock::now().time_since_epoch()); if (printOutput) { if (nRandomStarts == 1) { if (nIterations > 1) std::cout << std::endl; std::printf("Modularity: %.4f\n", maxModularity); } else std::printf("Maximum modularity in %d random starts: %.4f\n", nRandomStarts, maxModularity); std::printf("Number of communities: %d\n", clustering->getNClusters()); std::printf("Elapsed time: %d seconds\n", static_cast((endTime - beginTime).count() / 1000.0)); std::cout << std::endl << "Writing output file..." 
      << std::endl;
    }
    writeOutputFile(outputFileName, *clustering);
  }
  catch (std::exception a) {
    std::cout << a.what() << std::endl;
  }
  return 0;
};
#endif
Seurat/src/fast_NN_dist.cpp0000644000176200001440000000400414525500037015362 0ustar liggesusers
#include <Rcpp.h>
using namespace Rcpp;

// code in-parts taken from https://gallery.rcpp.org/articles/parallel-distance-matrix/
// Assumption: the end position of vector2 is implied by the end position of vector1
// generic function for euclidean distance
template <typename InputIterator1, typename InputIterator2>
inline double euclidean_distance(InputIterator1 begin1, InputIterator1 end1, InputIterator2 begin2) {
  // value to return
  double rval = 0;
  // set iterators to beginning of ranges
  InputIterator1 it1 = begin1;
  InputIterator2 it2 = begin2;
  // for each input item
  while (it1 != end1) {
    // take the value and increment the iterator
    double d1 = *it1++;
    double d2 = *it2++;
    // update the distance
    rval += pow(d1 - d2, 2);
  }
  return sqrt(rval);
}

// [[Rcpp::export]]
List fast_dist(NumericMatrix x, NumericMatrix y, List n) {
  // extracting the number of element in the knn graph
  size_t ngraph_size = n.size();
  if (x.nrow() != ngraph_size) {
    return List();
  }
  List distances_list = clone(n);
  // looping over the neighbors
  // (the body of this loop was lost in extraction; it is reconstructed below on the
  // assumption that, for each cell i of x, the euclidean distances to its listed
  // neighbors in y are computed, with 1-based neighbor indices coming from R)
  for (size_t i = 0; i < ngraph_size; i++) {
    IntegerVector neighbors = n[i];
    NumericVector distances(neighbors.size());
    for (int j = 0; j < neighbors.size(); j++) {
      NumericMatrix::Row row_x = x.row(i);
      NumericMatrix::Row row_y = y.row(neighbors[j] - 1);
      distances[j] = euclidean_distance(row_x.begin(), row_x.end(), row_y.begin());
    }
    distances_list[i] = distances;
  }
  return distances_list;
}

Seurat/src/data_manipulation.h
#ifndef DATA_MANIPULATION
#define DATA_MANIPULATION

// (include targets and template parameters in this header were stripped during extraction;
// the <...> arguments below are restored by inference and should be checked against upstream)
#include <RcppEigen.h>
#include <progress.hpp>
#include <cmath>
#include <unordered_map>
#include <fstream>
#include <string>

using namespace Rcpp;

//----------------------------------------------------
Eigen::SparseMatrix<double> RunUMISampling(Eigen::SparseMatrix<double> data, int sample_val, bool upsample, bool display_progress);
Eigen::SparseMatrix<double> RunUMISamplingPerCell(Eigen::SparseMatrix<double> data, NumericVector sample_val, bool upsample, bool display_progress);
Eigen::SparseMatrix<double> RowMergeMatrices(Eigen::SparseMatrix<double, Eigen::RowMajor> mat1, Eigen::SparseMatrix<double, Eigen::RowMajor> mat2, std::vector< std::string > mat1_rownames, std::vector< std::string > mat2_rownames, std::vector< std::string > all_rownames);
Eigen::SparseMatrix<double> LogNorm(Eigen::SparseMatrix<double> data, int scale_factor, bool display_progress);
NumericMatrix Standardize(const Eigen::Map<Eigen::MatrixXd> mat, bool display_progress);
Eigen::MatrixXd FastSparseRowScale(Eigen::SparseMatrix<double> mat, bool scale, bool center, double scale_max, bool display_progress);
Eigen::MatrixXd FastCov(Eigen::MatrixXd mat, bool center);
Eigen::MatrixXd FastCovMats(Eigen::MatrixXd mat1, Eigen::MatrixXd mat2, bool center);
Eigen::MatrixXd FastRBind(Eigen::MatrixXd mat1, Eigen::MatrixXd mat2);
Eigen::VectorXd FastExpMean(Eigen::MatrixXd mat, bool display_progress);
Eigen::VectorXd FastRowMean(Eigen::MatrixXd mat, bool display_progress);
Eigen::VectorXd FastLogVMR(Eigen::SparseMatrix<double> mat, bool display_progress);
Eigen::VectorXd FastExpVar(Eigen::SparseMatrix<double> mat, bool display_progress);
Eigen::VectorXd SparseRowVar(Eigen::SparseMatrix<double> mat, bool display_progress);
NumericVector SparseRowVar2(Eigen::SparseMatrix<double> mat, NumericVector mu, bool display_progress);
NumericVector SparseRowVarStd(Eigen::SparseMatrix<double> mat, NumericVector mu, NumericVector sd, double vmax, bool display_progress);
NumericVector RowVar(Eigen::Map<Eigen::MatrixXd> x);
template <typename T> std::vector<size_t> sort_indexes(const std::vector<T> &v);
List GraphToNeighborHelper(Eigen::SparseMatrix<double> mat);
//----------------------------------------------------
#endif//DATA_MANIPULATION
Seurat/src/integration.h0000644000176200001440000000175414525500037015010 0ustar liggesusers
#ifndef CORRECT_EXPRESSION
#define CORRECT_EXPRESSION

// (include targets and template parameters restored by inference, as above)
#include <RcppEigen.h>
#include <progress.hpp>
using namespace Rcpp;

//----------------------------------------------------
Eigen::SparseMatrix<double> FindWeightsC(
  NumericVector cells2,
  Eigen::MatrixXd distances,
  std::vector<std::string> anchor_cells2,
  std::vector<std::string> integration_matrix_rownames,
Eigen::MatrixXd cell_index, Eigen::VectorXd anchor_score, double min_dist, double sd, bool display_progress ); Eigen::SparseMatrix IntegrateDataC( Eigen::SparseMatrix integration_matrix, Eigen::SparseMatrix weights, Eigen::SparseMatrix expression_cells2 ); std::vector ScoreHelper( Eigen::SparseMatrix snn, Eigen::MatrixXd query_pca, Eigen::MatrixXd query_dists, Eigen::MatrixXd corrected_nns, int k_snn, bool subtract_first_nn, bool display_progress ); //---------------------------------------------------- #endif//CORRECT_EXPRESSION Seurat/src/ModularityOptimizer.h0000644000176200001440000001323514525500037016516 0ustar liggesusers#pragma once #include #include #include #include #include #include #include #include typedef std::vector IVector; typedef std::vector DVector; namespace ModularityOptimizer { class JavaRandom { private: uint64_t seed; int next(int bits); public: JavaRandom(uint64_t seed); int nextInt(int n); void setSeed(uint64_t seed); }; namespace Arrays2 { IVector generateRandomPermutation(int nElements); IVector generateRandomPermutation(int nElements, JavaRandom& random); } class Clustering { private: int nNodes; public: // Note: These two variables were "protected" in java, which means it is accessible to the whole package/public. // Although we could have used friend classes, this allows for better mirroring of the original code. int nClusters; IVector cluster; Clustering(int nNodes); Clustering(IVector cluster); int getNNodes() const {return nNodes;}; int getNClusters() const {return nClusters;}; IVector getClusters() const {return cluster;}; int getCluster(int node) const {return cluster[node];}; IVector getNNodesPerCluster() const; std::vector getNodesPerCluster() const; void setCluster(int node, int cluster); void initSingletonClusters(); void orderClustersByNNodes(); void mergeClusters(const Clustering& clustering); }; class Network { friend class VOSClusteringTechnique; protected: int nNodes; int nEdges; DVector nodeWeight; IVector firstNeighborIndex; IVector neighbor; DVector edgeWeight; double totalEdgeWeightSelfLinks; public: Network(); Network(int nNodes, DVector* nodeWeight, std::vector& edge, DVector* edgeWeight); Network(int nNodes, std::vector& edge) : Network(nNodes, nullptr, edge, nullptr) { }; Network(int nNodes, DVector* nodeWeight, std::vector edge) : Network(nNodes, nodeWeight, edge, nullptr) {}; Network(int nNodes, std::vector& edge, DVector* edgeWeight) : Network(nNodes, nullptr, edge, edgeWeight) {}; Network(int nNodes, DVector* nodeWeight, IVector& firstNeighborIndex, IVector& neighbor, DVector* edgeWeight); Network(int nNodes, IVector& firstNeighborIndex, IVector& neighbor) : Network(nNodes, nullptr, firstNeighborIndex, neighbor, nullptr) {}; Network(int nNodes, DVector* nodeWeight, IVector& firstNeighborIndex, IVector& neighbor) : Network(nNodes, nodeWeight, firstNeighborIndex, neighbor, nullptr){}; Network(int nNodes, IVector& firstNeighborIndex, IVector& neighbor, DVector* edgeWeight) : Network(nNodes, nullptr, firstNeighborIndex, neighbor, edgeWeight) {}; int getNNodes() {return nNodes;}; double getTotalNodeWeight(); DVector getNodeWeights(); double getNodeWeight(int node) { return nodeWeight.at(node);}; int getNEdges() {return nEdges / 2;}; int getNEdges(int node) {return firstNeighborIndex.at(node + 1) - firstNeighborIndex.at(node);}; IVector getNEdgesPerNode(); std::vector getEdges(); IVector getEdges(int node); std::vector getEdgesPerNode(); double getTotalEdgeWeight(); double getTotalEdgeWeight(int node); DVector getTotalEdgeWeightPerNode(); 
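// Editorial note (not from the upstream sources): the accessors above imply a
// CSR-style adjacency layout. For a node v, its incident edges occupy positions
// firstNeighborIndex[v] .. firstNeighborIndex[v + 1] - 1 of the parallel arrays
// `neighbor` (target node id) and `edgeWeight` (edge weight), so
// firstNeighborIndex holds nNodes + 1 offsets and nEdges counts directed
// half-edges -- which is why getNEdges() returns nEdges / 2. A minimal, hedged
// sketch of walking one node's neighbourhood through the public API, assuming a
// constructed Network `net` and a valid node id `v`:
//
//   IVector nb = net.getEdges(v);        // neighbour ids of node v
//   DVector w  = net.getEdgeWeights(v);  // weights parallel to nb
//   for (size_t e = 0; e < nb.size(); ++e) {
//     // nb[e] is the neighbour, w[e] the corresponding edge weight
//   }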
DVector getEdgeWeights() {return edgeWeight;}; DVector getEdgeWeights(int node); std::vector getEdgeWeightsPerNode(); double getTotalEdgeWeightSelfLinks() { return totalEdgeWeightSelfLinks; }; // Added these to avoid making these values public int getFirstNeighborIndexValue(int i) const { return firstNeighborIndex.at(i); }; int getNeighborValue(int index) const { return neighbor.at(index); } std::vector createSubnetworks(Clustering clustering) const; Network createReducedNetwork(const Clustering& clustering) const; Clustering identifyComponents(); private: double generateRandomNumber(int node1, int node2, const IVector& nodePermutation); Network createSubnetwork(const Clustering& clustering, int cluster, IVector& node, IVector& subnetworkNode, IVector& subnetworkNeighbor, DVector& subnetworkEdgeWeight) const; }; class VOSClusteringTechnique { private: std::shared_ptr network; std::shared_ptr clustering; double resolution; public: VOSClusteringTechnique(std::shared_ptr network, double resolution); VOSClusteringTechnique(std::shared_ptr network, std::shared_ptr clustering, double resolution); std::shared_ptr getNetwork() { return network;} std::shared_ptr getClustering() { return clustering; } double getResolution() {return resolution; } void setNetwork(std::shared_ptr network) {this->network = network;} void setClustering(std::shared_ptr clustering) {this->clustering = clustering;} void setResolution(double resolution) {this->resolution = resolution;} double calcQualityFunction(); bool runLocalMovingAlgorithm(JavaRandom& random); bool runLouvainAlgorithm(JavaRandom& random); bool runIteratedLouvainAlgorithm(int maxNIterations, JavaRandom& random); bool runLouvainAlgorithmWithMultilevelRefinement(JavaRandom& random); bool runIteratedLouvainAlgorithmWithMultilevelRefinement(int maxNIterations, JavaRandom& random); bool runSmartLocalMovingAlgorithm(JavaRandom& random); bool runIteratedSmartLocalMovingAlgorithm(int nIterations, JavaRandom& random); int removeCluster(int cluster); void removeSmallClusters(int minNNodesPerCluster); }; std::shared_ptr matrixToNetwork(IVector& node1, IVector& node2, DVector& edgeWeight1, int modularityFunction, int nNodes); std::shared_ptr readInputFile(std::string fname, int modularityFunction); std::vector split(const std::string& s, char delimiter); }; Seurat/src/valid_pointer.c0000644000176200001440000000024714525500037015313 0ustar liggesusers#include // helper to determine if external c++ pointer is valid SEXP isnull(SEXP pointer) { return Rf_ScalarLogical(!R_ExternalPtrAddr(pointer)); } Seurat/src/integration.cpp0000644000176200001440000001507314525500037015342 0ustar liggesusers#include #include #include #include "data_manipulation.h" using namespace Rcpp; // [[Rcpp::depends(RcppEigen)]] // [[Rcpp::depends(RcppProgress)]] typedef Eigen::Triplet T; // [[Rcpp::export(rng = false)]] Eigen::SparseMatrix FindWeightsC( NumericVector cells2, Eigen::MatrixXd distances, std::vector anchor_cells2, std::vector integration_matrix_rownames, Eigen::MatrixXd cell_index, Eigen::VectorXd anchor_score, double min_dist, double sd, bool display_progress ) { std::vector tripletList; tripletList.reserve(anchor_cells2.size() * 10); std::unordered_map> cell_map; Progress p(anchor_cells2.size() + cells2.size() , display_progress); // build map from anchor_cells2 to integration_matrix rows for(int i=0; i matches; std::vector::iterator iter = integration_matrix_rownames.begin(); while ((iter = std::find(iter, integration_matrix_rownames.end(), anchor_cells2[i])) != 
integration_matrix_rownames.end()) { int idx = std::distance(integration_matrix_rownames.begin(), iter); matches.push_back(idx); iter++; } cell_map[i] = matches; p.increment(); } // Construct dist_weights matrix for(auto const &cell : cells2){ Eigen::VectorXd dist = distances.row(cell); Eigen::VectorXd indices = cell_index.row(cell); int k=0; //number of anchors used so far; a cell in the neighbor list may contribute to multiple anchors for(int i=0; i mnn_idx = cell_map[indices[i]-1]; for(int j=0; j return_mat; if(min_dist == 0){ Eigen::SparseMatrix dist_weights(integration_matrix_rownames.size(), cells2.size()); dist_weights.setFromTriplets(tripletList.begin(), tripletList.end(), [] (const double&, const double &b) { return b; }); Eigen::VectorXd colSums = dist_weights.transpose() * Eigen::VectorXd::Ones(dist_weights.rows()); for (int k=0; k < dist_weights.outerSize(); ++k){ for (Eigen::SparseMatrix::InnerIterator it(dist_weights, k); it; ++it){ it.valueRef() = it.value()/colSums[k]; } } return_mat = dist_weights; } else { Eigen::MatrixXd dist_weights = Eigen::MatrixXd::Constant(integration_matrix_rownames.size(), cells2.size(), min_dist); for(int i = 0; i < dist_weights.cols(); ++i){ for(int j = 0; j < dist_weights.rows(); ++j){ dist_weights(j, i) = 1 - exp(-1 * dist_weights(j, i) * anchor_score[j]/ pow(2/sd, 2) ); } } for(auto const &weight : tripletList){ dist_weights(weight.row(), weight.col()) = weight.value(); } Eigen::VectorXd colSums = dist_weights.colwise().sum(); for(int i = 0; i < dist_weights.cols(); ++i){ for(int j = 0; j < dist_weights.rows(); ++j){ dist_weights(j, i) = dist_weights(j, i) / colSums[i]; } } return_mat = dist_weights.sparseView(); } return(return_mat); } // [[Rcpp::export(rng = false)]] Eigen::SparseMatrix IntegrateDataC( Eigen::SparseMatrix integration_matrix, Eigen::SparseMatrix weights, Eigen::SparseMatrix expression_cells2 ) { Eigen::SparseMatrix corrected = expression_cells2 - weights.transpose() * integration_matrix; return(corrected); } template std::vector sort_indexes(const std::vector &v) { // initialize original index locations std::vector idx(v.size()); std::iota(idx.begin(), idx.end(), 0); std::stable_sort(idx.begin(), idx.end(), [&v](size_t i1, size_t i2) {return v[i1] < v[i2];}); return idx; } // [[Rcpp::export]] std::vector ScoreHelper( Eigen::SparseMatrix snn, Eigen::MatrixXd query_pca, Eigen::MatrixXd query_dists, Eigen::MatrixXd corrected_nns, int k_snn, bool subtract_first_nn, bool display_progress ) { std::vector scores; // Loop over all query cells Progress p(snn.outerSize(), display_progress); for (int i=0; i < snn.outerSize(); ++i){ p.increment(); // create vectors to store the nonzero snn elements and their indices std::vector nonzero; std::vector nonzero_idx; for (Eigen::SparseMatrix::InnerIterator it(snn, i); it; ++it) { nonzero.push_back(it.value()); nonzero_idx.push_back(it.row()); } // find the k_snn cells with the smallest non-zero edge weights to use in // computing the transition probability bandwidth std::vector nonzero_order = sort_indexes(nonzero); std::vector bw_dists; int k_snn_i = k_snn; if (k_snn_i > nonzero_order.size()) k_snn_i = nonzero_order.size(); for (int j = 0; j < nonzero_order.size(); ++j) { // compute euclidean distances to cells with small edge weights size_t cell = nonzero_idx[nonzero_order[j]]; if(bw_dists.size() < k_snn_i || nonzero[nonzero_order[j]] == nonzero[nonzero_order[k_snn_i-1]]) { double res = (query_pca.col(cell) - query_pca.col(i)).norm(); bw_dists.push_back(res); } else { break; } } // compute 
bandwidth as the mean distance of the farthest k_snn cells double bw; if (bw_dists.size() > k_snn_i) { std::sort(bw_dists.rbegin(), bw_dists.rend()); bw = std::accumulate(bw_dists.begin(), bw_dists.begin() + k_snn_i, 0.0) / k_snn_i; } else { bw = std::accumulate(bw_dists.begin(), bw_dists.end(), 0.0) / bw_dists.size(); } // compute transition probabilites double first_neighbor_dist; // subtract off distance to first neighbor? if (subtract_first_nn) { first_neighbor_dist = query_dists(i, 1); } else { first_neighbor_dist = 0; } bw = bw - first_neighbor_dist; double q_tps = 0; for(int j = 0; j < query_dists.cols(); ++j) { q_tps += std::exp(-1 * (query_dists(i, j) - first_neighbor_dist) / bw); } q_tps = q_tps/(query_dists.cols()); double c_tps = 0; for(int j = 0; j < corrected_nns.cols(); ++j) { c_tps += exp(-1 * ((query_pca.col(i) - query_pca.col(corrected_nns(i, j)-1)).norm() - first_neighbor_dist) / bw); } c_tps = c_tps/(corrected_nns.cols()); scores.push_back(c_tps/q_tps); } return(scores); } Seurat/src/data_manipulation.cpp0000644000176200001440000003600314525500037016504 0ustar liggesusers#include #include #include #include #include #include #include using namespace Rcpp; // [[Rcpp::depends(RcppEigen)]] // [[Rcpp::depends(RcppProgress)]] // [[Rcpp::export]] Eigen::SparseMatrix RunUMISampling(Eigen::SparseMatrix data, int sample_val, bool upsample = false, bool display_progress=true){ Progress p(data.outerSize(), display_progress); Eigen::VectorXd colSums = data.transpose() * Eigen::VectorXd::Ones(data.rows()); for (int k=0; k < data.outerSize(); ++k){ p.increment(); for (Eigen::SparseMatrix::InnerIterator it(data, k); it; ++it){ double entry = it.value(); if( (upsample) || (colSums[k] > sample_val)){ entry = entry * double(sample_val) / colSums[k]; if (fmod(entry, 1) != 0){ double rn = R::runif(0,1); if(fmod(entry, 1) <= rn){ it.valueRef() = floor(entry); } else{ it.valueRef() = ceil(entry); } } else{ it.valueRef() = entry; } } } } return(data); } // [[Rcpp::export]] Eigen::SparseMatrix RunUMISamplingPerCell(Eigen::SparseMatrix data, NumericVector sample_val, bool upsample = false, bool display_progress=true){ Progress p(data.outerSize(), display_progress); Eigen::VectorXd colSums = data.transpose() * Eigen::VectorXd::Ones(data.rows()); for (int k=0; k < data.outerSize(); ++k){ p.increment(); for (Eigen::SparseMatrix::InnerIterator it(data, k); it; ++it){ double entry = it.value(); if( (upsample) || (colSums[k] > sample_val[k])){ entry = entry * double(sample_val[k]) / colSums[k]; if (fmod(entry, 1) != 0){ double rn = R::runif(0,1); if(fmod(entry, 1) <= rn){ it.valueRef() = floor(entry); } else{ it.valueRef() = ceil(entry); } } else{ it.valueRef() = entry; } } } } return(data); } typedef Eigen::Triplet T; // [[Rcpp::export(rng = false)]] Eigen::SparseMatrix RowMergeMatrices(Eigen::SparseMatrix mat1, Eigen::SparseMatrix mat2, std::vector< std::string > mat1_rownames, std::vector< std::string > mat2_rownames, std::vector< std::string > all_rownames){ // Set up hash maps for rowname based lookup std::unordered_map mat1_map; for(unsigned int i = 0; i < mat1_rownames.size(); i++){ mat1_map[mat1_rownames[i]] = i; } std::unordered_map mat2_map; for(unsigned int i = 0; i < mat2_rownames.size(); i++){ mat2_map[mat2_rownames[i]] = i; } // set up tripletList for new matrix creation std::vector tripletList; int num_rows = all_rownames.size(); int num_col1 = mat1.cols(); int num_col2 = mat2.cols(); tripletList.reserve(mat1.nonZeros() + mat2.nonZeros()); for(int i = 0; i < num_rows; i++){ std::string 
key = all_rownames[i]; if (mat1_map.count(key)){ for(Eigen::SparseMatrix::InnerIterator it1(mat1, mat1_map[key]); it1; ++it1){ tripletList.emplace_back(i, it1.col(), it1.value()); } } if (mat2_map.count(key)){ for(Eigen::SparseMatrix::InnerIterator it2(mat2, mat2_map[key]); it2; ++it2){ tripletList.emplace_back(i, num_col1 + it2.col(), it2.value()); } } } Eigen::SparseMatrix combined_mat(num_rows, num_col1 + num_col2); combined_mat.setFromTriplets(tripletList.begin(), tripletList.end()); return combined_mat; } // [[Rcpp::export(rng = false)]] Eigen::SparseMatrix LogNorm(Eigen::SparseMatrix data, int scale_factor, bool display_progress = true){ Progress p(data.outerSize(), display_progress); Eigen::VectorXd colSums = data.transpose() * Eigen::VectorXd::Ones(data.rows()); for (int k=0; k < data.outerSize(); ++k){ p.increment(); for (Eigen::SparseMatrix::InnerIterator it(data, k); it; ++it){ it.valueRef() = log1p(double(it.value()) / colSums[k] * scale_factor); } } return data; } /* Performs column scaling and/or centering. Equivalent to using scale(mat, TRUE, apply(x,2,sd)) in R. Note: Doesn't handle NA/NaNs in the same way the R implementation does, */ // [[Rcpp::export(rng = false)]] NumericMatrix Standardize(Eigen::Map mat, bool display_progress = true){ Progress p(mat.cols(), display_progress); NumericMatrix std_mat(mat.rows(), mat.cols()); for(int i=0; i < mat.cols(); ++i){ p.increment(); Eigen::ArrayXd r = mat.col(i).array(); double colMean = r.mean(); double colSdev = sqrt((r - colMean).square().sum() / (mat.rows() - 1)); NumericMatrix::Column new_col = std_mat(_, i); for(int j=0; j < new_col.size(); j++) { new_col[j] = (r[j] - colMean) / colSdev; } } return std_mat; } // [[Rcpp::export(rng = false)]] Eigen::MatrixXd FastSparseRowScale(Eigen::SparseMatrix mat, bool scale = true, bool center = true, double scale_max = 10, bool display_progress = true){ mat = mat.transpose(); Progress p(mat.outerSize(), display_progress); Eigen::MatrixXd scaled_mat(mat.rows(), mat.cols()); for (int k=0; k::InnerIterator it(mat,k); it; ++it) { colMean += it.value(); } colMean = colMean / mat.rows(); if (scale == true){ int nnZero = 0; if(center == true){ for (Eigen::SparseMatrix::InnerIterator it(mat,k); it; ++it) { nnZero += 1; colSdev += pow((it.value() - colMean), 2); } colSdev += pow(colMean, 2) * (mat.rows() - nnZero); } else{ for (Eigen::SparseMatrix::InnerIterator it(mat,k); it; ++it) { colSdev += pow(it.value(), 2); } } colSdev = sqrt(colSdev / (mat.rows() - 1)); } else{ colSdev = 1; } if(center == false){ colMean = 0; } Eigen::VectorXd col = Eigen::VectorXd(mat.col(k)); scaled_mat.col(k) = (col.array() - colMean) / colSdev; for(int s=0; s scale_max){ scaled_mat(s,k) = scale_max; } } } return scaled_mat.transpose(); } // [[Rcpp::export(rng = false)]] Eigen::MatrixXd FastSparseRowScaleWithKnownStats(Eigen::SparseMatrix mat, NumericVector mu, NumericVector sigma, bool scale = true, bool center = true, double scale_max = 10, bool display_progress = true){ mat = mat.transpose(); Progress p(mat.outerSize(), display_progress); Eigen::MatrixXd scaled_mat(mat.rows(), mat.cols()); for (int k=0; k scale_max){ scaled_mat(s,k) = scale_max; } } } return scaled_mat.transpose(); } /* Note: May not handle NA/NaNs in the same way the R implementation does, */ // [[Rcpp::export(rng = false)]] Eigen::MatrixXd FastCov(Eigen::MatrixXd mat, bool center = true){ if (center) { mat = mat.rowwise() - mat.colwise().mean(); } Eigen::MatrixXd cov = (mat.adjoint() * mat) / double(mat.rows() - 1); return(cov); } // 
[[Rcpp::export(rng = false)]] Eigen::MatrixXd FastCovMats(Eigen::MatrixXd mat1, Eigen::MatrixXd mat2, bool center = true){ if(center){ mat1 = mat1.rowwise() - mat1.colwise().mean(); mat2 = mat2.rowwise() - mat2.colwise().mean(); } Eigen::MatrixXd cov = (mat1.adjoint() * mat2) / double(mat1.rows() - 1); return(cov); } /* Note: Faster than the R implementation but is not in-place */ // [[Rcpp::export(rng = false)]] Eigen::MatrixXd FastRBind(Eigen::MatrixXd mat1, Eigen::MatrixXd mat2){ Eigen::MatrixXd mat3(mat1.rows() + mat2.rows(), mat1.cols()); mat3 << mat1, mat2; return(mat3); } /* Calculates the row means of the logged values in non-log space */ // [[Rcpp::export(rng = false)]] Eigen::VectorXd FastExpMean(Eigen::SparseMatrix mat, bool display_progress){ int ncols = mat.cols(); Eigen::VectorXd rowmeans(mat.rows()); mat = mat.transpose(); if(display_progress == true){ Rcpp::Rcerr << "Calculating gene means" << std::endl; } Progress p(mat.outerSize(), display_progress); for (int k=0; k::InnerIterator it(mat,k); it; ++it){ rm += expm1(it.value()); } rm = rm / ncols; rowmeans[k] = log1p(rm); } return(rowmeans); } /* use this if you know the row means */ // [[Rcpp::export(rng = false)]] NumericVector SparseRowVar2(Eigen::SparseMatrix mat, NumericVector mu, bool display_progress){ mat = mat.transpose(); if(display_progress == true){ Rcpp::Rcerr << "Calculating gene variances" << std::endl; } Progress p(mat.outerSize(), display_progress); NumericVector allVars = no_init(mat.cols()); for (int k=0; k::InnerIterator it(mat,k); it; ++it) { nZero -= 1; colSum += pow(it.value() - mu[k], 2); } colSum += pow(mu[k], 2) * nZero; allVars[k] = colSum / (mat.rows() - 1); } return(allVars); } /* standardize matrix rows using given mean and standard deviation, clip values larger than vmax to vmax, then return variance for each row */ // [[Rcpp::export(rng = false)]] NumericVector SparseRowVarStd(Eigen::SparseMatrix mat, NumericVector mu, NumericVector sd, double vmax, bool display_progress){ if(display_progress == true){ Rcpp::Rcerr << "Calculating feature variances of standardized and clipped values" << std::endl; } mat = mat.transpose(); NumericVector allVars(mat.cols()); Progress p(mat.outerSize(), display_progress); for (int k=0; k::InnerIterator it(mat,k); it; ++it) { nZero -= 1; colSum += pow(std::min(vmax, (it.value() - mu[k]) / sd[k]), 2); } colSum += pow((0 - mu[k]) / sd[k], 2) * nZero; allVars[k] = colSum / (mat.rows() - 1); } return(allVars); } /* Calculate the variance to mean ratio (VMR) in non-logspace (return answer in log-space) */ // [[Rcpp::export(rng = false)]] Eigen::VectorXd FastLogVMR(Eigen::SparseMatrix mat, bool display_progress){ int ncols = mat.cols(); Eigen::VectorXd rowdisp(mat.rows()); mat = mat.transpose(); if(display_progress == true){ Rcpp::Rcerr << "Calculating gene variance to mean ratios" << std::endl; } Progress p(mat.outerSize(), display_progress); for (int k=0; k::InnerIterator it(mat,k); it; ++it){ rm += expm1(it.value()); } rm = rm / ncols; for (Eigen::SparseMatrix::InnerIterator it(mat,k); it; ++it){ v += pow(expm1(it.value()) - rm, 2); nnZero += 1; } v = (v + (ncols - nnZero) * pow(rm, 2)) / (ncols - 1); rowdisp[k] = log(v/rm); } return(rowdisp); } /* Calculates the variance of rows of a matrix */ // [[Rcpp::export(rng = false)]] NumericVector RowVar(Eigen::Map x){ NumericVector out(x.rows()); for(int i=0; i < x.rows(); ++i){ Eigen::ArrayXd r = x.row(i).array(); double rowMean = r.mean(); out[i] = (r - rowMean).square().sum() / (x.cols() - 1); } return out; } /* 
Calculate the variance in non-logspace (return answer in non-logspace) */ // [[Rcpp::export(rng = false)]] Eigen::VectorXd SparseRowVar(Eigen::SparseMatrix mat, bool display_progress){ int ncols = mat.cols(); Eigen::VectorXd rowdisp(mat.rows()); mat = mat.transpose(); if(display_progress == true){ Rcpp::Rcerr << "Calculating gene variances" << std::endl; } Progress p(mat.outerSize(), display_progress); for (int k=0; k::InnerIterator it(mat,k); it; ++it){ rm += (it.value()); } rm = rm / ncols; for (Eigen::SparseMatrix::InnerIterator it(mat,k); it; ++it){ v += pow((it.value()) - rm, 2); nnZero += 1; } v = (v + (ncols - nnZero) * pow(rm, 2)) / (ncols - 1); rowdisp[k] = v; } return(rowdisp); } //cols_idx should be 0-indexed // [[Rcpp::export(rng = false)]] Eigen::SparseMatrix ReplaceColsC(Eigen::SparseMatrix mat, NumericVector col_idx, Eigen::SparseMatrix replacement){ int rep_idx = 0; for(auto const &ci : col_idx){ mat.col(ci) = replacement.col(rep_idx); rep_idx += 1; } return(mat); } template std::vector sort_indexes(const std::vector &v) { // initialize original index locations std::vector idx(v.size()); std::iota(idx.begin(), idx.end(), 0); std::stable_sort(idx.begin(), idx.end(), [&v](size_t i1, size_t i2) {return v[i1] < v[i2];}); return idx; } // [[Rcpp::export(rng = false)]] List GraphToNeighborHelper(Eigen::SparseMatrix mat) { mat = mat.transpose(); //determine the number of neighbors int n = 0; for(Eigen::SparseMatrix::InnerIterator it(mat, 0); it; ++it) { n += 1; } Eigen::MatrixXd nn_idx(mat.rows(), n); Eigen::MatrixXd nn_dist(mat.rows(), n); for (int k=0; k row_idx; std::vector row_dist; row_idx.reserve(n); row_dist.reserve(n); for (Eigen::SparseMatrix::InnerIterator it(mat,k); it; ++it) { if (n_k > (n-1)) { Rcpp::stop("Not all cells have an equal number of neighbors."); } row_idx.push_back(it.row() + 1); row_dist.push_back(it.value()); n_k += 1; } if (n_k != n) { Rcpp::Rcout << n << ":::" << n_k << std::endl; Rcpp::stop("Not all cells have an equal number of neighbors."); } //order the idx based on dist std::vector idx_order = sort_indexes(row_dist); for(int i = 0; i < n; ++i) { nn_idx(k, i) = row_idx[idx_order[i]]; nn_dist(k, i) = row_dist[idx_order[i]]; } } List neighbors = List::create(nn_idx, nn_dist); return(neighbors); } Seurat/src/snn.cpp0000644000176200001440000001032614525500037013611 0ustar liggesusers#include #include "data_manipulation.h" #include #include #include #include #include #include using namespace Rcpp; // [[Rcpp::depends(RcppEigen)]] // [[Rcpp::depends(RcppProgress)]] typedef Eigen::Triplet T; // [[Rcpp::export(rng = false)]] Eigen::SparseMatrix ComputeSNN(Eigen::MatrixXd nn_ranked, double prune) { std::vector tripletList; int k = nn_ranked.cols(); tripletList.reserve(nn_ranked.rows() * nn_ranked.cols()); for(int j=0; j SNN(nn_ranked.rows(), nn_ranked.rows()); SNN.setFromTriplets(tripletList.begin(), tripletList.end()); SNN = SNN * (SNN.transpose()); for (int i=0; i < SNN.outerSize(); ++i){ for (Eigen::SparseMatrix::InnerIterator it(SNN, i); it; ++it){ it.valueRef() = it.value()/(k + (k - it.value())); if(it.value() < prune){ it.valueRef() = 0; } } } SNN.prune(0.0); // actually remove pruned values return SNN; } // [[Rcpp::export(rng = false)]] void WriteEdgeFile(Eigen::SparseMatrix snn, String filename, bool display_progress){ if (display_progress == true) { Rcpp::Rcerr << "Writing SNN as edge file" << std::endl; } // Write out lower triangle std::ofstream output; output.open(filename); Progress p(snn.outerSize(), display_progress); for (int k=0; k < 
snn.outerSize(); ++k){ p.increment(); for (Eigen::SparseMatrix::InnerIterator it(snn, k); it; ++it){ if(it.col() >= it.row()){ continue; } output << std::setprecision(15) << it.col() << "\t" << it.row() << "\t" << it.value() << "\n"; } } output.close(); } // Wrapper function so that we don't have to go back into R before writing to file // [[Rcpp::export(rng = false)]] Eigen::SparseMatrix DirectSNNToFile(Eigen::MatrixXd nn_ranked, double prune, bool display_progress, String filename) { Eigen::SparseMatrix SNN = ComputeSNN(nn_ranked, prune); WriteEdgeFile(SNN, filename, display_progress); return SNN; } template std::vector sort_indexes(const std::vector &v) { // initialize original index locations std::vector idx(v.size()); std::iota(idx.begin(), idx.end(), 0); std::stable_sort(idx.begin(), idx.end(), [&v](size_t i1, size_t i2) {return v[i1] < v[i2];}); return idx; } // [[Rcpp::export]] std::vector SNN_SmallestNonzero_Dist( Eigen::SparseMatrix snn, Eigen::MatrixXd mat, int n, std::vector nearest_dist ) { std::vector results; for (int i=0; i < snn.outerSize(); ++i){ // create vectors to store the nonzero snn elements and their indices std::vector nonzero; std::vector nonzero_idx; for (Eigen::SparseMatrix::InnerIterator it(snn, i); it; ++it) { nonzero.push_back(it.value()); nonzero_idx.push_back(it.row()); } std::vector nonzero_order = sort_indexes(nonzero); int n_i = n; if (n_i > nonzero_order.size()) n_i = nonzero_order.size(); std::vector dists; for (int j = 0; j < nonzero_order.size(); ++j) { // compute euclidean distances to cells with small edge weights // if multiple entries have same value as nth element, calc dist to all size_t cell = nonzero_idx[nonzero_order[j]]; if(dists.size() < n_i || nonzero[nonzero_order[j]] == nonzero[nonzero_order[n_i-1]]) { double res = (mat.row(cell) - mat.row(i)).norm(); if (nearest_dist[i] > 0) { res = res - nearest_dist[i]; if (res < 0) res = 0; } dists.push_back(res); } else { break; } } double avg_dist; if (dists.size() > n_i) { std::sort(dists.rbegin(), dists.rend()); avg_dist = std::accumulate(dists.begin(), dists.begin() + n_i, 0.0) / n_i; } else { avg_dist = std::accumulate(dists.begin(), dists.end(), 0.0) / dists.size(); } results.push_back(avg_dist); } return results; } Seurat/src/RcppExports.cpp0000644000176200001440000006036314525500056015313 0ustar liggesusers// Generated by using Rcpp::compileAttributes() -> do not edit by hand // Generator token: 10BE3573-1514-4C36-9D1C-5A225CD40393 #include #include using namespace Rcpp; #ifdef RCPP_USE_GLOBAL_ROSTREAM Rcpp::Rostream& Rcpp::Rcout = Rcpp::Rcpp_cout_get(); Rcpp::Rostream& Rcpp::Rcerr = Rcpp::Rcpp_cerr_get(); #endif // RunModularityClusteringCpp IntegerVector RunModularityClusteringCpp(Eigen::SparseMatrix SNN, int modularityFunction, double resolution, int algorithm, int nRandomStarts, int nIterations, int randomSeed, bool printOutput, std::string edgefilename); RcppExport SEXP _Seurat_RunModularityClusteringCpp(SEXP SNNSEXP, SEXP modularityFunctionSEXP, SEXP resolutionSEXP, SEXP algorithmSEXP, SEXP nRandomStartsSEXP, SEXP nIterationsSEXP, SEXP randomSeedSEXP, SEXP printOutputSEXP, SEXP edgefilenameSEXP) { BEGIN_RCPP Rcpp::RObject rcpp_result_gen; Rcpp::RNGScope rcpp_rngScope_gen; Rcpp::traits::input_parameter< Eigen::SparseMatrix >::type SNN(SNNSEXP); Rcpp::traits::input_parameter< int >::type modularityFunction(modularityFunctionSEXP); Rcpp::traits::input_parameter< double >::type resolution(resolutionSEXP); Rcpp::traits::input_parameter< int >::type algorithm(algorithmSEXP); 
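// Editorial note: this file is regenerated by Rcpp::compileAttributes(), so any
// comments here are illustrative only. Each input_parameter< T >::type line in
// these wrappers converts one SEXP received from R into the C++ type the
// exported function expects, and Rcpp::wrap() converts the C++ return value
// back into a SEXP before it is handed back to R.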
Rcpp::traits::input_parameter< int >::type nRandomStarts(nRandomStartsSEXP); Rcpp::traits::input_parameter< int >::type nIterations(nIterationsSEXP); Rcpp::traits::input_parameter< int >::type randomSeed(randomSeedSEXP); Rcpp::traits::input_parameter< bool >::type printOutput(printOutputSEXP); Rcpp::traits::input_parameter< std::string >::type edgefilename(edgefilenameSEXP); rcpp_result_gen = Rcpp::wrap(RunModularityClusteringCpp(SNN, modularityFunction, resolution, algorithm, nRandomStarts, nIterations, randomSeed, printOutput, edgefilename)); return rcpp_result_gen; END_RCPP } // RunUMISampling Eigen::SparseMatrix RunUMISampling(Eigen::SparseMatrix data, int sample_val, bool upsample, bool display_progress); RcppExport SEXP _Seurat_RunUMISampling(SEXP dataSEXP, SEXP sample_valSEXP, SEXP upsampleSEXP, SEXP display_progressSEXP) { BEGIN_RCPP Rcpp::RObject rcpp_result_gen; Rcpp::RNGScope rcpp_rngScope_gen; Rcpp::traits::input_parameter< Eigen::SparseMatrix >::type data(dataSEXP); Rcpp::traits::input_parameter< int >::type sample_val(sample_valSEXP); Rcpp::traits::input_parameter< bool >::type upsample(upsampleSEXP); Rcpp::traits::input_parameter< bool >::type display_progress(display_progressSEXP); rcpp_result_gen = Rcpp::wrap(RunUMISampling(data, sample_val, upsample, display_progress)); return rcpp_result_gen; END_RCPP } // RunUMISamplingPerCell Eigen::SparseMatrix RunUMISamplingPerCell(Eigen::SparseMatrix data, NumericVector sample_val, bool upsample, bool display_progress); RcppExport SEXP _Seurat_RunUMISamplingPerCell(SEXP dataSEXP, SEXP sample_valSEXP, SEXP upsampleSEXP, SEXP display_progressSEXP) { BEGIN_RCPP Rcpp::RObject rcpp_result_gen; Rcpp::RNGScope rcpp_rngScope_gen; Rcpp::traits::input_parameter< Eigen::SparseMatrix >::type data(dataSEXP); Rcpp::traits::input_parameter< NumericVector >::type sample_val(sample_valSEXP); Rcpp::traits::input_parameter< bool >::type upsample(upsampleSEXP); Rcpp::traits::input_parameter< bool >::type display_progress(display_progressSEXP); rcpp_result_gen = Rcpp::wrap(RunUMISamplingPerCell(data, sample_val, upsample, display_progress)); return rcpp_result_gen; END_RCPP } // RowMergeMatrices Eigen::SparseMatrix RowMergeMatrices(Eigen::SparseMatrix mat1, Eigen::SparseMatrix mat2, std::vector< std::string > mat1_rownames, std::vector< std::string > mat2_rownames, std::vector< std::string > all_rownames); RcppExport SEXP _Seurat_RowMergeMatrices(SEXP mat1SEXP, SEXP mat2SEXP, SEXP mat1_rownamesSEXP, SEXP mat2_rownamesSEXP, SEXP all_rownamesSEXP) { BEGIN_RCPP Rcpp::RObject rcpp_result_gen; Rcpp::traits::input_parameter< Eigen::SparseMatrix >::type mat1(mat1SEXP); Rcpp::traits::input_parameter< Eigen::SparseMatrix >::type mat2(mat2SEXP); Rcpp::traits::input_parameter< std::vector< std::string > >::type mat1_rownames(mat1_rownamesSEXP); Rcpp::traits::input_parameter< std::vector< std::string > >::type mat2_rownames(mat2_rownamesSEXP); Rcpp::traits::input_parameter< std::vector< std::string > >::type all_rownames(all_rownamesSEXP); rcpp_result_gen = Rcpp::wrap(RowMergeMatrices(mat1, mat2, mat1_rownames, mat2_rownames, all_rownames)); return rcpp_result_gen; END_RCPP } // LogNorm Eigen::SparseMatrix LogNorm(Eigen::SparseMatrix data, int scale_factor, bool display_progress); RcppExport SEXP _Seurat_LogNorm(SEXP dataSEXP, SEXP scale_factorSEXP, SEXP display_progressSEXP) { BEGIN_RCPP Rcpp::RObject rcpp_result_gen; Rcpp::traits::input_parameter< Eigen::SparseMatrix >::type data(dataSEXP); Rcpp::traits::input_parameter< int >::type 
scale_factor(scale_factorSEXP); Rcpp::traits::input_parameter< bool >::type display_progress(display_progressSEXP); rcpp_result_gen = Rcpp::wrap(LogNorm(data, scale_factor, display_progress)); return rcpp_result_gen; END_RCPP } // Standardize NumericMatrix Standardize(Eigen::Map mat, bool display_progress); RcppExport SEXP _Seurat_Standardize(SEXP matSEXP, SEXP display_progressSEXP) { BEGIN_RCPP Rcpp::RObject rcpp_result_gen; Rcpp::traits::input_parameter< Eigen::Map >::type mat(matSEXP); Rcpp::traits::input_parameter< bool >::type display_progress(display_progressSEXP); rcpp_result_gen = Rcpp::wrap(Standardize(mat, display_progress)); return rcpp_result_gen; END_RCPP } // FastSparseRowScale Eigen::MatrixXd FastSparseRowScale(Eigen::SparseMatrix mat, bool scale, bool center, double scale_max, bool display_progress); RcppExport SEXP _Seurat_FastSparseRowScale(SEXP matSEXP, SEXP scaleSEXP, SEXP centerSEXP, SEXP scale_maxSEXP, SEXP display_progressSEXP) { BEGIN_RCPP Rcpp::RObject rcpp_result_gen; Rcpp::traits::input_parameter< Eigen::SparseMatrix >::type mat(matSEXP); Rcpp::traits::input_parameter< bool >::type scale(scaleSEXP); Rcpp::traits::input_parameter< bool >::type center(centerSEXP); Rcpp::traits::input_parameter< double >::type scale_max(scale_maxSEXP); Rcpp::traits::input_parameter< bool >::type display_progress(display_progressSEXP); rcpp_result_gen = Rcpp::wrap(FastSparseRowScale(mat, scale, center, scale_max, display_progress)); return rcpp_result_gen; END_RCPP } // FastSparseRowScaleWithKnownStats Eigen::MatrixXd FastSparseRowScaleWithKnownStats(Eigen::SparseMatrix mat, NumericVector mu, NumericVector sigma, bool scale, bool center, double scale_max, bool display_progress); RcppExport SEXP _Seurat_FastSparseRowScaleWithKnownStats(SEXP matSEXP, SEXP muSEXP, SEXP sigmaSEXP, SEXP scaleSEXP, SEXP centerSEXP, SEXP scale_maxSEXP, SEXP display_progressSEXP) { BEGIN_RCPP Rcpp::RObject rcpp_result_gen; Rcpp::traits::input_parameter< Eigen::SparseMatrix >::type mat(matSEXP); Rcpp::traits::input_parameter< NumericVector >::type mu(muSEXP); Rcpp::traits::input_parameter< NumericVector >::type sigma(sigmaSEXP); Rcpp::traits::input_parameter< bool >::type scale(scaleSEXP); Rcpp::traits::input_parameter< bool >::type center(centerSEXP); Rcpp::traits::input_parameter< double >::type scale_max(scale_maxSEXP); Rcpp::traits::input_parameter< bool >::type display_progress(display_progressSEXP); rcpp_result_gen = Rcpp::wrap(FastSparseRowScaleWithKnownStats(mat, mu, sigma, scale, center, scale_max, display_progress)); return rcpp_result_gen; END_RCPP } // FastCov Eigen::MatrixXd FastCov(Eigen::MatrixXd mat, bool center); RcppExport SEXP _Seurat_FastCov(SEXP matSEXP, SEXP centerSEXP) { BEGIN_RCPP Rcpp::RObject rcpp_result_gen; Rcpp::traits::input_parameter< Eigen::MatrixXd >::type mat(matSEXP); Rcpp::traits::input_parameter< bool >::type center(centerSEXP); rcpp_result_gen = Rcpp::wrap(FastCov(mat, center)); return rcpp_result_gen; END_RCPP } // FastCovMats Eigen::MatrixXd FastCovMats(Eigen::MatrixXd mat1, Eigen::MatrixXd mat2, bool center); RcppExport SEXP _Seurat_FastCovMats(SEXP mat1SEXP, SEXP mat2SEXP, SEXP centerSEXP) { BEGIN_RCPP Rcpp::RObject rcpp_result_gen; Rcpp::traits::input_parameter< Eigen::MatrixXd >::type mat1(mat1SEXP); Rcpp::traits::input_parameter< Eigen::MatrixXd >::type mat2(mat2SEXP); Rcpp::traits::input_parameter< bool >::type center(centerSEXP); rcpp_result_gen = Rcpp::wrap(FastCovMats(mat1, mat2, center)); return rcpp_result_gen; END_RCPP } // FastRBind Eigen::MatrixXd 
FastRBind(Eigen::MatrixXd mat1, Eigen::MatrixXd mat2); RcppExport SEXP _Seurat_FastRBind(SEXP mat1SEXP, SEXP mat2SEXP) { BEGIN_RCPP Rcpp::RObject rcpp_result_gen; Rcpp::traits::input_parameter< Eigen::MatrixXd >::type mat1(mat1SEXP); Rcpp::traits::input_parameter< Eigen::MatrixXd >::type mat2(mat2SEXP); rcpp_result_gen = Rcpp::wrap(FastRBind(mat1, mat2)); return rcpp_result_gen; END_RCPP } // FastExpMean Eigen::VectorXd FastExpMean(Eigen::SparseMatrix mat, bool display_progress); RcppExport SEXP _Seurat_FastExpMean(SEXP matSEXP, SEXP display_progressSEXP) { BEGIN_RCPP Rcpp::RObject rcpp_result_gen; Rcpp::traits::input_parameter< Eigen::SparseMatrix >::type mat(matSEXP); Rcpp::traits::input_parameter< bool >::type display_progress(display_progressSEXP); rcpp_result_gen = Rcpp::wrap(FastExpMean(mat, display_progress)); return rcpp_result_gen; END_RCPP } // SparseRowVar2 NumericVector SparseRowVar2(Eigen::SparseMatrix mat, NumericVector mu, bool display_progress); RcppExport SEXP _Seurat_SparseRowVar2(SEXP matSEXP, SEXP muSEXP, SEXP display_progressSEXP) { BEGIN_RCPP Rcpp::RObject rcpp_result_gen; Rcpp::traits::input_parameter< Eigen::SparseMatrix >::type mat(matSEXP); Rcpp::traits::input_parameter< NumericVector >::type mu(muSEXP); Rcpp::traits::input_parameter< bool >::type display_progress(display_progressSEXP); rcpp_result_gen = Rcpp::wrap(SparseRowVar2(mat, mu, display_progress)); return rcpp_result_gen; END_RCPP } // SparseRowVarStd NumericVector SparseRowVarStd(Eigen::SparseMatrix mat, NumericVector mu, NumericVector sd, double vmax, bool display_progress); RcppExport SEXP _Seurat_SparseRowVarStd(SEXP matSEXP, SEXP muSEXP, SEXP sdSEXP, SEXP vmaxSEXP, SEXP display_progressSEXP) { BEGIN_RCPP Rcpp::RObject rcpp_result_gen; Rcpp::traits::input_parameter< Eigen::SparseMatrix >::type mat(matSEXP); Rcpp::traits::input_parameter< NumericVector >::type mu(muSEXP); Rcpp::traits::input_parameter< NumericVector >::type sd(sdSEXP); Rcpp::traits::input_parameter< double >::type vmax(vmaxSEXP); Rcpp::traits::input_parameter< bool >::type display_progress(display_progressSEXP); rcpp_result_gen = Rcpp::wrap(SparseRowVarStd(mat, mu, sd, vmax, display_progress)); return rcpp_result_gen; END_RCPP } // FastLogVMR Eigen::VectorXd FastLogVMR(Eigen::SparseMatrix mat, bool display_progress); RcppExport SEXP _Seurat_FastLogVMR(SEXP matSEXP, SEXP display_progressSEXP) { BEGIN_RCPP Rcpp::RObject rcpp_result_gen; Rcpp::traits::input_parameter< Eigen::SparseMatrix >::type mat(matSEXP); Rcpp::traits::input_parameter< bool >::type display_progress(display_progressSEXP); rcpp_result_gen = Rcpp::wrap(FastLogVMR(mat, display_progress)); return rcpp_result_gen; END_RCPP } // RowVar NumericVector RowVar(Eigen::Map x); RcppExport SEXP _Seurat_RowVar(SEXP xSEXP) { BEGIN_RCPP Rcpp::RObject rcpp_result_gen; Rcpp::traits::input_parameter< Eigen::Map >::type x(xSEXP); rcpp_result_gen = Rcpp::wrap(RowVar(x)); return rcpp_result_gen; END_RCPP } // SparseRowVar Eigen::VectorXd SparseRowVar(Eigen::SparseMatrix mat, bool display_progress); RcppExport SEXP _Seurat_SparseRowVar(SEXP matSEXP, SEXP display_progressSEXP) { BEGIN_RCPP Rcpp::RObject rcpp_result_gen; Rcpp::traits::input_parameter< Eigen::SparseMatrix >::type mat(matSEXP); Rcpp::traits::input_parameter< bool >::type display_progress(display_progressSEXP); rcpp_result_gen = Rcpp::wrap(SparseRowVar(mat, display_progress)); return rcpp_result_gen; END_RCPP } // ReplaceColsC Eigen::SparseMatrix ReplaceColsC(Eigen::SparseMatrix mat, NumericVector col_idx, Eigen::SparseMatrix 
replacement); RcppExport SEXP _Seurat_ReplaceColsC(SEXP matSEXP, SEXP col_idxSEXP, SEXP replacementSEXP) { BEGIN_RCPP Rcpp::RObject rcpp_result_gen; Rcpp::traits::input_parameter< Eigen::SparseMatrix >::type mat(matSEXP); Rcpp::traits::input_parameter< NumericVector >::type col_idx(col_idxSEXP); Rcpp::traits::input_parameter< Eigen::SparseMatrix >::type replacement(replacementSEXP); rcpp_result_gen = Rcpp::wrap(ReplaceColsC(mat, col_idx, replacement)); return rcpp_result_gen; END_RCPP } // GraphToNeighborHelper List GraphToNeighborHelper(Eigen::SparseMatrix mat); RcppExport SEXP _Seurat_GraphToNeighborHelper(SEXP matSEXP) { BEGIN_RCPP Rcpp::RObject rcpp_result_gen; Rcpp::traits::input_parameter< Eigen::SparseMatrix >::type mat(matSEXP); rcpp_result_gen = Rcpp::wrap(GraphToNeighborHelper(mat)); return rcpp_result_gen; END_RCPP } // fast_dist List fast_dist(NumericMatrix x, NumericMatrix y, List n); RcppExport SEXP _Seurat_fast_dist(SEXP xSEXP, SEXP ySEXP, SEXP nSEXP) { BEGIN_RCPP Rcpp::RObject rcpp_result_gen; Rcpp::RNGScope rcpp_rngScope_gen; Rcpp::traits::input_parameter< NumericMatrix >::type x(xSEXP); Rcpp::traits::input_parameter< NumericMatrix >::type y(ySEXP); Rcpp::traits::input_parameter< List >::type n(nSEXP); rcpp_result_gen = Rcpp::wrap(fast_dist(x, y, n)); return rcpp_result_gen; END_RCPP } // FindWeightsC Eigen::SparseMatrix FindWeightsC(NumericVector cells2, Eigen::MatrixXd distances, std::vector anchor_cells2, std::vector integration_matrix_rownames, Eigen::MatrixXd cell_index, Eigen::VectorXd anchor_score, double min_dist, double sd, bool display_progress); RcppExport SEXP _Seurat_FindWeightsC(SEXP cells2SEXP, SEXP distancesSEXP, SEXP anchor_cells2SEXP, SEXP integration_matrix_rownamesSEXP, SEXP cell_indexSEXP, SEXP anchor_scoreSEXP, SEXP min_distSEXP, SEXP sdSEXP, SEXP display_progressSEXP) { BEGIN_RCPP Rcpp::RObject rcpp_result_gen; Rcpp::traits::input_parameter< NumericVector >::type cells2(cells2SEXP); Rcpp::traits::input_parameter< Eigen::MatrixXd >::type distances(distancesSEXP); Rcpp::traits::input_parameter< std::vector >::type anchor_cells2(anchor_cells2SEXP); Rcpp::traits::input_parameter< std::vector >::type integration_matrix_rownames(integration_matrix_rownamesSEXP); Rcpp::traits::input_parameter< Eigen::MatrixXd >::type cell_index(cell_indexSEXP); Rcpp::traits::input_parameter< Eigen::VectorXd >::type anchor_score(anchor_scoreSEXP); Rcpp::traits::input_parameter< double >::type min_dist(min_distSEXP); Rcpp::traits::input_parameter< double >::type sd(sdSEXP); Rcpp::traits::input_parameter< bool >::type display_progress(display_progressSEXP); rcpp_result_gen = Rcpp::wrap(FindWeightsC(cells2, distances, anchor_cells2, integration_matrix_rownames, cell_index, anchor_score, min_dist, sd, display_progress)); return rcpp_result_gen; END_RCPP } // IntegrateDataC Eigen::SparseMatrix IntegrateDataC(Eigen::SparseMatrix integration_matrix, Eigen::SparseMatrix weights, Eigen::SparseMatrix expression_cells2); RcppExport SEXP _Seurat_IntegrateDataC(SEXP integration_matrixSEXP, SEXP weightsSEXP, SEXP expression_cells2SEXP) { BEGIN_RCPP Rcpp::RObject rcpp_result_gen; Rcpp::traits::input_parameter< Eigen::SparseMatrix >::type integration_matrix(integration_matrixSEXP); Rcpp::traits::input_parameter< Eigen::SparseMatrix >::type weights(weightsSEXP); Rcpp::traits::input_parameter< Eigen::SparseMatrix >::type expression_cells2(expression_cells2SEXP); rcpp_result_gen = Rcpp::wrap(IntegrateDataC(integration_matrix, weights, expression_cells2)); return rcpp_result_gen; END_RCPP } // 
ScoreHelper std::vector ScoreHelper(Eigen::SparseMatrix snn, Eigen::MatrixXd query_pca, Eigen::MatrixXd query_dists, Eigen::MatrixXd corrected_nns, int k_snn, bool subtract_first_nn, bool display_progress); RcppExport SEXP _Seurat_ScoreHelper(SEXP snnSEXP, SEXP query_pcaSEXP, SEXP query_distsSEXP, SEXP corrected_nnsSEXP, SEXP k_snnSEXP, SEXP subtract_first_nnSEXP, SEXP display_progressSEXP) { BEGIN_RCPP Rcpp::RObject rcpp_result_gen; Rcpp::RNGScope rcpp_rngScope_gen; Rcpp::traits::input_parameter< Eigen::SparseMatrix >::type snn(snnSEXP); Rcpp::traits::input_parameter< Eigen::MatrixXd >::type query_pca(query_pcaSEXP); Rcpp::traits::input_parameter< Eigen::MatrixXd >::type query_dists(query_distsSEXP); Rcpp::traits::input_parameter< Eigen::MatrixXd >::type corrected_nns(corrected_nnsSEXP); Rcpp::traits::input_parameter< int >::type k_snn(k_snnSEXP); Rcpp::traits::input_parameter< bool >::type subtract_first_nn(subtract_first_nnSEXP); Rcpp::traits::input_parameter< bool >::type display_progress(display_progressSEXP); rcpp_result_gen = Rcpp::wrap(ScoreHelper(snn, query_pca, query_dists, corrected_nns, k_snn, subtract_first_nn, display_progress)); return rcpp_result_gen; END_RCPP } // ComputeSNN Eigen::SparseMatrix ComputeSNN(Eigen::MatrixXd nn_ranked, double prune); RcppExport SEXP _Seurat_ComputeSNN(SEXP nn_rankedSEXP, SEXP pruneSEXP) { BEGIN_RCPP Rcpp::RObject rcpp_result_gen; Rcpp::traits::input_parameter< Eigen::MatrixXd >::type nn_ranked(nn_rankedSEXP); Rcpp::traits::input_parameter< double >::type prune(pruneSEXP); rcpp_result_gen = Rcpp::wrap(ComputeSNN(nn_ranked, prune)); return rcpp_result_gen; END_RCPP } // WriteEdgeFile void WriteEdgeFile(Eigen::SparseMatrix snn, String filename, bool display_progress); RcppExport SEXP _Seurat_WriteEdgeFile(SEXP snnSEXP, SEXP filenameSEXP, SEXP display_progressSEXP) { BEGIN_RCPP Rcpp::traits::input_parameter< Eigen::SparseMatrix >::type snn(snnSEXP); Rcpp::traits::input_parameter< String >::type filename(filenameSEXP); Rcpp::traits::input_parameter< bool >::type display_progress(display_progressSEXP); WriteEdgeFile(snn, filename, display_progress); return R_NilValue; END_RCPP } // DirectSNNToFile Eigen::SparseMatrix DirectSNNToFile(Eigen::MatrixXd nn_ranked, double prune, bool display_progress, String filename); RcppExport SEXP _Seurat_DirectSNNToFile(SEXP nn_rankedSEXP, SEXP pruneSEXP, SEXP display_progressSEXP, SEXP filenameSEXP) { BEGIN_RCPP Rcpp::RObject rcpp_result_gen; Rcpp::traits::input_parameter< Eigen::MatrixXd >::type nn_ranked(nn_rankedSEXP); Rcpp::traits::input_parameter< double >::type prune(pruneSEXP); Rcpp::traits::input_parameter< bool >::type display_progress(display_progressSEXP); Rcpp::traits::input_parameter< String >::type filename(filenameSEXP); rcpp_result_gen = Rcpp::wrap(DirectSNNToFile(nn_ranked, prune, display_progress, filename)); return rcpp_result_gen; END_RCPP } // SNN_SmallestNonzero_Dist std::vector SNN_SmallestNonzero_Dist(Eigen::SparseMatrix snn, Eigen::MatrixXd mat, int n, std::vector nearest_dist); RcppExport SEXP _Seurat_SNN_SmallestNonzero_Dist(SEXP snnSEXP, SEXP matSEXP, SEXP nSEXP, SEXP nearest_distSEXP) { BEGIN_RCPP Rcpp::RObject rcpp_result_gen; Rcpp::RNGScope rcpp_rngScope_gen; Rcpp::traits::input_parameter< Eigen::SparseMatrix >::type snn(snnSEXP); Rcpp::traits::input_parameter< Eigen::MatrixXd >::type mat(matSEXP); Rcpp::traits::input_parameter< int >::type n(nSEXP); Rcpp::traits::input_parameter< std::vector >::type nearest_dist(nearest_distSEXP); rcpp_result_gen = 
Rcpp::wrap(SNN_SmallestNonzero_Dist(snn, mat, n, nearest_dist)); return rcpp_result_gen; END_RCPP } // row_sum_dgcmatrix NumericVector row_sum_dgcmatrix(NumericVector& x, IntegerVector& i, int rows, int cols); RcppExport SEXP _Seurat_row_sum_dgcmatrix(SEXP xSEXP, SEXP iSEXP, SEXP rowsSEXP, SEXP colsSEXP) { BEGIN_RCPP Rcpp::RObject rcpp_result_gen; Rcpp::RNGScope rcpp_rngScope_gen; Rcpp::traits::input_parameter< NumericVector& >::type x(xSEXP); Rcpp::traits::input_parameter< IntegerVector& >::type i(iSEXP); Rcpp::traits::input_parameter< int >::type rows(rowsSEXP); Rcpp::traits::input_parameter< int >::type cols(colsSEXP); rcpp_result_gen = Rcpp::wrap(row_sum_dgcmatrix(x, i, rows, cols)); return rcpp_result_gen; END_RCPP } // row_mean_dgcmatrix NumericVector row_mean_dgcmatrix(NumericVector& x, IntegerVector& i, int rows, int cols); RcppExport SEXP _Seurat_row_mean_dgcmatrix(SEXP xSEXP, SEXP iSEXP, SEXP rowsSEXP, SEXP colsSEXP) { BEGIN_RCPP Rcpp::RObject rcpp_result_gen; Rcpp::RNGScope rcpp_rngScope_gen; Rcpp::traits::input_parameter< NumericVector& >::type x(xSEXP); Rcpp::traits::input_parameter< IntegerVector& >::type i(iSEXP); Rcpp::traits::input_parameter< int >::type rows(rowsSEXP); Rcpp::traits::input_parameter< int >::type cols(colsSEXP); rcpp_result_gen = Rcpp::wrap(row_mean_dgcmatrix(x, i, rows, cols)); return rcpp_result_gen; END_RCPP } // row_var_dgcmatrix NumericVector row_var_dgcmatrix(NumericVector& x, IntegerVector& i, int rows, int cols); RcppExport SEXP _Seurat_row_var_dgcmatrix(SEXP xSEXP, SEXP iSEXP, SEXP rowsSEXP, SEXP colsSEXP) { BEGIN_RCPP Rcpp::RObject rcpp_result_gen; Rcpp::RNGScope rcpp_rngScope_gen; Rcpp::traits::input_parameter< NumericVector& >::type x(xSEXP); Rcpp::traits::input_parameter< IntegerVector& >::type i(iSEXP); Rcpp::traits::input_parameter< int >::type rows(rowsSEXP); Rcpp::traits::input_parameter< int >::type cols(colsSEXP); rcpp_result_gen = Rcpp::wrap(row_var_dgcmatrix(x, i, rows, cols)); return rcpp_result_gen; END_RCPP } RcppExport SEXP isnull(SEXP); static const R_CallMethodDef CallEntries[] = { {"_Seurat_RunModularityClusteringCpp", (DL_FUNC) &_Seurat_RunModularityClusteringCpp, 9}, {"_Seurat_RunUMISampling", (DL_FUNC) &_Seurat_RunUMISampling, 4}, {"_Seurat_RunUMISamplingPerCell", (DL_FUNC) &_Seurat_RunUMISamplingPerCell, 4}, {"_Seurat_RowMergeMatrices", (DL_FUNC) &_Seurat_RowMergeMatrices, 5}, {"_Seurat_LogNorm", (DL_FUNC) &_Seurat_LogNorm, 3}, {"_Seurat_Standardize", (DL_FUNC) &_Seurat_Standardize, 2}, {"_Seurat_FastSparseRowScale", (DL_FUNC) &_Seurat_FastSparseRowScale, 5}, {"_Seurat_FastSparseRowScaleWithKnownStats", (DL_FUNC) &_Seurat_FastSparseRowScaleWithKnownStats, 7}, {"_Seurat_FastCov", (DL_FUNC) &_Seurat_FastCov, 2}, {"_Seurat_FastCovMats", (DL_FUNC) &_Seurat_FastCovMats, 3}, {"_Seurat_FastRBind", (DL_FUNC) &_Seurat_FastRBind, 2}, {"_Seurat_FastExpMean", (DL_FUNC) &_Seurat_FastExpMean, 2}, {"_Seurat_SparseRowVar2", (DL_FUNC) &_Seurat_SparseRowVar2, 3}, {"_Seurat_SparseRowVarStd", (DL_FUNC) &_Seurat_SparseRowVarStd, 5}, {"_Seurat_FastLogVMR", (DL_FUNC) &_Seurat_FastLogVMR, 2}, {"_Seurat_RowVar", (DL_FUNC) &_Seurat_RowVar, 1}, {"_Seurat_SparseRowVar", (DL_FUNC) &_Seurat_SparseRowVar, 2}, {"_Seurat_ReplaceColsC", (DL_FUNC) &_Seurat_ReplaceColsC, 3}, {"_Seurat_GraphToNeighborHelper", (DL_FUNC) &_Seurat_GraphToNeighborHelper, 1}, {"_Seurat_fast_dist", (DL_FUNC) &_Seurat_fast_dist, 3}, {"_Seurat_FindWeightsC", (DL_FUNC) &_Seurat_FindWeightsC, 9}, {"_Seurat_IntegrateDataC", (DL_FUNC) &_Seurat_IntegrateDataC, 3}, {"_Seurat_ScoreHelper", 
(DL_FUNC) &_Seurat_ScoreHelper, 7}, {"_Seurat_ComputeSNN", (DL_FUNC) &_Seurat_ComputeSNN, 2}, {"_Seurat_WriteEdgeFile", (DL_FUNC) &_Seurat_WriteEdgeFile, 3}, {"_Seurat_DirectSNNToFile", (DL_FUNC) &_Seurat_DirectSNNToFile, 4}, {"_Seurat_SNN_SmallestNonzero_Dist", (DL_FUNC) &_Seurat_SNN_SmallestNonzero_Dist, 4}, {"_Seurat_row_sum_dgcmatrix", (DL_FUNC) &_Seurat_row_sum_dgcmatrix, 4}, {"_Seurat_row_mean_dgcmatrix", (DL_FUNC) &_Seurat_row_mean_dgcmatrix, 4}, {"_Seurat_row_var_dgcmatrix", (DL_FUNC) &_Seurat_row_var_dgcmatrix, 4}, {"isnull", (DL_FUNC) &isnull, 1}, {NULL, NULL, 0} }; RcppExport void R_init_Seurat(DllInfo *dll) { R_registerRoutines(dll, NULL, CallEntries, NULL, NULL); R_useDynamicSymbols(dll, FALSE); } Seurat/src/RModularityOptimizer.cpp0000644000176200001440000001272114525500037017172 0ustar liggesusers#include #include #include #include #include #include #include #include #include #include #include "ModularityOptimizer.h" using namespace ModularityOptimizer; using namespace std::chrono; using namespace Rcpp; // [[Rcpp::depends(RcppEigen)]] // [[Rcpp::depends(RcppProgress)]] // [[Rcpp::export]] IntegerVector RunModularityClusteringCpp(Eigen::SparseMatrix SNN, int modularityFunction, double resolution, int algorithm, int nRandomStarts, int nIterations, int randomSeed, bool printOutput, std::string edgefilename) { // validate arguments if(modularityFunction != 1 && modularityFunction != 2) stop("Modularity parameter must be equal to 1 or 2."); if(algorithm != 1 && algorithm !=2 && algorithm !=3 && algorithm !=4) stop("Algorithm for modularity optimization must be 1, 2, 3, or 4"); if(nRandomStarts < 1) stop("Have to have at least one start"); if(nIterations < 1) stop("Need at least one interation"); if (modularityFunction == 2 && resolution > 1.0) stop("error: resolution<1 for alternative modularity"); try { bool update; double modularity, maxModularity, resolution2; int i, j; std::string msg = "Modularity Optimizer version 1.3.0 by Ludo Waltman and Nees Jan van Eck"; if (printOutput) Rcout << msg << std::endl << std::endl; // Load netwrok std::shared_ptr network; if(edgefilename != "") { if (printOutput) Rcout << "Reading input file..." << std::endl << std::endl; try{ network = readInputFile(edgefilename, modularityFunction); } catch(...) { stop("Could not parse edge file."); } } else { // Load lower triangle int network_size = (SNN.nonZeros() / 2) + 3; IVector node1; IVector node2; DVector edgeweights; node1.reserve(network_size); node2.reserve(network_size); edgeweights.reserve(network_size); for (int k=0; k < SNN.outerSize(); ++k){ for (Eigen::SparseMatrix::InnerIterator it(SNN, k); it; ++it){ if(it.col() >= it.row()){ continue; } node1.emplace_back(it.col()); node2.emplace_back(it.row()); edgeweights.emplace_back(it.value()); } } if(node1.size() == 0) { stop("Matrix contained no network data. Check format."); } int nNodes = std::max(SNN.cols(), SNN.rows()); network = matrixToNetwork(node1, node2, edgeweights, modularityFunction, nNodes); Rcpp::checkUserInterrupt(); } if (printOutput) { Rprintf("Number of nodes: %d\n", network->getNNodes()); Rprintf("Number of edges: %d\n", network->getNEdges()); Rcout << std::endl; Rcout << "Running " << ((algorithm == 1) ? "Louvain algorithm" : ((algorithm == 2) ? "Louvain algorithm with multilevel refinement" : "smart local moving algorithm")) << "..."; Rcout << std::endl; } resolution2 = ((modularityFunction == 1) ? 
(resolution / (2 * network->getTotalEdgeWeight() + network->getTotalEdgeWeightSelfLinks())) : resolution); auto beginTime = duration_cast<milliseconds>(system_clock::now().time_since_epoch()); std::shared_ptr<Clustering> clustering; maxModularity = -std::numeric_limits<double>::infinity(); JavaRandom random(randomSeed); Progress p(nRandomStarts, printOutput); for (i = 0; i < nRandomStarts; i++) {
//if (printOutput && (nRandomStarts > 1))
//Rprintf("Random start: %d\n", i + 1);
VOSClusteringTechnique vosClusteringTechnique(network, resolution2); j = 0; update = true; do { /*if (printOutput && (nIterations > 1)) Rprintf("Iteration: %d\n", j + 1); */ if (algorithm == 1) update = vosClusteringTechnique.runLouvainAlgorithm(random); else if (algorithm == 2) update = vosClusteringTechnique.runLouvainAlgorithmWithMultilevelRefinement(random); else if (algorithm == 3) vosClusteringTechnique.runSmartLocalMovingAlgorithm(random); j++; modularity = vosClusteringTechnique.calcQualityFunction();
//if (printOutput && (nIterations > 1))
// Rprintf("Modularity: %.4f\n", modularity);
Rcpp::checkUserInterrupt(); } while ((j < nIterations) && update); if (modularity > maxModularity) { clustering = vosClusteringTechnique.getClustering(); maxModularity = modularity; } /*if (printOutput && (nRandomStarts > 1)) { if (nIterations == 1) Rprintf("Modularity: %.4f\n", modularity); Rcout << std::endl; }*/ p.increment(); } auto endTime = duration_cast<milliseconds>(system_clock::now().time_since_epoch()); if(clustering == nullptr) { stop("Clustering step failed."); } if (printOutput) { if (nRandomStarts == 1) { if (nIterations > 1) Rcout << std::endl; Rprintf("Modularity: %.4f\n", maxModularity); } else Rprintf("Maximum modularity in %d random starts: %.4f\n", nRandomStarts, maxModularity); Rprintf("Number of communities: %d\n", clustering->getNClusters()); Rprintf("Elapsed time: %d seconds\n", static_cast<int>((endTime - beginTime).count() / 1000.0)); }
// Return results
clustering->orderClustersByNNodes(); IntegerVector iv(clustering->cluster.cbegin(), clustering->cluster.cend()); return iv; } catch(std::exception &ex) { forward_exception_to_r(ex); } catch(...)
{ ::Rf_error("c++ exception (unknown reason)"); } return IntegerVector(1); } Seurat/src/snn.h0000644000176200001440000000117214525500037013255 0ustar liggesusers#ifndef SNN #define SNN #include #include "data_manipulation.h" #include #include #include #include #include #include using namespace Rcpp; //---------------------------------------------------- Eigen::SparseMatrix ComputeSNN(Eigen::MatrixXd nn_ranked); void WriteEdgeFile(Eigen::SparseMatrix snn, String filename, bool display_progress); Eigen::SparseMatrix DirectSNNToFile(Eigen::MatrixXd nn_ranked, double prune, bool display_progress, String filename); //---------------------------------------------------- #endif//SNN Seurat/R/0000755000176200001440000000000014525500232011714 5ustar liggesusersSeurat/R/objects.R0000644000176200001440000030254014525500037013477 0ustar liggesusers#' @include reexports.R #' @include generics.R #' @importFrom Rcpp evalCpp #' @importFrom Matrix colSums rowSums colMeans rowMeans #' @importFrom methods setClass setOldClass setClassUnion slot #' slot<- setMethod new signature slotNames is setAs setValidity .hasSlot #' @importClassesFrom Matrix dgCMatrix #' @useDynLib Seurat #' NULL #%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% # Class definitions #%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% setOldClass(Classes = 'package_version') #' The AnchorSet Class #' #' The AnchorSet class is an intermediate data storage class that stores the anchors and other #' related information needed for performing downstream analyses - namely data integration #' (\code{\link{IntegrateData}}) and data transfer (\code{\link{TransferData}}). #' #' @slot object.list List of objects used to create anchors #' @slot reference.cells List of cell names in the reference dataset - needed when performing data #' transfer. #' @slot reference.objects Position of reference object/s in object.list #' @slot query.cells List of cell names in the query dataset - needed when performing data transfer #' @slot anchors The anchor matrix. This contains the cell indices of both anchor pair cells, the #' anchor score, and the index of the original dataset in the object.list for cell1 and cell2 of #' the anchor. #' @slot offsets The offsets used to enable cell look up in downstream functions #' @slot weight.reduction The weight dimensional reduction used to calculate weight matrix #' @slot anchor.features The features used when performing anchor finding. #' @slot neighbors List containing Neighbor objects for reuse later (e.g. mapping) #' @slot command Store log of parameters that were used #' #' @name AnchorSet-class #' @rdname AnchorSet-class #' @concept objects #' @exportClass AnchorSet #' AnchorSet <- setClass( Class = "AnchorSet", contains = 'VIRTUAL', slots = list( object.list = "list", reference.cells = "vector", reference.objects = "vector", query.cells = "vector", anchors = "ANY", offsets = "ANY", weight.reduction = "DimReduc", anchor.features = "ANY", neighbors = "list", command = "ANY" ) ) #' The TransferAnchorSet Class #' #' Inherits from the Anchorset class. Implemented mainly for method dispatch #' purposes. See \code{\link{AnchorSet}} for slot details. #' #' @name TransferAnchorSet-class #' @rdname TransferAnchorSet-class #' @concept objects #' @exportClass TransferAnchorSet #' TransferAnchorSet <- setClass( Class = "TransferAnchorSet", contains = "AnchorSet" ) #' The IntegrationAnchorSet Class #' #' Inherits from the Anchorset class. 
Implemented mainly for method dispatch #' purposes. See \code{\link{AnchorSet}} for slot details. #' #' @name IntegrationAnchorSet-class #' @rdname IntegrationAnchorSet-class #' @concept objects #' @exportClass IntegrationAnchorSet #' IntegrationAnchorSet <- setClass( Class = "IntegrationAnchorSet", contains = "AnchorSet" ) #' The ModalityWeights Class #' #' The ModalityWeights class is an intermediate data storage class that stores the modality weight and other #' related information needed for performing downstream analyses - namely data integration #' (\code{FindModalityWeights}) and data transfer (\code{\link{FindMultiModalNeighbors}}). #' #' @slot modality.weight.list A list of modality weights value from all modalities #' @slot modality.assay Names of assays for the list of dimensional reductions #' @slot params A list of parameters used in the FindModalityWeights #' @slot score.matrix a list of score matrices representing cross and within-modality prediction #' score, and kernel value #' @slot command Store log of parameters that were used #' #' @name ModalityWeights-class #' @rdname ModalityWeights-class #' @concept objects #' @exportClass ModalityWeights #' ModalityWeights <- setClass( Class = "ModalityWeights", slots = list( modality.weight.list = "list", modality.assay = "vector", params = "list", score.matrix = "list", command = "ANY" ) ) #' The BridgeReferenceSet Class #' The BridgeReferenceSet is an output from PrepareBridgeReference #' @slot bridge The multi-omic object #' @slot reference The Reference object only containing bridge representation assay #' @slot params A list of parameters used in the PrepareBridgeReference #' @slot command Store log of parameters that were used #' #' @name BridgeReferenceSet-class #' @rdname BridgeReferenceSet-class #' @concept objects #' @exportClass BridgeReferenceSet #' BridgeReferenceSet <- setClass( Class = "BridgeReferenceSet", slots = list( bridge = "ANY", reference = "ANY", params = "list", command = "ANY" ) ) #' The IntegrationData Class #' #' The IntegrationData object is an intermediate storage container used internally throughout the #' integration procedure to hold bits of data that are useful downstream. #' #' @slot neighbors List of neighborhood information for cells (outputs of \code{RANN::nn2}) #' @slot weights Anchor weight matrix #' @slot integration.matrix Integration matrix #' @slot anchors Anchor matrix #' @slot offsets The offsets used to enable cell look up in downstream functions #' @slot objects.ncell Number of cells in each object in the object.list #' @slot sample.tree Sample tree used for ordering multi-dataset integration #' #' @name IntegrationData-class #' @rdname IntegrationData-class #' @concept objects #' @exportClass IntegrationData #' IntegrationData <- setClass( Class = "IntegrationData", slots = list( neighbors = "ANY", weights = "ANY", integration.matrix = "ANY", anchors = "ANY", offsets = "ANY", objects.ncell = "ANY", sample.tree = "ANY" ) ) #' The SCTModel Class #' #' The SCTModel object is a model and parameters storage from SCTransform. #' It can be used to calculate Pearson residuals for new genes. #' #' @slot feature.attributes A data.frame with feature attributes in SCTransform #' @slot cell.attributes A data.frame with cell attributes in SCTransform #' @slot clips A list of two numeric of length two specifying the min and max #' values the Pearson residual will be clipped to. 
One for vst and one for #' SCTransform #' @slot umi.assay Name of the assay of the Seurat object containing the UMI matrix; #' the default is RNA #' @slot model A formula used in SCTransform #' @slot arguments Other information used in SCTransform #' @slot median_umi Median UMI (or scale factor) used to calculate corrected counts #' #' @seealso \code{\link{Assay}} #' #' @name SCTAssay-class #' @rdname SCTAssay-class #' @concept objects #' #' @examples #' \dontrun{ #' # SCTAssay objects are generated from SCTransform #' pbmc_small <- SCTransform(pbmc_small) #' } #' SCTModel <- setClass( Class = 'SCTModel', slots = c( feature.attributes = 'data.frame', cell.attributes = 'data.frame', clips = 'list', umi.assay = 'character', model = 'character', arguments = 'list', median_umi = 'numeric' ) ) #' The SCTAssay Class #' #' The SCTAssay object contains all the information found in an \code{\link{Assay}} #' object, with extra information from the results of \code{\link{SCTransform}} #' #' @slot SCTModel.list A list containing SCT models #' #' @seealso \code{\link{Assay}} #' #' @name SCTAssay-class #' @rdname SCTAssay-class #' @concept objects #' #' @examples #' \dontrun{ #' # SCTAssay objects are generated from SCTransform #' pbmc_small <- SCTransform(pbmc_small) #' pbmc_small[["SCT"]] #' } #' SCTAssay <- setClass( Class = 'SCTAssay', contains = 'Assay', slots = c( SCTModel.list = 'list' ) ) #' @note \code{scalefactors} objects can be created with \code{scalefactors()} #' #' @param spot Spot full resolution scale factor #' @param fiducial Fiducial full resolution scale factor #' @param hires High resolution scale factor #' @param lowres Low resolution scale factor #' #' @rdname ScaleFactors #' @concept objects #' @concept spatial #' @export #' scalefactors <- function(spot, fiducial, hires, lowres) { object <- list( spot = spot, fiducial = fiducial, hires = hires, lowres = lowres ) object <- sapply(X = object, FUN = as.numeric, simplify = FALSE, USE.NAMES = TRUE) return(structure(.Data = object, class = 'scalefactors')) } setOldClass(Classes = c('scalefactors')) #' The SlideSeq class #' #' The SlideSeq class represents spatial information from the Slide-seq platform #' #' @inheritSection SeuratObject::SpatialImage Slots #' @slot coordinates ...
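# A short illustration of the scalefactors() helper defined above. The numeric
# values are invented for illustration only; in practice they are typically read
# from the scalefactors_json.json file that accompanies a 10x Visium run.
library(Seurat)
sf <- scalefactors(
  spot = 89.5,      # full-resolution spot scale factor (made-up value)
  fiducial = 144.6, # full-resolution fiducial scale factor (made-up value)
  hires = 0.170,    # high-resolution image scale factor (made-up value)
  lowres = 0.051    # low-resolution image scale factor (made-up value)
)
class(sf)   # "scalefactors"
sf$lowres   # individual factors are accessible like list elements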
#' @concept spatial #' SlideSeq <- setClass( Class = 'SlideSeq', contains = 'SpatialImage', slots = list( 'coordinates' = 'data.frame' ) ) #' The STARmap class #' #' #' @inheritSection SeuratObject::SpatialImage Slots #' @concept objects #' @concept spatial #' STARmap <- setClass( Class = 'STARmap', contains = 'SpatialImage', slots = list( 'coordinates' = 'data.frame', 'qhulls' = 'data.frame' ) ) #' The VisiumV1 class #' #' The VisiumV1 class represents spatial information from the 10X Genomics Visium #' platform #' #' @slot image A three-dimensional array with PNG image data, see #' \code{\link[png]{readPNG}} for more details #' @slot scale.factors An object of class \code{\link{scalefactors}}; see #' \code{\link{scalefactors}} for more information #' @slot coordinates A data frame with tissue coordinate information #' @slot spot.radius Single numeric value giving the radius of the spots #' #' @name VisiumV1-class #' @rdname VisiumV1-class #' @concept objects #' @concept spatial #' @exportClass VisiumV1 #' VisiumV1 <- setClass( Class = 'VisiumV1', contains = 'SpatialImage', slots = list( 'image' = 'array', 'scale.factors' = 'scalefactors', 'coordinates' = 'data.frame', 'spot.radius' = 'numeric' ) ) setClass(Class = 'SliceImage', contains = 'VisiumV1') #%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% # Functions #%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% #' Get a vector of cell names associated with an image (or set of images) #' #' @param object Seurat object #' @param images Vector of image names #' @param unlist Return as a single vector of cell names as opposed to a list, #' named by image name. #' #' @return A vector of cell names #' #' @examples #' \dontrun{ #' CellsByImage(object = object, images = "slice1") #' } #' CellsByImage <- function(object, images = NULL, unlist = FALSE) { images <- images %||% Images(object = object) cells <- sapply( X = images, FUN = function(x) { Cells(x = object[[x]]) }, simplify = FALSE, USE.NAMES = TRUE ) if (unlist) { cells <- unname(obj = unlist(x = cells)) } return(cells) } #' Create a SCT Assay object #' #' Create a SCT object from a feature (e.g. gene) expression matrix and a list of SCTModels. #' The expected format of the input matrix is features x cells. #' #' Non-unique cell or feature names are not allowed. Please make unique before #' calling this function. #' @param scale.data a residual matrix #' @param SCTModel.list list of SCTModels #' @param umi.assay The UMI assay name. 
Default is RNA #' @inheritParams SeuratObject::CreateAssayObject #' #' @importFrom methods as #' @importFrom Matrix colSums rowSums #' #' @export #' @concept objects #' CreateSCTAssayObject <- function( counts, data, scale.data = NULL, umi.assay = "RNA", min.cells = 0, min.features = 0, SCTModel.list = NULL ) { assay <- CreateAssayObject( counts = counts, data = data, min.cells = min.cells, min.features = min.features ) if (!is.null(scale.data)) { assay <- SetAssayData(object = assay, slot = "scale.data", new.data = scale.data) } slot(object = assay, name = "assay.orig") <- umi.assay #checking SCTModel.list format if (is.null(x = SCTModel.list)) { SCTModel.type <- "none" warning("An empty SCTModel will be generated due to no SCTModel input") } else { if (inherits(x = SCTModel.list, what = "SCTModel")) { SCTModel.list <- list(model1 = SCTModel.list) SCTModel.type <- "SCTModel.list" } else if (inherits(x = SCTModel.list, what = "list")) { if (inherits(x = SCTModel.list[[1]], what = "SCTModel")){ SCTModel.type <- "SCTModel.list" } else if (IsVSTout(vst.out = SCTModel.list)){ SCTModel.type <- "vst.out" } else if (IsVSTout(SCTModel.list[[1]])) { SCTModel.type <- "vst.set" } else { stop("SCTModel input is not a correct format") } } } model.list <- switch( EXPR = SCTModel.type, "none" = { list() }, "SCTModel.list" = { SCTModel.list <- lapply(X = SCTModel.list, FUN = function(model) { select.cell <- intersect(x = Cells(x = model), Cells(x = assay)) if (length(x = select.cell) == 0) { stop("Cells in SCTModel.list don't match Cells in assay") } else { model@cell.attributes <- model@cell.attributes[select.cell, , drop = FALSE] } return(model) }) SCTModel.list }, "vst.out" = { SCTModel.list$umi.assay <- umi.assay SCTModel.list <- PrepVSTResults( vst.res = SCTModel.list, cell.names = Cells(x = assay) ) list(model1 = SCTModel.list) }, "vst.set" = { new.model <- lapply( X = SCTModel.list, FUN = function(vst.res) { vst.res$umi.assay <- umi.assay return(PrepVSTResults(vst.res = vst.res, cell.names = colnames(x = assay))) } ) names(x = new.model) <- paste0("model", 1:length(x = new.model)) new.model } ) assay <- new( Class = "SCTAssay", assay, SCTModel.list = model.list ) return(assay) } #' Slim down a Seurat object #' #' Keep only certain aspects of the Seurat object. Can be useful in functions #' that utilize merge as it reduces the amount of data in the merge #' #' @param object A \code{\link[SeuratObject]{Seurat}} object #' @param layers A vector or named list of layers to keep #' @param features Only keep a subset of features, defaults to all features #' @param assays Only keep a subset of assays specified here #' @param dimreducs Only keep a subset of DimReducs specified here (if #' \code{NULL}, remove all DimReducs) #' @param graphs Only keep a subset of Graphs specified here (if \code{NULL}, #' remove all Graphs) #' @param misc Preserve the \code{misc} slot; default is \code{TRUE} #' @param counts Preserve the count matrices for the assays specified #' @param data Preserve the data matrices for the assays specified #' @param scale.data Preserve the scale data matrices for the assays specified #' @param ... 
Ignored #' #' @return \code{object} with only the sub-object specified retained #' #' @importFrom SeuratObject .FilterObjects .PropagateList Assays #' Layers UpdateSlots #' #' @export #' #' @concept objects #' DietSeurat <- function( object, layers = NULL, features = NULL, assays = NULL, dimreducs = NULL, graphs = NULL, misc = TRUE, counts = deprecated(), data = deprecated(), scale.data = deprecated(), ... ) { CheckDots(...) dep.args <- c(counts = counts, data = data, scale.data = scale.data) for (lyr in names(x = dep.args)) { if (is_present(arg = dep.args[[lyr]])) { if (is.null(x = layers)) { layers <- unique(x = unlist(x = lapply( X = Assays(object = object), FUN = function(x) { return(Layers(object = object[[x]])) } ))) } deprecate_soft( when = '5.0.0', what = paste0('DietSeurat(', lyr, ' = )'), with = 'DietSeurat(layers = )' ) layers <- if (isTRUE(x = dep.args[[lyr]])) { c(layers, lyr) } else { Filter(f = function(x) x != lyr, x = layers) } } } object <- UpdateSlots(object = object) assays <- assays %||% Assays(object = object) assays <- intersect(x = assays, y = Assays(object = object)) if (!length(x = assays)) { abort(message = "No assays provided were found in the Seurat object") } if (!DefaultAssay(object = object) %in% assays) { abort( message = "The default assay is slated to be removed, please change the default assay" ) } layers <- layers %||% assays layers <- .PropagateList(x = layers, names = assays) for (assay in names(x = layers)) { layers[[assay]] <- tryCatch( expr = Layers(object = object[[assay]], search = layers[[assay]]), error = function(...) { return(character(length = 0L)) } ) } layers <- Filter(f = length, x = layers) if (!length(x = layers)) { abort(message = "None of the requested layers found") } for (assay in Assays(object = object)) { if (!(assay %in% assays)) { object[[assay]] <- NULL next } layers.rm <- setdiff( x = Layers(object = object[[assay]]), y = layers[[assay]] ) if (length(x = layers.rm)) { if (inherits(x = object[[assay]], what = 'Assay') && all(c('counts', 'data') %in% layers.rm)) { abort(message = "Cannot remove both 'counts' and 'data' from v3 Assays") } for (lyr in layers.rm) { suppressWarnings(object <- tryCatch(expr = { object[[assay]][[lyr]] <- NULL object }, error = function(e) { if (lyr == "data"){ object[[assay]][[lyr]] <- sparseMatrix(i = 1, j = 1, x = 1, dims = dim(object[[assay]][[lyr]]), dimnames = dimnames(object[[assay]][[lyr]])) } else{ slot(object = object[[assay]], name = lyr) <- new(Class = "dgCMatrix") } message("Converting layer ", lyr, " in assay ", assay, " to empty dgCMatrix") object })) } } if (!is.null(x = features)) { features.assay <- intersect( x = features, y = rownames(x = object[[assay]]) ) if (!length(x = features.assay)) { warn(message = paste0( 'No features found in assay ', sQuote(x = assay), ', removing...' 
)) object[[assay]] <- NULL next } suppressWarnings(object[[assay]] <- subset(x = object[[assay]], features = features.assay)) } } # remove misc when desired if (!isTRUE(x = misc)) { slot(object = object, name = "misc") <- list() } # remove unspecified DimReducs and Graphs all.objects <- .FilterObjects( object = object, classes.keep = c('DimReduc', 'Graph') ) objects.to.remove <- all.objects[!all.objects %in% c(dimreducs, graphs)] for (ob in objects.to.remove) { object[[ob]] <- NULL } cells.keep <- list() for (assay in Assays(object = object)) { cells.keep[[assay]] <- colnames(x = object[[assay]] ) } cells.keep <- intersect(colnames(x = object), unlist(cells.keep)) if (length(cells.keep) < ncol(x = object)) { object <- subset(object, cells = cells.keep) } return(object) } #' Filter stray beads from Slide-seq puck #' #' This function is useful for removing stray beads that fall outside the main #' Slide-seq puck area. Essentially, it's a circular filter where you set a #' center and radius defining a circle of beads to keep. If the center is not #' set, it will be estimated from the bead coordinates (trimming values outside #' the 1st and 99th percentiles to avoid skewing the center by the stray beads). #' By default, this function will display a \code{\link{SpatialDimPlot}} showing which cells #' were removed for easy adjustment of the center and/or radius. #' #' @param object Seurat object with slide-seq data #' @param image Name of the image where the coordinates are stored #' @param center Vector specifying the x and y coordinates for the center of the #' inclusion circle #' @param radius Radius of the circle of inclusion #' @param do.plot Display a \code{\link{SpatialDimPlot}} with the cells being #' removed labeled. #' #' @return Returns a Seurat object with only the subset of cells that pass the #' circular filter #' #' @concept objects #' @concept spatial #' @examples #' \dontrun{ #' # This example uses the ssHippo dataset which you can download #' # using the SeuratData package. #' library(SeuratData) #' data('ssHippo') #' # perform filtering of beads #' ssHippo.filtered <- FilterSlideSeq(ssHippo, radius = 2300) #' # This radius looks too small so increase and repeat until satisfied #' } #' @export #' FilterSlideSeq <- function( object, image = "image", center = NULL, radius = NULL, do.plot = TRUE ) { if (!inherits(x = object[[image]], what = "SlideSeq")) { warning( "This fxn is intended for filtering SlideSeq data and is untested ", "outside of that context."
) } dat <- GetTissueCoordinates(object[[image]]) if (is.null(x = center)) { # heuristic for determining center of puck center <- c() x.vals <- dat[, 1] center[1] <- mean( x = x.vals[x.vals < quantile(x = x.vals, probs = 0.99) & x.vals > quantile(x = x.vals, probs = 0.01)] ) y.vals <- dat[, 2] center[2] <- mean( x = y.vals[y.vals < quantile(x = y.vals, probs = 0.99) & y.vals > quantile(x = y.vals, probs = 0.01)] ) } if (is.null(x = radius)) { stop("Please provide a radius.") } dists <- apply(X = dat, MARGIN = 1, FUN = function(x) { as.numeric(dist(rbind(x[c(1, 2)], center))) }) cells.to.remove <- names(x = which(x = (dists > radius))) if (do.plot) { Idents(object) <- "keep" object <- SetIdent(object = object, cells = cells.to.remove, value = "remove") print(SpatialDimPlot(object = object)) } return(subset(x = object, cells = cells.to.remove, invert = TRUE)) } #' Get integration data #' #' @param object Seurat object #' @param integration.name Name of integration object #' @param slot Which slot in integration object to get #' #' @return Returns data from the requested slot within the integrated object #' #' @export #' @concept objects #' GetIntegrationData <- function(object, integration.name, slot) { tools <- slot(object = object, name = 'tools') if (!(integration.name %in% names(tools))) { stop('Requested integration key does not exist') } int.data <- tools[[integration.name]] return(slot(object = int.data, name = slot)) } #' Set integration data #' #' @param object Seurat object #' @param integration.name Name of integration object #' @param slot Which slot in integration object to set #' @param new.data New data to insert #' #' @return Returns a \code{\link{Seurat}} object #' #' @export #' @concept objects #' SetIntegrationData <- function(object, integration.name, slot, new.data) { tools <- slot(object = object, name = 'tools') if (!(integration.name %in% names(tools))) { new.integrated <- new(Class = 'IntegrationData') slot(object = new.integrated, name = slot) <- new.data tools[[integration.name]] <- new.integrated slot(object = object, name = 'tools') <- tools return(object) } int.data <- tools[[integration.name]] slot(object = int.data, name = slot) <- new.data tools[[integration.name]] <- int.data slot(object = object, name = 'tools') <- tools return(object) } #' Splits object into a list of subsetted objects. #' #' Splits object based on a single attribute into a list of subsetted objects, #' one for each level of the attribute. For example, useful for taking an object #' that contains cells from many patients, and subdividing it into #' patient-specific objects. #' #' @param object Seurat object #' @param split.by Attribute for splitting. Default is "ident". Currently #' only supported for class-level (i.e. non-quantitative) attributes. #' #' @return A named list of Seurat objects, each containing a subset of cells #' from the original object. 
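# The centre/radius heuristic used by FilterSlideSeq() above, restated on a plain
# data frame of bead coordinates so the logic is easy to follow. The coordinates
# below are simulated; with a real object they would come from
# GetTissueCoordinates() on the relevant image.
set.seed(42)
coords <- data.frame(
  x = c(rnorm(n = 100, mean = 3000, sd = 300), 8000),  # 100 beads plus one stray bead
  y = c(rnorm(n = 100, mean = 3000, sd = 300), 8000),
  row.names = paste0("bead_", 1:101)
)
# Centre estimate: per-axis mean after dropping values outside the 1st and 99th
# percentiles, so stray beads cannot skew it
trimmed.mean <- function(v) {
  mean(v[v > quantile(v, probs = 0.01) & v < quantile(v, probs = 0.99)])
}
center <- c(trimmed.mean(coords$x), trimmed.mean(coords$y))
# Keep beads whose Euclidean distance to the centre is within the chosen radius
dists <- sqrt((coords$x - center[1])^2 + (coords$y - center[2])^2)
beads.keep <- rownames(coords)[dists <= 1000]
length(beads.keep)  # the stray bead at (8000, 8000) is excluded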
#' #' @export #' @concept objects #' #' @examples #' data("pbmc_small") #' # Assign the test object a three level attribute #' groups <- sample(c("group1", "group2", "group3"), size = 80, replace = TRUE) #' names(groups) <- colnames(pbmc_small) #' pbmc_small <- AddMetaData(object = pbmc_small, metadata = groups, col.name = "group") #' obj.list <- SplitObject(pbmc_small, split.by = "group") #' SplitObject <- function(object, split.by = "ident") { if (split.by == 'ident') { groupings <- Idents(object = object) } else { groupings <- FetchData(object = object, vars = split.by)[, 1] } groupings <- unique(x = as.character(x = groupings)) obj.list <- list() for (i in groupings) { if (split.by == "ident") { obj.list[[i]] <- subset(x = object, idents = i) } else { cells <- which(x = object[[split.by, drop = TRUE]] == i) cells <- colnames(x = object)[cells] obj.list[[i]] <- subset(x = object, cells = cells) } } return(obj.list) } #' Find features with highest scores for a given dimensional reduction technique #' #' Return a list of features with the strongest contribution to a set of components #' #' @param object DimReduc object #' @param dim Dimension to use #' @param nfeatures Number of features to return #' @param projected Use the projected feature loadings #' @param balanced Return an equal number of features with both + and - scores. #' @param ... Extra parameters passed to \code{\link{Loadings}} #' #' @return Returns a vector of features #' #' @export #' @concept objects #' #' @examples #' data("pbmc_small") #' pbmc_small #' TopFeatures(object = pbmc_small[["pca"]], dim = 1) #' # After projection: #' TopFeatures(object = pbmc_small[["pca"]], dim = 1, projected = TRUE) #' TopFeatures <- function( object, dim = 1, nfeatures = 20, projected = FALSE, balanced = FALSE, ... ) { loadings <- Loadings(object = object, projected = projected, ...)[, dim, drop = FALSE] return(Top( data = loadings, num = nfeatures, balanced = balanced )) } #' Find cells with highest scores for a given dimensional reduction technique #' #' Return a list of genes with the strongest contribution to a set of components #' #' @param object DimReduc object #' @param dim Dimension to use #' @param ncells Number of cells to return #' @param balanced Return an equal number of cells with both + and - scores. #' @param ... Extra parameters passed to \code{\link{Embeddings}} #' #' @return Returns a vector of cells #' #' @export #' @concept objects #' #' @examples #' data("pbmc_small") #' pbmc_small #' head(TopCells(object = pbmc_small[["pca"]])) #' # Can specify which dimension and how many cells to return #' TopCells(object = pbmc_small[["pca"]], dim = 2, ncells = 5) #' TopCells <- function(object, dim = 1, ncells = 20, balanced = FALSE, ...) { embeddings <- Embeddings(object = object, ...)[, dim, drop = FALSE] return(Top( data = embeddings, num = ncells, balanced = balanced )) } #' Get nearest neighbors for given cell #' #' Return a vector of cell names of the nearest n cells. 
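# A quick companion to the TopFeatures()/TopCells() examples above, showing the
# matrices they draw from. Uses the bundled pbmc_small object; `balanced = TRUE`
# returns features/cells from both ends of the component.
library(Seurat)
data("pbmc_small")
TopFeatures(object = pbmc_small[["pca"]], dim = 1, nfeatures = 6, balanced = TRUE)
TopCells(object = pbmc_small[["pca"]], dim = 1, ncells = 6, balanced = TRUE)
# The underlying per-feature loadings and per-cell embeddings for PC 1
head(Loadings(object = pbmc_small[["pca"]])[, 1, drop = FALSE])
head(Embeddings(object = pbmc_small[["pca"]])[, 1, drop = FALSE])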
#' #' @param object \code{\link{Neighbor}} object #' @param cell Cell of interest #' @param n Number of neighbors to return #' #' @return Returns a vector of cell names #' #' @export #' @concept objects #' TopNeighbors <- function(object, cell, n = 5) { indices <- Indices(object = object)[cell, 1:n] return(Cells(x = object)[indices]) } #%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% # Methods for Seurat-defined generics #%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% #' @param assay Assay to convert #' @param reduction Name of DimReduc to set to main reducedDim in cds #' #' @rdname as.CellDataSet #' @concept objects #' @export #' @method as.CellDataSet Seurat #' as.CellDataSet.Seurat <- function(x, assay = NULL, reduction = NULL, ...) { CheckDots(...) if (!PackageCheck('monocle', error = FALSE)) { stop("Please install monocle from Bioconductor before converting to a CellDataSet object") } else if (packageVersion(pkg = 'monocle') >= package_version(x = '2.99.0')) { stop("Seurat can only convert to/from Monocle v2.X objects") } assay <- assay %||% DefaultAssay(object = x) # make variables, then run `newCellDataSet` # create cellData counts counts <- GetAssayData(object = x, assay = assay, slot = "counts") # metadata cell.metadata <- x[[]] feature.metadata <- x[[assay]][[]] if (!"gene_short_name" %in% colnames(x = feature.metadata)) { feature.metadata$gene_short_name <- rownames(x = feature.metadata) } pd <- new(Class = "AnnotatedDataFrame", data = cell.metadata) fd <- new(Class = "AnnotatedDataFrame", data = feature.metadata) # Now, determine the expressionFamily if ("monocle" %in% names(x = Misc(object = x))) { expressionFamily <- Misc(object = x, slot = "monocle")[["expressionFamily"]] } else { if (all(counts == floor(x = counts))) { expressionFamily <- VGAM::negbinomial.size() } else if (any(counts < 0)) { expressionFamily <- VGAM::uninormal() } else { expressionFamily <- VGAM::tobit() } } cds <- monocle::newCellDataSet( cellData = counts, phenoData = pd, featureData = fd, expressionFamily = expressionFamily ) if ("monocle" %in% names(x = Misc(object = x))) { monocle::cellPairwiseDistances(cds = cds) <- Misc(object = x, slot = "monocle")[["cellPairwiseDistances"]] monocle::minSpanningTree(cds = cds) <- Misc(object = x, slot = "monocle")[["minSpanningTree"]] Biobase::experimentData(cds = cds) <- Misc(object = x, slot = "monocle")[["experimentData"]] Biobase::protocolData(cds = cds) <- Misc(object = x, slot = "monocle")[["protocolData"]] Biobase::classVersion(cds = cds) <- Misc(object = x, slot = "monocle")[["classVersion"]] # no setter methods found for following slots slot(object = cds, name = "lowerDetectionLimit") <- Misc(object = x, slot = "monocle")[["lowerDetectionLimit"]] slot(object = cds, name = "dispFitInfo") <- Misc(object = x, slot = "monocle")[["dispFitInfo"]] slot(object = cds, name = "auxOrderingData") <- Misc(object = x, slot = "monocle")[["auxOrderingData"]] slot(object = cds, name = "auxClusteringData") <- Misc(object = x, slot = "monocle")[["auxClusteringData"]] } # adding dimensionality reduction data to the CDS dr.slots <- c("reducedDimS", "reducedDimK", "reducedDimW", "reducedDimA") reduction <- reduction %||% DefaultDimReduc(object = x, assay = assay) if (!is.null(x = reduction)) { if (grepl(pattern = 'tsne', x = tolower(x = reduction))) { slot(object = cds, name = "dim_reduce_type") <- "tSNE" monocle::reducedDimA(cds = cds) <- t(x = Embeddings(object = x[[reduction]])) } else { slot(object = cds, name 
= "dim_reduce_type") <- reduction monocle::reducedDimA(cds = cds) <- Loadings(object = x[[reduction]]) slot(object = cds, name = "reducedDimS") <- Embeddings(object = x[[reduction]]) } for (ii in dr.slots) { if (ii %in% names(x = slot(object = x[[reduction]], name = "misc"))) { slot(object = cds, name = ii) <- slot(object = x[[reduction]], name = "misc")[[ii]] } } } return(cds) } #' Convert objects to \code{Seurat} objects #' #' @inheritParams SeuratObject::as.Seurat #' @param slot Slot to store expression data as #' @param verbose Show progress updates #' #' @return A \code{Seurat} object generated from \code{x} #' #' @importFrom utils packageVersion #' #' @rdname as.Seurat #' @concept objects #' @export #' @method as.Seurat CellDataSet #' #' @seealso \code{\link[SeuratObject:as.Seurat]{SeuratObject::as.Seurat}} #' as.Seurat.CellDataSet <- function( x, slot = 'counts', assay = 'RNA', verbose = TRUE, ... ) { CheckDots(...) if (!PackageCheck('monocle', error = FALSE)) { stop("Please install monocle from Bioconductor before converting to a CellDataSet object") } else if (packageVersion(pkg = 'monocle') >= package_version(x = '2.99.0')) { stop("Seurat can only convert to/from Monocle v2.X objects") } slot <- match.arg(arg = slot, choices = c('counts', 'data')) if (verbose) { message("Pulling expression data") } expr <- Biobase::exprs(object = x) if (IsMatrixEmpty(x = expr)) { stop("No data provided in this CellDataSet object", call. = FALSE) } meta.data <- as.data.frame(x = Biobase::pData(object = x)) # if cell names are NULL, fill with cell_X if (is.null(x = colnames(x = expr))) { warning( "The column names of the 'counts' and 'data' matrices are NULL. Setting cell names to cell_columnidx (e.g 'cell_1').", call. = FALSE, immediate. = TRUE ) rownames(x = meta.data) <- colnames(x = expr) <- paste0("cell_", 1:ncol(x = expr)) } # Creating the object if (verbose) { message("Building Seurat object") } if (slot == 'data') { assays <- list(CreateAssayObject(data = expr)) names(x = assays) <- assay Key(object = assays[[assay]]) <- suppressWarnings(expr = UpdateKey(key = assay)) object <- new( Class = 'Seurat', assays = assays, meta.data = meta.data, version = packageVersion(pkg = 'Seurat'), project.name = 'SeuratProject' ) DefaultAssay(object = object) <- assay } else { object <- CreateSeuratObject( counts = expr, meta.data = meta.data, assay = assay ) } # feature metadata if (verbose) { message("Adding feature-level metadata") } feature.metadata <- Biobase::fData(object = x) object[[assay]][[names(x = feature.metadata)]] <- feature.metadata # mean/dispersion values disp.table <- tryCatch( expr = suppressWarnings(expr = monocle::dispersionTable(cds = x)), error = function(...) 
{ return(NULL) } ) if (!is.null(x = disp.table)) { if (verbose) { message("Adding dispersion information") } rownames(x = disp.table) <- disp.table[, 1] disp.table[, 1] <- NULL colnames(x = disp.table) <- paste0('monocle_', colnames(x = disp.table)) object[[assay]][[names(x = disp.table)]] <- disp.table } else if (verbose) { message("No dispersion information in CellDataSet object") } # variable features if ("use_for_ordering" %in% colnames(x = feature.metadata)) { if (verbose) { message("Setting variable features") } VariableFeatures(object = object, assay = assay) <- rownames(x = feature.metadata)[which(x = feature.metadata[, "use_for_ordering"])] } else if (verbose) { message("No variable features present") } # add dim reduction dr.name <- slot(object = x, name = "dim_reduce_type") if (length(x = dr.name) > 0) { if (verbose) { message("Adding ", dr.name, " dimensional reduction") } reduced.A <- t(x = slot(object = x, name = 'reducedDimA')) reduced.S <- t(x = slot(object = x, name = 'reducedDimS')) if (IsMatrixEmpty(x = reduced.S)) { embeddings <- reduced.A loadings <- new(Class = 'matrix') } else { embeddings <- reduced.S loadings <- t(x = reduced.A) } rownames(x = embeddings) <- colnames(x = object) misc.dr <- list( reducedDimS = slot(object = x, name = "reducedDimS"), reducedDimK = slot(object = x, name = "reducedDimK"), reducedDimW = slot(object = x, name = "reducedDimW"), reducedDimA = slot(object = x, name = "reducedDimA") ) dr <- suppressWarnings(expr = CreateDimReducObject( embeddings = embeddings, loadings = loadings, assay = assay, key = UpdateKey(key = tolower(x = dr.name)), misc = misc.dr )) object[[dr.name]] <- dr } else if (verbose) { message("No dimensional reduction information found") } monocle.specific.info <- list( expressionFamily = slot(object = x, name = "expressionFamily"), lowerDetectionLimit = slot(object = x, name = "lowerDetectionLimit"), dispFitInfo = slot(object = x, name = "dispFitInfo"), cellPairwiseDistances = slot(object = x, name = "cellPairwiseDistances"), minSpanningTree = slot(object = x, name = "minSpanningTree"), auxOrderingData = slot(object = x, name = "auxOrderingData"), auxClusteringData = slot(object = x, name = "auxClusteringData"), experimentData = slot(object = x, name = "experimentData"), protocolData = slot(object = x, name = "protocolData"), classVersion = slot(object = x, name = ".__classVersion__") ) Misc(object = object, slot = "monocle") <- monocle.specific.info return(object) } #' @param counts name of the SingleCellExperiment assay to store as \code{counts}; #' set to \code{NULL} if only normalized data are present #' @param data name of the SingleCellExperiment assay to slot as \code{data}. #' Set to NULL if only counts are present #' @param assay Name of assays to convert; set to \code{NULL} for all assays to be converted #' @param project Project name for new Seurat object #' #' @rdname as.Seurat #' @concept objects #' @export #' @method as.Seurat SingleCellExperiment #' as.Seurat.SingleCellExperiment <- function( x, counts = 'counts', data = 'logcounts', assay = NULL, project = 'SingleCellExperiment', ... ) { CheckDots(...) if (!PackageCheck('SingleCellExperiment', error = FALSE)) { stop( "Please install SingleCellExperiment from Bioconductor before converting to a SingeCellExperiment object.", "\nhttps://bioconductor.org/packages/SingleCellExperiment/", call. 
= FALSE ) } meta.data <- as.data.frame(x = SummarizedExperiment::colData(x = x)) if (packageVersion(pkg = "SingleCellExperiment") >= "1.14.0") { orig.exp <- SingleCellExperiment::mainExpName(x = x) %||% "originalexp" } else { orig.exp <- "originalexp" } if (!is.null(SingleCellExperiment::altExpNames(x = x))) { assayn <- assay %||% SingleCellExperiment::altExpNames(x = x) if (!all(assay %in% SingleCellExperiment::altExpNames(x = x))) { stop("One or more of the assays you are trying to convert is not in the SingleCellExperiment object") } assayn <- c(orig.exp, assayn) } else { assayn <- orig.exp } for (assay in assayn) { if (assay != orig.exp) { x <- SingleCellExperiment::swapAltExp(x = x, name = assay, saved = NULL) } # Pull matrices mats <- list(counts = counts, data = data) mats <- Filter(f = Negate(f = is.null), x = mats) if (length(x = mats) == 0) { stop("Cannot pass 'NULL' to both 'counts' and 'data'") } for (m in 1:length(x = mats)) { mats[[m]] <- tryCatch( expr = SummarizedExperiment::assay(x = x, i = mats[[m]]), error = function(e) { stop("No data in provided assay - ", mats[[m]], call. = FALSE) } ) # if cell names are NULL, fill with cell_X if (is.null(x = colnames(x = mats[[m]]))) { warning( "The column names of the ", names(x = mats)[m], " matrix is NULL. Setting cell names to cell_columnidx (e.g 'cell_1').", call. = FALSE, immediate. = TRUE ) cell.names <- paste0("cell_", 1:ncol(x = mats[[m]])) colnames(x = mats[[m]]) <- cell.names rownames(x = meta.data) <- cell.names } } assays <- if (is.null(x = mats$counts)) { list(CreateAssayObject(data = mats$data)) } else if (is.null(x = mats$data)) { list(CreateAssayObject(counts = mats$counts)) } else { a <- CreateAssayObject(counts = mats$counts) a <- SetAssayData(object = a, slot = 'data', new.data = mats$data) list(a) } names(x = assays) <- assay Key(object = assays[[assay]]) <- paste0(tolower(x = assay), '_') # Create the Seurat object if (!exists(x = "object")) { object <- CreateSeuratObject( counts = assays[[assay]], Class = 'Seurat', assay = assay, meta.data = meta.data, version = packageVersion(pkg = 'Seurat'), project.name = project ) } else { object[[assay]] <- assays[[assay]] } DefaultAssay(object = object) <- assay # add feature level meta data md <- SingleCellExperiment::rowData(x = x) if (ncol(x = md) > 0) { # replace underscores rownames(x = md) <- gsub(pattern = "_", replacement = "-", x = rownames(x = md)) md <- as.data.frame(x = md) # ensure order same as data md <- md[rownames(x = object[[assay]]), , drop = FALSE] object[[assay]] <- AddMetaData( object = object[[assay]], metadata = md ) } Idents(object = object) <- project # Get DimReduc information, add underscores if needed and pull from different alt EXP if (length(x = SingleCellExperiment::reducedDimNames(x = x)) > 0) { for (dr in SingleCellExperiment::reducedDimNames(x = x)) { embeddings <- as.matrix(x = SingleCellExperiment::reducedDim(x = x, type = dr)) if (is.null(x = rownames(x = embeddings))) { rownames(x = embeddings) <- cell.names } else { rownames(x = embeddings) <- make.unique(names = rownames(x = embeddings)) } if (isTRUE(x = !grepl('_$', gsub(pattern = "[[:digit:]]", replacement = "_", x = colnames(x = SingleCellExperiment::reducedDim(x = x, type = dr))[1] )))) { key <- gsub( pattern = "[[:digit:]]", replacement = "_", x = colnames(x = SingleCellExperiment::reducedDim(x = x, type = dr))[1] ) } else { key <- gsub( pattern = "[[:digit:]]", replacement = "", x = colnames(x = SingleCellExperiment::reducedDim(x = x, type = dr))[1] ) } if (length(x = key) == 
0) { key <- paste0(dr, "_") } colnames(x = embeddings) <- paste0(key, 1:ncol(x = embeddings)) object[[dr]] <- CreateDimReducObject( embeddings = embeddings, key = key, assay = DefaultAssay(object = object) ) } } } return(object) } #' @param assay Assays to convert #' #' @rdname as.SingleCellExperiment #' @concept objects #' @export #' @method as.SingleCellExperiment Seurat #' @importFrom SeuratObject .FilterObjects #' as.SingleCellExperiment.Seurat <- function(x, assay = NULL, ...) { CheckDots(...) if (!PackageCheck('SingleCellExperiment', error = FALSE)) { stop("Please install SingleCellExperiment from Bioconductor before converting to a SingeCellExperiment object") } assay <- assay %||% Assays(object = x) if (!all(assay %in% Assays(object = x))) { stop("One or more of the assays you are trying to convert is not in the Seurat object") } if (DefaultAssay(object = x) %in% assay) { assay <- union(DefaultAssay(object = x), assay) } experiments <- list() for (assayn in assay) { assays <- list( counts = GetAssayData(object = x, assay = assayn, slot = "counts"), logcounts = GetAssayData(object = x, assay = assayn, slot = "data") ) scaledata_a <- GetAssayData(object = x, assay = assayn, slot = "scale.data") if (isTRUE(x = all.equal( target = dim(x = assays[["counts"]]), current = dim(x = scaledata_a)) )) { assays[["scaledata"]] <- scaledata_a } assays <- assays[sapply(X = assays, FUN = nrow) != 0] sume <- SummarizedExperiment::SummarizedExperiment(assays = assays) experiments[[assayn]] <- sume } # create one single cell experiment sce <- as(object = experiments[[1]], Class = "SingleCellExperiment") orig.exp.name <- names(x = experiments[1]) if (packageVersion(pkg = "SingleCellExperiment") >= "1.14.0") { SingleCellExperiment::mainExpName(sce) <- names(x = experiments[1]) } if (length(x = experiments) > 1) { sce <- SingleCellExperiment::SingleCellExperiment(sce, altExps = experiments) sce <- SingleCellExperiment::swapAltExp( x = sce, name = orig.exp.name, saved = NULL ) } metadata <- x[[]] metadata$ident <- Idents(object = x) SummarizedExperiment::colData(x = sce) <- S4Vectors::DataFrame(metadata) for (assayn in assay) { if (assayn != orig.exp.name) { sce <- SingleCellExperiment::swapAltExp( x = sce, name = assayn, saved = orig.exp.name ) SummarizedExperiment::rowData(x = sce) <- S4Vectors::DataFrame(x[[assayn]][[]]) sce <- SingleCellExperiment::swapAltExp( x = sce, name = orig.exp.name, saved = assayn ) } } for (dr in .FilterObjects(object = x, classes.keep = "DimReduc")) { assay.used <- DefaultAssay(object = x[[dr]]) swap.exp <- assay.used %in% SingleCellExperiment::altExpNames(x = sce) & assay.used != orig.exp.name if (swap.exp) { sce <- SingleCellExperiment::swapAltExp( x = sce, name = assay.used, saved = orig.exp.name ) } SingleCellExperiment::reducedDim(x = sce, type = toupper(x = dr)) <- Embeddings(object = x[[dr]]) if (swap.exp) { sce <- SingleCellExperiment::swapAltExp( x = sce, name = orig.exp.name, saved = assay.used ) } } return(sce) } #' Cast to Sparse #' #' @inheritParams SeuratObject::as.sparse #' #' @importFrom methods is #' @importFrom Matrix sparseMatrix #' #' @rdname as.sparse #' @concept objects #' @export #' @method as.sparse H5Group #' #' #' @seealso \code{\link[SeuratObject:as.sparse]{SeuratObject::as.sparse}} #' as.sparse.H5Group <- function(x, ...) { CheckDots(...) 
for (i in c('data', 'indices', 'indptr')) { if (!x$exists(name = i) || !is(object = x[[i]], class2 = 'H5D')) { stop("Invalid H5Group specification for a sparse matrix, missing dataset ", i) } } if ('h5sparse_shape' %in% hdf5r::h5attr_names(x = x)) { return(sparseMatrix( i = x[['indices']][] + 1, p = x[['indptr']][], x = x[['data']][], dims = rev(x = hdf5r::h5attr(x = x, which = 'h5sparse_shape')) )) } return(sparseMatrix( i = x[['indices']][] + 1, p = x[['indptr']][], x = x[['data']][] )) } #' @method as.sparse IterableMatrix #' @export #' as.sparse.IterableMatrix <- function(x, ...) { return(as(object = x, Class = 'dgCMatrix')) } #' Get Cell Names #' #' @inheritParams SeuratObject::Cells #' #' @rdname Cells #' @concept objects #' @method Cells SCTModel #' @export #' Cells.SCTModel <- function(x, ...) { return(rownames(x = slot(object = x, name = "cell.attributes"))) } #' @method Cells SCTAssay #' @export #' Cells.SCTAssay <- function(x, layer = NA, ...) { layer <- layer %||% levels(x = x)[1L] if (rlang::is_na(x = layer)) { return(colnames(x = x)) } return(Cells(x = components(object = x, model = layer))) } #' @rdname Cells #' @concept objects #' @concept spatial #' @method Cells SlideSeq #' @export #' #' @seealso \code{\link[SeuratObject:Cells]{SeuratObject::Cells}} #' Cells.SlideSeq <- function(x, ...) { return(rownames(x = GetTissueCoordinates(object = x))) } #' @rdname Cells #' @concept objects #' @concept spatial #' @method Cells STARmap #' @export #' Cells.STARmap <- function(x, ...) { return(rownames(x = GetTissueCoordinates(object = x))) } #' @rdname Cells #' @concept objects #' @method Cells VisiumV1 #' @export #' Cells.VisiumV1 <- function(x, ...) { return(rownames(x = GetTissueCoordinates(object = x, scale = NULL))) } #' @importFrom SeuratObject DefaultLayer Layers #' #' @method Features SCTAssay #' @export #' Features.SCTAssay <- function(x, layer = NA, ...) { layer <- layer %||% DefaultLayer(object = x) if (rlang::is_na(x = layer)) { return(rownames(x = x)) } layer <- rlang::arg_match( arg = layer, values = c(Layers(object = x), levels(x = x))) if (layer %in% levels(x = x)) { return(Features(x = components(object = x, model = layer))) } return(NextMethod()) } #' @method Features SCTModel #' @export #' Features.SCTModel <- function(x, ...) { return(rownames(x = SCTResults(object = x, slot = 'feature.attributes'))) } #' @param assay Assay to get #' #' @rdname GetAssay #' @concept objects #' @export #' @method GetAssay Seurat #' #' @examples #' data("pbmc_small") #' GetAssay(object = pbmc_small, assay = "RNA") #' GetAssay.Seurat <- function(object, assay = NULL, ...) { CheckDots(...) assay <- assay %||% DefaultAssay(object = object) object.assays <- FilterObjects( object = object, classes.keep = c('Assay', 'Assay5')) if (!assay %in% object.assays) { stop(paste0( assay, " is not an assay present in the given object. Available assays are: ", paste(object.assays, collapse = ", ") )) } return(slot(object = object, name = 'assays')[[assay]]) } #' Get Image Data #' #' @inheritParams SeuratObject::GetImage #' #' @rdname GetImage #' @method GetImage SlideSeq #' @concept objects #' @concept spatial #' @export #' #' @seealso \code{\link[SeuratObject:GetImage]{SeuratObject::GetImage}} #' GetImage.SlideSeq <- function( object, mode = c('grob', 'raster', 'plotly', 'raw'), ... 
) { mode <- match.arg(arg = mode) return(NullImage(mode = mode)) } #' @rdname GetImage #' @method GetImage STARmap #' @concept objects #' @concept spatial #' @export #' GetImage.STARmap <- function( object, mode = c('grob', 'raster', 'plotly', 'raw'), ... ) { mode <- match.arg(arg = mode) return(NullImage(mode = mode)) } #' @importFrom plotly raster2uri #' @importFrom grDevices as.raster #' @importFrom grid rasterGrob unit #' #' @rdname GetImage #' @concept objects #' @concept spatial #' @method GetImage VisiumV1 #' @export #' GetImage.VisiumV1 <- function( object, mode = c('grob', 'raster', 'plotly', 'raw'), ... ) { mode <- match.arg(arg = mode) image <- slot(object = object, name = 'image') image <- switch( EXPR = mode, 'grob' = rasterGrob( image = image, width = unit(x = 1, units = 'npc'), height = unit(x = 1, units = 'npc') ), 'raster' = as.raster(x = image), 'plotly' = list( source = raster2uri(r = GetImage(object = object, mode = 'raster')), xref = 'x', yref = 'y', # x = -7, # y = -7, sizex = ncol(x = object), sizey = nrow(x = object), sizing = 'stretch', opacity = 1, layer = 'below' ), 'raw' = image, stop("Unknown image mode: ", mode, call. = FALSE) ) return(image) } #' Get Tissue Coordinates #' #' @inheritParams SeuratObject::GetTissueCoordinates #' #' @rdname GetTissueCoordinates #' @method GetTissueCoordinates SlideSeq #' @concept objects #' @concept spatial #' @export #' #' @seealso \code{\link[SeuratObject:GetTissueCoordinates]{SeuratObject::GetTissueCoordinates}} #' GetTissueCoordinates.SlideSeq <- function(object, ...) { coords <- slot(object = object, name = 'coordinates') colnames(x = coords) <- c('x', 'y') # coords$y <- -rev(x = coords$y) + 1 # coords$y <- FlipCoords(x = coords$y) coords$cells <- rownames(x = coords) return(coords) } #' @param qhulls return qhulls instead of centroids #' #' @rdname GetTissueCoordinates #' @method GetTissueCoordinates STARmap #' @concept objects #' @concept spatial #' @export #' GetTissueCoordinates.STARmap <- function(object, qhulls = FALSE, ...) { if (qhulls) { return(slot(object = object, name = 'qhulls')) } return(slot(object = object, name = 'coordinates')) } #' @param scale A factor to scale the coordinates by; choose from: 'tissue', #' 'fiducial', 'hires', 'lowres', or \code{NULL} for no scaling #' @param cols Columns of tissue coordinates data.frame to pull #' #' @rdname GetTissueCoordinates #' @method GetTissueCoordinates VisiumV1 #' @concept objects #' @concept spatial #' @export #' GetTissueCoordinates.VisiumV1 <- function( object, scale = 'lowres', cols = c('imagerow', 'imagecol'), ... 
) { cols <- cols %||% colnames(x = slot(object = object, name = 'coordinates')) if (!is.null(x = scale)) { coordinates <- slot( object = object, name = 'coordinates')[, c('imagerow', 'imagecol')] scale <- match.arg( arg = scale, choices = c('spot', 'fiducial', 'hires', 'lowres')) scale.use <- ScaleFactors(object = object)[[scale]] coordinates <- coordinates * scale.use } else { coordinates <- slot(object = object, name = 'coordinates')[, cols] } return(coordinates) } #' Get Variable Feature Information #' #' Get variable feature information from \code{\link{SCTAssay}} objects #' #' @inheritParams SeuratObject::HVFInfo #' @param method method to determine variable features #' #' @export #' @method HVFInfo SCTAssay #' #' @seealso \code{\link[SeuratObject]{HVFInfo}} #' #' @examples #' \dontrun{ #' # Get the HVF info directly from an SCTAssay object #' pbmc_small <- SCTransform(pbmc_small) #' HVFInfo(pbmc_small[["SCT"]], method = 'sct')[1:5, ] #' } #' HVFInfo.SCTAssay <- function(object, method, status = FALSE, ...) { CheckDots(...) disp.methods <- c('mean.var.plot', 'dispersion', 'disp') if (tolower(x = method) %in% disp.methods) { method <- 'mvp' } method <- switch( EXPR = tolower(x = method), 'sctransform' = 'sct', method ) vars <- c('gmean', 'variance', 'residual_variance') hvf.info <- SCTResults(object = object, slot = "feature.attributes")[,vars] if (status) { hvf.info$variable <- FALSE hvf.info[VariableFeatures(object = object), "variable"] <- TRUE } return(hvf.info) } #' Get Spot Radius #' #' @inheritParams SeuratObject::Radius #' #' @rdname Radius #' @concept objects #' @concept spatial #' @method Radius SlideSeq #' @export #' #' @seealso \code{\link[SeuratObject:Radius]{SeuratObject::Radius}} #' Radius.SlideSeq <- function(object) { return(0.005) } #' @rdname Radius #' @concept objects #' @concept spatial #' @method Radius STARmap #' @export #' Radius.STARmap <- function(object) { return(NULL) } #' @rdname Radius #' @concept objects #' @concept spatial #' @method Radius VisiumV1 #' @export #' Radius.VisiumV1 <- function(object) { return(slot(object = object, name = 'spot.radius')) } #' @rdname RenameCells #' @export #' @concept objects #' @method RenameCells SCTAssay #' RenameCells.SCTAssay <- function(object, new.names = NULL, ...) { CheckDots(...) old.names <- Cells(x = object) names(x = new.names) <- old.names cell.attributes <- SCTResults(object = object, slot = "cell.attributes") if (length(x = cell.attributes) > 0) { if (is.data.frame(x = cell.attributes)) { old.names <- rownames(x = cell.attributes) rownames(x = cell.attributes) <- unname(obj = new.names[old.names]) } else { cell.attributes <- lapply( X = cell.attributes, FUN = function(x) { old.names <- rownames(x = x) rownames(x = x) <- unname(obj = new.names[old.names]) return(x) } ) } SCTResults(object = object, slot = "cell.attributes") <- cell.attributes } new.names <- unname(obj = new.names) object <- NextMethod() return(object) } #' Rename Cells in an Object #' #' @inheritParams SeuratObject::RenameCells #' #' @rdname RenameCells #' @concept objects #' @method RenameCells SlideSeq #' @export #' #' @seealso \code{\link[SeuratObject:RenameCells]{SeuratObject::RenameCells}} #' RenameCells.SlideSeq <- function(object, new.names = NULL, ...) { return(RenameCells.VisiumV1(object = object, new.names = new.names)) } #' @rdname RenameCells #' @concept objects #' @method RenameCells STARmap #' @export #' RenameCells.STARmap <- function(object, new.names = NULL, ...) 
{ names(x = new.names) <- Cells(x = object) object <- RenameCells.VisiumV1(object = object, new.names = new.names) qhulls <- GetTissueCoordinates(object = object, qhull = TRUE) qhulls$cell <- new.names[qhulls$cell] slot(object = object, name = "qhulls") <- qhulls return(object) } #' @rdname RenameCells #' @concept objects #' @method RenameCells VisiumV1 #' @export #' RenameCells.VisiumV1 <- function(object, new.names = NULL, ...) { if (is.null(x = new.names)) { return(object) } else if (length(x = new.names) != length(x = Cells(x = object))) { stop("Wrong number of cell/spot names", call. = FALSE) } names(x = new.names) <- Cells(x = object) coordinates <- GetTissueCoordinates(object = object, scale = NULL, cols = NULL) rownames(x = coordinates) <- new.names[rownames(x = coordinates)] slot(object = object, name = 'coordinates') <- coordinates return(object) } #' @rdname SCTResults #' @export #' @method SCTResults SCTModel #' SCTResults.SCTModel <- function(object, slot, ...) { CheckDots(...) slots.use <- c('feature.attributes', 'cell.attributes', 'clips','umi.assay', 'model', 'arguments', 'median_umi') if (!slot %in% slots.use) { stop( "'slot' must be one of ", paste(slots.use, collapse = ', '), call. = FALSE ) } return(slot(object = object, name = slot)) } #' @rdname SCTResults #' @concept objects #' @export #' @method SCTResults<- SCTModel #' "SCTResults<-.SCTModel" <- function(object, slot, ..., value) { slots.use <- c('feature.attributes', 'cell.attributes', 'clips','umi.assay', 'model', 'arguments', 'median_umi') if (!slot %in% slots.use) { stop( "'slot' must be one of ", paste(slots.use, collapse = ', '), call. = FALSE ) } slot(object = object, name = slot) <- value return(object) } #' @param slot Which slot to pull the SCT results from #' @param model Name of SCModel to pull result from. Available names can be #' retrieved with \code{levels}. #' #' @return Returns the value present in the requested slot for the requested #' group. If group is not specified, returns a list of slot results for each #' group unless there is only one group present (in which case it just returns #' the slot directly). #' #' @rdname SCTResults #' @concept objects #' @export #' @method SCTResults SCTAssay #' SCTResults.SCTAssay <- function(object, slot, model = NULL, ...) { CheckDots(...) slots.use <- c('feature.attributes', 'cell.attributes', 'clips', 'umi.assay', 'model', 'arguments', 'median_umi') if (!slot %in% slots.use) { stop( "'slot' must be one of ", paste(slots.use, collapse = ', '), call. = FALSE ) } model <- model %||% levels(x = object) model.list <- slot(object = object, name = "SCTModel.list")[model] results.list <- lapply(X = model.list, FUN = function(x) SCTResults(object = x, slot = slot)) if (length(x = results.list) == 1) { results.list <- results.list[[1]] } return(results.list) } #' @rdname SCTResults #' @concept objects #' @export #' @method SCTResults<- SCTAssay #' "SCTResults<-.SCTAssay" <- function(object, slot, model = NULL, ..., value) { slots.use <- c('feature.attributes', 'cell.attributes', 'clips','umi.assay', 'model', 'arguments', 'median_umi') if (!slot %in% slots.use) { stop( "'slot' must be one of ", paste(slots.use, collapse = ', '), call. 
= FALSE ) } model <- model %||% levels(x = object) model.list <- slot(object = object, name = "SCTModel.list")[model] if (!is.list(x = value) | is.data.frame(x = value)) { value <- list(value) } model.names <- names(x = model.list) model.list <- lapply( X = 1:length(x = model.list), FUN = function(x) { SCTResults(object = model.list[[x]], slot = slot) <- value[[x]] return(model.list[[x]]) } ) names(x = model.list) <- model.names slot(object = object, name = "SCTModel.list")[model.names] <- model.list return(object) } #' @param assay Assay in the Seurat object to pull from #' #' @rdname SCTResults #' @export #' @concept objects #' @method SCTResults Seurat #' SCTResults.Seurat <- function(object, assay = "SCT", slot, model = NULL, ...) { CheckDots(...) return(SCTResults(object = object[[assay]], slot = slot, model = model, ...)) } #' @importFrom utils head #' @method VariableFeatures SCTModel #' @export #' VariableFeatures.SCTModel <- function(object, method = NULL, nfeatures = 3000, ...) { if (!is_scalar_integerish(x = nfeatures) || (!is_na(x = nfeatures < 1L) && nfeatures < 1L)) { abort(message = "'nfeatures' must be a single positive integer") } feature.attr <- SCTResults(object = object, slot = 'feature.attributes') feature.variance <- feature.attr[, 'residual_variance'] names(x = feature.variance) <- row.names(x = feature.attr) feature.variance <- sort(x = feature.variance, decreasing = TRUE) if (is_na(x = nfeatures)) { return(names(x = feature.variance)) } return(head(x = names(x = feature.variance), n = nfeatures)) } #' @importFrom utils head #' @method VariableFeatures SCTAssay #' @export #' VariableFeatures.SCTAssay <- function( object, method = NULL, layer = NULL, nfeatures = NULL, simplify = TRUE, use.var.features = TRUE, ... ) { # Is the information already in var.features? var.features.existing <- slot(object = object, name = "var.features") nfeatures <- nfeatures %||% length(x = var.features.existing) %||% 3000 if (is.null(x = layer)) { layer <- levels(x = object) } if (simplify == TRUE & use.var.features == TRUE & length(var.features.existing) >= nfeatures){ return(head(x = var.features.existing, n = nfeatures)) } layer <- match.arg(arg = layer, choices = levels(x = object), several.ok = TRUE) # run variable features on each model vf.list <- sapply( X = layer, FUN = function(lyr) { return(VariableFeatures( object = components(object = object, model = lyr), nfeatures = nfeatures, ... 
)) }, simplify = FALSE, USE.NAMES = TRUE ) if (isFALSE(x = simplify)){ return(vf.list) } var.features <- sort( x = table(unlist(x = vf.list, use.names = FALSE)), decreasing = TRUE ) if (length(x = var.features) == 0) { return(NULL) } for (i in 1:length(x = layer)) { vst_out <- SCTModel_to_vst(SCTModel = slot(object = object, name = "SCTModel.list")[[layer[[i]]]]) var.features <- var.features[names(x = var.features) %in% rownames(x = vst_out$gene_attr)] } tie.val <- var.features[min(nfeatures, length(x = var.features))] features <- names(x = var.features[which(x = var.features > tie.val)]) if (length(x = features) > 0) { feature.ranks <- sapply(X = features, FUN = function(x) { ranks <- sapply(X = vf.list, FUN = function(vf) { if (x %in% vf) { return(which(x = x == vf)) } return(NULL) }) median(x = unlist(x = ranks)) }) features <- names(x = sort(x = feature.ranks)) } features.tie <- var.features[which(x = var.features == tie.val)] tie.ranks <- sapply(X = names(x = features.tie), FUN = function(x) { ranks <- sapply(X = vf.list, FUN = function(vf) { if (x %in% vf) { return(which(x = x == vf)) } return(NULL) }) median(x = unlist(x = ranks)) }) features <- c( features, names(x = head(x = sort(x = tie.ranks), nfeatures - length(x = features))) ) return(features) } #' @rdname ScaleFactors #' @method ScaleFactors VisiumV1 #' @export #' @concept spatial #' ScaleFactors.VisiumV1 <- function(object, ...) { return(slot(object = object, name = 'scale.factors')) } #' @method FetchData VisiumV1 #' @export #' @concept spatial #' FetchData.VisiumV1 <- function( object, vars, cells = NULL, ... ) { if (is.numeric(x = cells)) { cells <- Cells(x = object)[cells] } else if (is.null(x = cells)) { cells <- Cells(x = object) } vars.unkeyed <- gsub(pattern = paste0('^', Key(object)), replacement = '', x = vars) coords <- GetTissueCoordinates(object = object)[cells, vars.unkeyed, drop = FALSE] colnames(x = coords) <- vars return(coords) } #%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% # Methods for R-defined generics #%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% #' @method [ SlideSeq #' @concept objects #' @export #' "[.SlideSeq" <- function(x, i, ...) { return(subset(x = x, cells = i, ...)) } #' @method [ VisiumV1 #' @export #' "[.VisiumV1" <- function(x, i, ...) { return(subset(x = x, cells = i)) } #' @method components SCTAssay #' @export #' components.SCTAssay <- function(object, model, ...) { model <- rlang::arg_match(arg = model, values = levels(x = object)) return(slot(object = object, name = 'SCTModel.list')[[model]]) } #' @method dim SlideSeq #' @concept objects #' @export #' dim.SlideSeq <- function(x) { # return(dim(x = GetImage(object = x, mode = 'raw'))) return(c(599, 600)) } #' @method dim STARmap #' @concept objects #' @export #' dim.STARmap <- function(x) { coords <- GetTissueCoordinates(object = x) return(c( max(coords[, 1]) - min(coords[, 1]), max(coords[, 2]) - min(coords[, 2]) )) } #' @method dim VisiumV1 #' @concept objects #' @export #' dim.VisiumV1 <- function(x) { return(dim(x = GetImage(object = x)$raster)) } #' @rdname SCTAssay-class #' @name SCTAssay-class #' #' @section Get and set SCT model names: #' SCT results are named by initial run of \code{\link{SCTransform}} in order #' to keep SCT parameters straight between runs.
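# A brief illustration of the model-name bookkeeping described in this section:
# levels() lists the SCT models stored in an SCTAssay, and SCTResults() pulls
# per-model results. It assumes SCTransform() is run on the bundled pbmc_small
# object, and is wrapped in `if (FALSE)` (mirroring the \dontrun convention used
# in the roxygen examples) since SCTransform() is relatively expensive.
if (FALSE) {
  library(Seurat)
  data("pbmc_small")
  pbmc_small <- SCTransform(pbmc_small, verbose = FALSE)
  levels(pbmc_small[["SCT"]])   # names of the SCT models registered in the assay
  # Per-feature attributes for the model, the same columns used by HVFInfo() above
  fa <- SCTResults(object = pbmc_small[["SCT"]], slot = "feature.attributes")
  head(fa[, c("gmean", "variance", "residual_variance")])
}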
When working with merged #' \code{SCTAssay} objects, these model names are important. \code{levels} #' allows querying the models present. \code{levels<-} allows the changing of #' the names of the models present, useful when merging \code{SCTAssay} objects. #' Note: unlike normal \code{\link[base]{levels<-}}, \code{levels<-.SCTAssay} #' allows complete changing of model names, not reordering. #' #' @param x An \code{SCTAssay} object #' #' @return \code{levels}: SCT model names #' #' @export #' @concept objects #' @method levels SCTAssay #' #' @examples #' \dontrun{ #' # Query and change SCT model names #' levels(pbmc_small[['SCT']]) #' levels(pbmc_small[['SCT']]) <- '3' #' levels(pbmc_small[['SCT']]) #' } #' levels.SCTAssay <- function(x) { return(names(x = slot(object = x, name = "SCTModel.list"))) } #' @rdname SCTAssay-class #' @name SCTAssay-class #' #' @param value New levels, must be in the same order as the levels present #' #' @return \code{levels<-}: \code{x} with updated SCT model names #' #' @export #' @concept objects #' @method levels<- SCTAssay #' "levels<-.SCTAssay" <- function(x, value) { value <- sapply(X = value, FUN = function(v) { if (suppressWarnings(expr = !is.na(x = as.numeric(x = v)))) { warning("SCTModel groups cannot be number, group is added in front of ", v) v <- paste0("group", v) } return (v) }) # Get current levels levels <- levels(x = x) if (length(x = value) != length(x = levels)) { stop("Must provide a vector of length ", length(x = levels), " as new levels.", call. = FALSE) } names(x = slot(object = x, name = "SCTModel.list")) <- value return(x) } #' Merge SCTAssay objects #' #' @inheritParams SeuratObject::merge #' @param x A \code{\link[SeuratObject]{Seurat}} object #' @param na.rm If na.rm = TRUE, this will only preserve residuals that are #' present in all SCTAssays being merged. Otherwise, missing residuals will be #' populated with NAs. #' @export #' @method merge SCTAssay #' @concept objects #' merge.SCTAssay <- function( x = NULL, y = NULL, add.cell.ids = NULL, merge.data = TRUE, na.rm = TRUE, ... ) { assays <- c(x, y) if (any(sapply( X = assays, FUN = function(assay.i) inherits(x = assay.i, what = "Assay5") ))) { return(merge(x = as(x, "Assay5"), y, ...)) } parent.call <- grep(pattern = "merge.Seurat", x = sys.calls()) if (length(x = parent.call) > 0) { # Try and fill in missing residuals if called in the context of merge.Seurat all.features <- unique( x = unlist( x = lapply( X = assays, FUN = function(assay) { if (inherits(x = assay, what = "SCTAssay")) { return(rownames(x = GetAssayData(object = assay, slot = "scale.data"))) } }) ) ) if (!is.null(all.features)) { assays <- lapply(X = 1:length(x = assays), FUN = function(assay) { if (inherits(x = assays[[assay]], what = "SCTAssay")) { parent.environ <- sys.frame(which = parent.call[1]) seurat.object <- parent.environ$objects[[assay]] seurat.object <- suppressWarnings(expr = GetResidual(object = seurat.object, features = all.features, assay = parent.environ$assay, verbose = FALSE)) return(seurat.object[[parent.environ$assay]]) } return(assays[[assay]]) }) } } sct.check <- sapply(X = assays, FUN = function(x) inherits(x = x, what = "SCTAssay")) if (any(!sct.check)) { warning("Attempting to merge an SCTAssay with another Assay type \n", "Converting all to standard Assay objects.", call. 
= FALSE) assays <- lapply(1:length(x = assays), FUN = function(x) { if (sct.check[x]) { assays[[x]] <- as(object = assays[[x]], Class = "Assay") } return(assays[[x]]) }) combined.assay <- merge( x = assays[[1]], y = assays[2:length(x = assays)], add.cell.ids = add.cell.ids, merge.data = merge.data ) return(combined.assay) } combined.assay <- NextMethod() all.levels <- unlist(x = lapply(X = assays, FUN = levels)) while (anyDuplicated(x = all.levels)) { levels.duplicate <- which(x = duplicated(x = all.levels)) all.levels <- sapply(X = 1:length(x = all.levels), FUN = function(l) { if (l %in% levels.duplicate) { return(tryCatch( expr = as.numeric(x = all.levels[l]) + 1, warning = function(...) { make.unique(names = all.levels)[l] }, error = function(...){ make.unique(names = all.levels)[l] } )) } else { return(all.levels[l]) } }) } scale.data <- lapply(X = assays, FUN = function(x) { dat <- GetAssayData(object = x, slot = "scale.data") if (ncol(x = dat) == 0) { dat <- matrix(ncol = ncol(x = x)) } return(dat) }) all.features <- lapply(X = scale.data, FUN = rownames) if (na.rm) { # merge intersection of possible residuals scaled.features <- names(x = which(x = table(x = unlist(x = all.features)) == length(x = assays))) if (length(x = scaled.features) == 0) { scale.data <- list(new(Class = "matrix")) } else { scale.data <- lapply(X = scale.data, FUN = function(x) x[scaled.features, ]) } } else { scaled.features <- unique(x = unlist(x = all.features)) scale.data <- lapply(X = 1:length(x = scale.data), FUN = function(x) { na.features <- setdiff(x = scaled.features, y = rownames(x = scale.data[[x]])) na.mat <- matrix( data = NA, nrow = length(x = na.features), ncol = ncol(x = assays[[x]]), dimnames = list(na.features, colnames(x = assays[[x]])) ) return(rbind(scale.data[[x]], na.mat)[scaled.features, ]) }) } scale.data <- do.call(what = cbind, args = scale.data) combined.assay <- SetAssayData(object = combined.assay, slot = "scale.data", new.data = scale.data) model.list <- unlist(x = lapply( X = assays, FUN = slot, name = "SCTModel.list" )) names(x = model.list) <- all.levels model.list <- model.list %||% list() combined.assay <- new( Class = "SCTAssay", combined.assay, SCTModel.list = model.list ) features <- VariableFeatures(object = combined.assay) VariableFeatures(object = combined.assay) <- features return(combined.assay) } #' Subset an AnchorSet object #' #' @inheritParams base::subset #' @param score.threshold Only anchor pairs with scores greater than this value #' are retained. #' @param disallowed.dataset.pairs Remove any anchors formed between the #' provided pairs. E.g. \code{list(c(1, 5), c(1, 2))} filters out any anchors between #' datasets 1 and 5 and datasets 1 and 2. #' @param dataset.matrix Provide a binary matrix specifying whether a dataset #' pair is allowable (1) or not (0). Should be a dataset x dataset matrix. #' @param group.by Grouping variable to determine allowable ident pairs #' @param disallowed.ident.pairs Remove any anchors formed between provided #' ident pairs. E.g. \code{list(c("CD4", "CD8"), c("B-cell", "T-cell"))} #' @param ident.matrix Provide a binary matrix specifying whether an ident pair #' is allowable (1) or not (0). 
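# Illustrative sketch of the merge behaviour documented above; `s1` and `s2`
# are assumed Seurat objects that were each normalized with SCTransform(), so
# both carry an SCTAssay named "SCT".
library(Seurat)
merged <- merge(x = s1, y = s2, add.cell.ids = c("rep1", "rep2"))
# One SCT model per input object is retained in the merged assay
levels(x = merged[["SCT"]])
# With the default na.rm = TRUE, the merged scale.data is restricted to
# residuals available for every input (when called via merge() on Seurat
# objects, missing residuals may first be recomputed with GetResidual())
dim(x = GetAssayData(object = merged[["SCT"]], slot = "scale.data"))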
Should be an ident x ident symmetric matrix #' #' @return Returns an \code{\link{AnchorSet}} object with specified anchors #' filtered out #' #' @export #' @method subset AnchorSet #' @concept objects #' subset.AnchorSet <- function( x, score.threshold = NULL, disallowed.dataset.pairs = NULL, dataset.matrix = NULL, group.by = NULL, disallowed.ident.pairs = NULL, ident.matrix = NULL, ... ) { if (!is.null(x = disallowed.dataset.pairs) && !is.null(x = dataset.matrix)) { stop("Please use either disallowed.dataset.pairs OR dataset.matrix, not both.") } # Filter based on scores if (!is.null(x = score.threshold)) { if (score.threshold > 1 | score.threshold < 0) { stop( "Anchors are scored on a scale between 0 and 1. Please provide a value", " in that range to score.threshold." ) } anchors <- slot(object = x, name = "anchors") anchors <- anchors[anchors[, 'score'] > score.threshold, , drop = FALSE] slot(object = x, name = "anchors") <- anchors } object.names <- names(x = slot(object = x, name = "object.list")) num.obs <- length(x = object.names) # Filter based on dataset pairings if (!is.null(x = disallowed.dataset.pairs)) { dataset.matrix <- matrix(data = 1, nrow = num.obs, ncol = num.obs) for(i in 1:length(x = disallowed.dataset.pairs)) { pair <- disallowed.dataset.pairs[[i]] if (length(x = pair) != 2) { stop("Please ensure all list items in disallowed.dataset.pairs are of length 2.") } if (any(pair %in% object.names)) { pair[which(pair %in% object.names)] <- sapply( X = pair[which(pair %in% object.names)], FUN = function(x) { which(object.names == x) }) } pair <- as.numeric(x = pair) dataset.matrix[pair[1], pair[2]] <- 0 } } if (!is.null(x = dataset.matrix)) { if (any(dim(x = dataset.matrix) != c(num.obs, num.obs))){ stop("Please provide a dataset.matrix that is ", num.obs, " x ", num.obs, ".") } anchors <- slot(object = x, name = "anchors") pairs <- which(dataset.matrix == 0, arr.ind = TRUE) for (i in 1:nrow(x = pairs)) { anchors <- anchors[-which(x = anchors$dataset1 == pairs[i, 1] & anchors$dataset2 == pairs[i, 2]), ] anchors <- anchors[-which(x = anchors$dataset1 == pairs[i, 2] & anchors$dataset2 == pairs[i, 1]), ] } slot(object = x, name = "anchors") <- anchors } # Filter based on ident pairings if (!is.null(x = group.by)) { anchors <- AnnotateAnchors(anchors = x, vars = group.by) if (!is.null(x = disallowed.ident.pairs) && !is.null(x = ident.matrix)) { stop("Please use either disallowed.ident.pairs OR ident.matrix, not both.") } unique.ids <- unique(x = c( as.character(x = anchors[, paste0("cell1.", group.by)]), as.character(x = anchors[, paste0("cell2.", group.by)])) ) unique.ids <- unique.ids[!is.na(x = unique.ids)] num.ids <- length(x = unique.ids) if (!is.null(x = disallowed.ident.pairs)) { ident.matrix <- matrix(data = 1, nrow = num.ids, ncol = num.ids) rownames(x = ident.matrix) <- unique.ids colnames(x = ident.matrix) <- unique.ids for(i in 1:length(x = disallowed.ident.pairs)) { pair <- disallowed.ident.pairs[[i]] if (length(x = pair) != 2) { stop("Please ensure all list items in disallowed.dataset.pairs are of length 2.") } ident.matrix[pair[1], pair[2]] <- 0 } } if (!is.null(x = ident.matrix)) { if (any(dim(x = ident.matrix) != c(num.ids, num.ids))){ stop("Please provide a dataset.matrix that is ", num.ids, " x ", num.ids, ".") } to.remove <- c() pairs <- which(ident.matrix == 0, arr.ind = TRUE) for (i in 1:nrow(x = pairs)) { id1 <- rownames(x = ident.matrix)[pairs[i, 1]] id2 <- colnames(x = ident.matrix)[pairs[i, 2]] to.remove <- c(to.remove, which(x = anchors[, 
paste0("cell1.", group.by)] == id1 & anchors[, paste0("cell2.", group.by)] == id2)) to.remove <- c(to.remove, which(x = anchors[, paste0("cell1.", group.by)] == id2 & anchors[, paste0("cell2.", group.by)] == id1)) } anchors <- slot(object = x, name = "anchors") anchors <- anchors[-to.remove, ] slot(object = x, name = "anchors") <- anchors } } return(x) } #' @export #' @method subset SCTAssay #' @concept objects #' subset.SCTAssay <- function(x, cells = NULL, features = NULL, ...) { x <- NextMethod() models <- levels(x = x) for (m in models) { attr <- SCTResults(object = x, slot = "cell.attributes", model = m) attr <- attr[intersect(x = rownames(x = attr), y = Cells(x = x)), , drop = FALSE] SCTResults(object = x, slot = "cell.attributes", model = m) <- attr if (nrow(x = attr) == 0) { slot(object = x, name = 'SCTModel.list')[[m]] <- NULL } } return(x) } #' @method subset SlideSeq #' @concept objects #' @export #' subset.SlideSeq <- function(x, cells, ...) { x <- subset.VisiumV1(x = x, cells = cells, ...) return(x) } #' @method subset STARmap #' @concept objects #' @export #' subset.STARmap <- function(x, cells, ...) { x <- subset.VisiumV1(x = x, cells = cells, ...) qhulls <- GetTissueCoordinates(object = x, qhulls = TRUE) qhulls <- qhulls[qhulls$cell %in% cells, ] slot(object = x, name = 'qhulls') <- qhulls return(x) } #' @method subset VisiumV1 #' @concept objects #' @export #' subset.VisiumV1 <- function(x, cells, ...) { coordinates <- GetTissueCoordinates(object = x, scale = NULL, cols = NULL) cells <- cells[cells %in% rownames(x = coordinates)] coordinates <- coordinates[cells, ] slot(object = x, name = 'coordinates') <- coordinates return(x) } #' Update pre-V4 Assays generated with SCTransform in the Seurat to the new #' SCTAssay class # #' @param object A Seurat object #' @export #' @concept objects #' @return A Seurat object with updated SCTAssays #' UpdateSCTAssays <- function(object) { assays <- Assays(object = object) for (assay in assays) { if (IsSCT(assay = object[[assay]]) && !inherits(x = object[[assay]], what = "SCTAssay")) { object[[assay]] <- as(object = object[[assay]], Class = "SCTAssay") } } return(object) } #%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% # S4 methods #%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% #' @rdname SCTAssay-class #' @name SCTAssay-class #' #' @section Creating an \code{SCTAssay} from an \code{Assay}: #' Conversion from an \code{Assay} object to an \code{SCTAssay} object by #' is done by adding the additional slots to the object. 
If \code{from} has #' results generated by \code{\link{SCTransform}} from Seurat v3.0.0 to v3.1.1, #' the conversion will automagically fill the new slots with the data #' setAs( from = 'Assay', to = 'SCTAssay', def = function(from) { object.list <- sapply( X = slotNames(x = from), FUN = slot, object = from, simplify = FALSE, USE.NAMES = TRUE ) object.list <- c( list('Class' = 'SCTAssay'), object.list ) if (IsSCT(assay = from)) { vst.slots <- c('vst.set', 'vst.out') vst.use <- vst.slots[vst.slots %in% names(x = Misc(object = from))][1] vst.res <- Misc(object = from, slot = vst.use) umi.assay <- Misc(object = from, slot = "umi.assay") if (vst.use == 'vst.out') { vst.res <- list(vst.res) umi.assay <- list(umi.assay) } if (length(x = vst.res) == 0) { vst.res <- list() } else if (length(x = vst.res) > 0) { vst.res <- lapply( X = 1:length(x = vst.res), FUN = function(i) { vst.res[[i]]$umi.assay <- umi.assay[[i]] return(PrepVSTResults( vst.res = vst.res[[i]], cell.names = colnames(x = from) )) } ) names(x = vst.res) <- paste0("model", 1:length(x = vst.res)) } object.list$misc[[vst.use]] <- NULL object.list$SCTModel.list <- vst.res } return(do.call(what = 'new', args = object.list)) } ) setMethod( f = 'show', signature = 'TransferAnchorSet', definition = function(object) { cat('An AnchorSet object containing', nrow(x = slot(object = object, name = "anchors")), "anchors between the reference and query Seurat objects. \n", "This can be used as input to TransferData.\n") } ) setMethod( f = 'show', signature = 'IntegrationAnchorSet', definition = function(object) { cat('An AnchorSet object containing', nrow(x = slot(object = object, name = "anchors")), "anchors between", length(x = slot(object = object, name = "object.list")), "Seurat objects \n", "This can be used as input to IntegrateData.\n") } ) setMethod( f = 'show', signature = 'ModalityWeights', definition = function(object) { cat( 'A ModalityWeights object containing modality weights between', paste(slot(object = object, name = "modality.assay"), collapse = " and "), "assays \n", "This can be used as input to FindMultiModelNeighbors.\n") } ) setMethod( f = 'show', signature = 'BridgeReferenceSet', definition = function(object) { cat( 'A BridgeReferenceSet object has a bridge object with ', ncol(slot(object = object, name = 'bridge')), 'cells and a reference object with ', ncol(slot(object = object, name = 'reference')), 'cells. 
\n','The bridge query reduction is ', slot(object = object, name = 'params')$bridge.query.reduction %||% slot(object = object, name = 'params')$supervised.reduction, "\n This can be used as input to FindBridgeTransferAnchors and FindBridgeIntegrationAnchors") } ) setMethod( f = 'show', signature = 'SCTModel', definition = function(object) { cat( "An sctransform model.\n", " Model formula: ", slot(object = object, name = "model"), "\n Parameters stored for", nrow(x = SCTResults(object = object, slot = "feature.attributes")), "features,", nrow(x = SCTResults(object = object, slot = "cell.attributes")), "cells.\n") } ) #' @importFrom utils head # setMethod( f = 'show', signature = 'SCTAssay', definition = function(object) { cat('SCTAssay data with', nrow(x = object), 'features for', ncol(x = object), 'cells, and', length(x = levels(x = object)) , 'SCTModel(s) \n') if (length(x = VariableFeatures(object = object)) > 0) { top.ten <- head(x = VariableFeatures(object = object), n = 10L) top <- 'Top' variable <- 'variable' } else { top.ten <- head(x = rownames(x = object), n = 10L) top <- 'First' variable <- '' } features <- paste0( variable, ' feature', if (length(x = top.ten) != 1) {'s'}, ":\n" ) features <- gsub(pattern = '^\\s+', replacement = '', x = features) cat( top, length(x = top.ten), features, paste(strwrap(x = paste(top.ten, collapse = ', ')), collapse = '\n'), '\n' ) } ) #%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% # Internal #%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% # Internal AddMetaData defintion # # @param object An object # @param metadata A vector, list, or data.frame with metadata to add # @param col.name A name for meta data if not a named list or data.frame # # @return object with metadata added # .AddMetaData <- function(object, metadata, col.name = NULL) { object <- UpdateSlots(object = object) if (is.null(x = col.name) && is.atomic(x = metadata)) { stop("'col.name' must be provided for atomic metadata types (eg. vectors)") } if (inherits(x = metadata, what = c('matrix', 'Matrix'))) { metadata <- as.data.frame(x = metadata) } col.name <- col.name %||% names(x = metadata) %||% colnames(x = metadata) if (is.null(x = col.name)) { stop("No metadata name provided and could not infer it from metadata object") } object[[col.name]] <- metadata # if (class(x = metadata) == "data.frame") { # for (ii in 1:ncol(x = metadata)) { # object[[colnames(x = metadata)[ii]]] <- metadata[, ii, drop = FALSE] # } # } else { # object[[col.name]] <- metadata # } return(object) } # Find the names of collections in an object # # @return A vector with the names of slots that are a list # Collections <- function(object) { collections <- vapply( X = slotNames(x = object), FUN = function(x) { return(any(grepl(pattern = 'list', x = class(x = slot(object = object, name = x))))) }, FUN.VALUE = logical(length = 1L) ) collections <- Filter(f = isTRUE, x = collections) return(names(x = collections)) } # Get the default image of an object # # Attempts to find all images associated with the default assay of the object. # If none present, finds all images present in the object. 
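# Illustrative sketch of the metadata rules enforced by the internal
# .AddMetaData() helper above; `object` is an assumed Seurat object and the
# metadata values are made up.
library(Seurat)
# Atomic vectors require an explicit column name
object <- AddMetaData(
  object = object,
  metadata = runif(n = ncol(x = object)),
  col.name = "random.score"
)
# Data frames keep their own column names; rows are matched by cell barcode
batch.df <- data.frame(
  batch = sample(x = c("A", "B"), size = ncol(x = object), replace = TRUE),
  row.names = colnames(x = object)
)
object <- AddMetaData(object = object, metadata = batch.df)
head(x = object[[]])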
Returns the name of # the first image # # @param object A Seurat object # # @return The name of the default image # DefaultImage <- function(object) { object <- UpdateSlots(object = object) images <- Images(object = object, assay = DefaultAssay(object = object)) if (length(x = images) < 1) { images <- Images(object = object) } return(images[[1]]) } # Get the names of objects within a Seurat object that are of a certain class # # @param object A Seurat object # @param classes.keep A vector of names of classes to get # # @return A vector with the names of objects within the Seurat object that are of class \code{classes.keep} # #' @importFrom stats na.omit # FilterObjects <- function(object, classes.keep = c('Assay', 'DimReduc')) { object <- UpdateSlots(object = object) slots <- na.omit(object = Filter( f = function(x) { sobj <- slot(object = object, name = x) return(is.list(x = sobj) && !is.data.frame(x = sobj) && !is.package_version(x = sobj)) }, x = slotNames(x = object) )) slots <- grep(pattern = 'tools', x = slots, value = TRUE, invert = TRUE) slots <- grep(pattern = 'misc', x = slots, value = TRUE, invert = TRUE) slots.objects <- unlist( x = lapply( X = slots, FUN = function(x) { return(names(x = slot(object = object, name = x))) } ), use.names = FALSE ) object.classes <- sapply( X = slots.objects, FUN = function(i) { return(inherits(x = object[[i]], what = classes.keep)) } ) object.classes <- which(x = object.classes, useNames = TRUE) return(names(x = object.classes)) } # Find the collection of an object within a Seurat object # # @param object A Seurat object # @param name Name of object to find # # @return The collection (slot) of the object # FindObject <- function(object, name) { collections <- c( 'assays', 'graphs', 'neighbors', 'reductions', 'commands', 'images' ) object.names <- lapply( X = collections, FUN = function(x) { return(names(x = slot(object = object, name = x))) } ) names(x = object.names) <- collections object.names <- Filter(f = Negate(f = is.null), x = object.names) for (i in names(x = object.names)) { if (name %in% names(x = slot(object = object, name = i))) { return(i) } } return(NULL) } # Prepare VST results for use with SCTAssay objects # # @param vst.res Results from sctransform::vst # @param cell.names Vector of valid cell names still in object # # @return An SCTModel object. 
# # PrepVSTResults <- function(vst.res, cell.names) { # Prepare cell attribute information cell.attrs <- vst.res$cell_attr cell.names <- intersect(x = cell.names, y = rownames(x = cell.attrs)) cell.cols <- c( 'umi', 'gene', 'log_umi', 'log_gene', 'umi_per_gene', 'log_umi_per_gene' ) cell.cols <- intersect(x = cell.cols, y = colnames(x = cell.attrs)) cell.attrs <- cell.attrs[cell.names, cell.cols, drop = FALSE] colnames(x = cell.attrs) <- gsub( pattern = 'gene', replacement = 'feature', x = colnames(x = cell.attrs) ) if (!is.null(x = vst.res$cells_step1)) { cell.attrs[, "cells_step1"] <- FALSE cells_step1 <- intersect(x = vst.res$cells_step1, y = rownames(x = cell.attrs)) cell.attrs[cells_step1, "cells_step1"] <- TRUE } # Prepare feature attribute information feature.attrs <- vst.res$gene_attr feature.cols <- c( 'detection_rate', 'gmean', 'variance', 'residual_mean', 'residual_variance' ) feature.cols <- intersect(x = feature.cols, y = colnames(x = feature.attrs)) feature.attrs <- feature.attrs[, feature.cols, drop = FALSE] feature.attrs <- cbind(feature.attrs, vst.res$model_pars_fit[rownames(feature.attrs), , drop = FALSE]) if (!is.null(x = vst.res$genes_log_gmean_step1)) { feature.attrs[,"genes_log_gmean_step1"] <- FALSE genes_step1 <- intersect( x = names(vst.res$genes_log_gmean_step1), y = rownames(feature.attrs) ) feature.attrs[genes_step1,"genes_log_gmean_step1"] <- TRUE # add parameters from step1 feature.attrs[, paste0("step1_", colnames(vst.res$model_pars))] <- NA feature.attrs[genes_step1, paste0("step1_", colnames(vst.res$model_pars))] <- vst.res$model_pars[genes_step1,] } # Prepare clipping information clips <- list( 'vst' = vst.res$arguments$res_clip_range, 'sct' = vst.res$arguments$sct.clip.range ) median_umi <- NA # check if a custom scale_factor was provided to vst() if ("scale_factor" %in% names(vst.res$arguments)){ median_umi <- vst.res$arguments$scale_factor } if (is.na(median_umi)) { if ("umi" %in% colnames(x = cell.attrs)) { median_umi <- median(cell.attrs$umi) } else if ("log_umi" %in% colnames(x = cell.attrs)) { median_umi <- median(10 ^ cell.attrs$log_umi) } } vst.res.SCTModel <- SCTModel( feature.attributes = feature.attrs, cell.attributes = cell.attrs, clips = clips, umi.assay = vst.res$umi.assay %||% "RNA", model = vst.res$model_str, arguments = vst.res$arguments, median_umi = median_umi ) return(vst.res.SCTModel) } # Return a null image # # @param mode Image representation to return # see \code{\link{GetImage}} for more details # #' @importFrom grid nullGrob #' @importFrom grDevices as.raster # NullImage <- function(mode) { image <- switch( EXPR = mode, 'grob' = nullGrob(), 'raster' = as.raster(x = new(Class = 'matrix')), 'plotly' = list('visible' = FALSE), 'raw' = NULL, stop("Unknown image mode: ", mode, call. 
= FALSE) ) return(image) } # Check to see if projected loadings have been set # # @param object a DimReduc object # # @return TRUE if proejcted loadings have been set, else FALSE # Projected <- function(object) { projected.dims <- dim(x = slot(object = object, name = 'feature.loadings.projected')) if (all(projected.dims == 1)) { return(!all(is.na(x = slot(object = object, name = 'feature.loadings.projected')))) } return(!all(projected.dims == 0)) } # Subset cells in vst data # @param sct.info A vst.out list # @param cells vector of cells to retain # @param features vector of features to retain SubsetVST <- function(sct.info, cells, features) { cells.keep <- intersect(x = cells, y = rownames(x = sct.info$cell_attr)) sct.info$cell_attr <- sct.info$cell_attr[cells.keep, ] # find which subset of features are in the SCT assay feat.keep <- intersect(x = features, y = rownames(x = sct.info$gene_attr)) sct.info$gene_attr <- sct.info$gene_attr[feat.keep, ] return(sct.info) } # Get the top # # @param data Data to pull the top from # @param num Pull top \code{num} # @param balanced Pull even amounts of from positive and negative values # # @return The top \code{num} # @seealso \{code{\link{TopCells}}} \{code{\link{TopFeatures}}} # #' @importFrom utils head tail # Top <- function(data, num, balanced) { nr <- nrow(x = data) if (num > nr) { warning("Requested number is larger than the number of available items (", nr, "). Setting to ", nr , ".", call. = FALSE) num <- nr } if (num == 1) { balanced <- FALSE } top <- if (balanced) { num <- round(x = num / 2) data <- data[order(data, decreasing = TRUE), , drop = FALSE] positive <- head(x = rownames(x = data), n = num) negative <- rev(x = tail(x = rownames(x = data), n = num)) # remove duplicates if (positive[num] == negative[num]) { negative <- negative[-num] } list(positive = positive, negative = negative) } else { data <- data[rev(x = order(abs(x = data))), , drop = FALSE] top <- head(x = rownames(x = data), n = num) top[order(data[top, ])] } return(top) } # Update Seurat assay # # @param old.assay Seurat2 assay # @param assay Name to store for assay in new object # UpdateAssay <- function(old.assay, assay){ cells <- colnames(x = old.assay@data) counts <- old.assay@raw.data data <- old.assay@data if (!inherits(x = counts, what = 'dgCMatrix')) { counts <- as.sparse(x = as.matrix(x = counts)) } if (!inherits(x = data, what = 'dgCMatrix')) { data <- as.sparse(x = as.matrix(x = data)) } new.assay <- new( Class = 'Assay', counts = counts[, cells], data = data, scale.data = old.assay@scale.data %||% new(Class = 'matrix'), meta.features = data.frame(row.names = rownames(x = counts)), var.features = old.assay@var.genes, key = paste0(assay, "_") ) return(new.assay) } # Update dimension reduction # # @param old.dr Seurat2 dimension reduction slot # @param assay.used Name of assay used to compute dimension reduction # UpdateDimReduction <- function(old.dr, assay) { new.dr <- list() for (i in names(x = old.dr)) { cell.embeddings <- old.dr[[i]]@cell.embeddings %||% new(Class = 'matrix') feature.loadings <- old.dr[[i]]@gene.loadings %||% new(Class = 'matrix') stdev <- old.dr[[i]]@sdev %||% numeric() misc <- old.dr[[i]]@misc %||% list() new.jackstraw <- UpdateJackstraw(old.jackstraw = old.dr[[i]]@jackstraw) old.key <- old.dr[[i]]@key if (length(x = old.key) == 0) { old.key <- gsub(pattern = "(.+?)(([0-9]+).*)", replacement = "\\1", x = colnames(cell.embeddings)[[1]]) if (length(x = old.key) == 0) { old.key <- i } } new.key <- suppressWarnings(expr = UpdateKey(key = 
old.key)) colnames(x = cell.embeddings) <- gsub( pattern = old.key, replacement = new.key, x = colnames(x = cell.embeddings) ) colnames(x = feature.loadings) <- gsub( pattern = old.key, replacement = new.key, x = colnames(x = feature.loadings) ) new.dr[[i]] <- new( Class = 'DimReduc', cell.embeddings = as(object = cell.embeddings, Class = 'matrix'), feature.loadings = as(object = feature.loadings, Class = 'matrix'), assay.used = assay, stdev = as(object = stdev, Class = 'numeric'), key = as(object = new.key, Class = 'character'), jackstraw = new.jackstraw, misc = as(object = misc, Class = 'list') ) } return(new.dr) } # Update jackstraw # # @param old.jackstraw # UpdateJackstraw <- function(old.jackstraw) { if (is.null(x = old.jackstraw)) { new.jackstraw <- new( Class = 'JackStrawData', empirical.p.values = new(Class = 'matrix'), fake.reduction.scores = new(Class = 'matrix'), empirical.p.values.full = new(Class = 'matrix'), overall.p.values = new(Class = 'matrix') ) } else { if (.hasSlot(object = old.jackstraw, name = 'overall.p.values')) { overall.p <- old.jackstraw@overall.p.values %||% new(Class = 'matrix') } else { overall.p <- new(Class = 'matrix') } new.jackstraw <- new( Class = 'JackStrawData', empirical.p.values = old.jackstraw@emperical.p.value %||% new(Class = 'matrix'), fake.reduction.scores = old.jackstraw@fake.pc.scores %||% new(Class = 'matrix'), empirical.p.values.full = old.jackstraw@emperical.p.value.full %||% new(Class = 'matrix'), overall.p.values = overall.p ) } return(new.jackstraw) } # Update a Key # # @param key A character to become a Seurat Key # # @return An updated Key that's valid for Seurat # UpdateKey <- function(key) { if (grepl(pattern = '^[[:alnum:]]+_$', x = key)) { return(key) } else { new.key <- regmatches( x = key, m = gregexpr(pattern = '[[:alnum:]]+', text = key) ) new.key <- paste0(paste(unlist(x = new.key), collapse = ''), '_') if (new.key == '_') { new.key <- paste0(RandomName(length = 3), '_') } warning( "Keys should be one or more alphanumeric characters followed by an underscore, setting key from ", key, " to ", new.key, call. = FALSE, immediate. = TRUE ) return(new.key) } } # Update slots in an object # # @param object An object to update # # @return \code{object} with the latest slot definitions # #' @importFrom rlang exec !!! UpdateSlots <- function(object) { object.list <- sapply( X = slotNames(x = object), FUN = function(x) { return(tryCatch( expr = slot(object = object, name = x), error = function(...) { return(NULL) } )) }, simplify = FALSE, USE.NAMES = TRUE ) object.list <- Filter(f = Negate(f = is.null), x = object.list) object.list <- c('Class' = class(x = object)[1], object.list) object <- exec( .fn = new, !!! object.list ) for (x in setdiff(x = slotNames(x = object), y = names(x = object.list))) { xobj <- slot(object = object, name = x) if (is.vector(x = xobj) && !is.list(x = xobj) && length(x = xobj) == 0) { slot(object = object, name = x) <- vector(mode = class(x = xobj), length = 1L) } } return(object) } # Pulls the proper data matrix for merging assay data. If the slot is empty, will return an empty # matrix with the proper dimensions from one of the remaining data slots. # # @param assay Assay to pull data from # @param slot Slot to pull from # # @return Returns the data matrix if present (i.e.) not 0x0. 
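# Illustrative sketch of the key convention that the internal UpdateKey()
# helper above enforces: a valid key is one or more alphanumeric characters
# followed by a single underscore. UpdateKey() is not exported, so outside the
# package it would have to be reached as Seurat:::UpdateKey().
UpdateKey(key = "PC_")    # already valid, returned unchanged
UpdateKey(key = "t-SNE")  # non-alphanumerics dropped, becomes "tSNE_" (with a warning)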
Otherwise, returns an # appropriately sized empty sparse matrix # #' @importFrom Matrix Matrix # ValidateDataForMerge <- function(assay, slot) { mat <- GetAssayData(object = assay, slot = slot) if (any(dim(x = mat) == c(0, 0))) { slots.to.check <- setdiff(x = c("counts", "data", "scale.data"), y = slot) for (ss in slots.to.check) { data.dims <- dim(x = GetAssayData(object = assay, slot = ss)) data.slot <- ss if (!any(data.dims == c(0, 0))) { break } } if (any(data.dims == c(0, 0))) { stop("The counts, data, and scale.data slots are all empty for the provided assay.") } mat <- Matrix( data = 0, nrow = data.dims[1], ncol = data.dims[2], dimnames = dimnames(x = GetAssayData(object = assay, slot = data.slot)) ) mat <- as.sparse(x = mat) } return(mat) } Seurat/R/mixscape.R0000644000176200001440000013760414525500037013666 0ustar liggesusers#' @include generics.R #' NULL #%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% # Functions #%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% #' Calculate a perturbation Signature #' #' Function to calculate perturbation signature for pooled CRISPR screen datasets. #' For each target cell (expressing one target gRNA), we identified 20 cells #' from the control pool (non-targeting cells) with the most similar mRNA #' expression profiles. The perturbation signature is calculated by subtracting the #' averaged mRNA expression profile of the non-targeting neighbors from the mRNA #' expression profile of the target cell. #' #' @param object An object of class Seurat. #' @param assay Name of Assay PRTB signature is being calculated on. #' @param features Features to compute PRTB signature for. Defaults to the #' variable features set in the assay specified. #' @param slot Data slot to use for PRTB signature calculation. #' @param gd.class Metadata column containing target gene classification. #' @param nt.cell.class Non-targeting gRNA cell classification identity. #' @param split.by Provide metadata column if multiple biological replicates #' exist to calculate PRTB signature for every replicate separately. #' @param num.neighbors Number of nearest neighbors to consider. #' @param ndims Number of dimensions to use from dimensionality reduction method. #' @param reduction Reduction method used to calculate nearest neighbors. #' @param new.assay.name Name for the new assay. #' @param verbose Display progress + messages #' @return Returns a Seurat object with a new assay added containing the #' perturbation signature for all cells in the data slot. #' #' @importFrom RANN nn2 #' @export #' @concept mixscape #' CalcPerturbSig <- function( object, assay = NULL, features = NULL, slot = "data", gd.class = "guide_ID", nt.cell.class = "NT", split.by = NULL, num.neighbors = NULL, reduction = "pca", ndims = 15, new.assay.name = "PRTB", verbose = TRUE ) { assay <- assay %||% DefaultAssay(object = object ) if (is.null(x = reduction)) { stop('Please provide dimensionality reduction name.') } if (is.null(x = num.neighbors)) { stop("Please specify number of nearest neighbors to consider") } if (is.null(x = ndims)) { stop("Please provide number of ", reduction, " dimensions to consider") } features <- features %||% VariableFeatures(object = object[[assay]]) if (length(x = features) == 0) { features <- rownames(x = GetAssayData(object = object[[assay]], slot = slot)) } if (! 
is.null(x = split.by)) { Idents(object = object) <- split.by } else { Idents(object = object) <- "rep1" } replicate <- unique(x = Idents(object = object)) all_diff <- list() all_nt_cells <- Cells(x = object)[which(x = object[[]][gd.class] == nt.cell.class)] all_neighbors <- list() for (r in replicate) { if (verbose) { message("Processing ", r) } all_cells <- WhichCells(object = object, idents = r) nt_cells <- intersect(x = all_nt_cells, all_cells) # get pca cell embeddings all_mtx <- Embeddings(object = object, reduction = reduction)[all_cells, ] nt_mtx <- Embeddings(object = object, reduction = reduction)[nt_cells, ] # run nn2 to find the 20 nearest NT neighbors for all cells. Use the same # number of PCs as the ones you used for umap neighbors <- NNHelper( data = nt_mtx[, 1:ndims], query = all_mtx[, 1:ndims], k = num.neighbors, method = "rann" ) diff <- PerturbDiff( object = object, assay = assay, slot = slot, all_cells = all_cells, nt_cells = nt_cells, features = features, neighbors = neighbors, verbose = verbose ) all_diff[[r]] <- diff all_neighbors[[make.names(names = paste0(new.assay.name, "_", r))]] <- neighbors } slot(object = object, name = "tools")[[paste("CalcPerturbSig", assay, reduction, sep = ".")]] <- all_neighbors all_diff <- do.call(what = cbind, args = all_diff) prtb.assay <- suppressWarnings( expr = CreateAssayObject( data = all_diff[, colnames(x = object)], min.cells = -Inf, min.features = -Inf, check.matrix = FALSE ) ) object[[new.assay.name]] <- prtb.assay object <- LogSeuratCommand(object = object) return(object) } #' DE and EnrichR pathway visualization barplot #' #' @inheritParams FindMarkers #' @param object Name of object class Seurat. #' @param ident.1 Cell class identity 1. #' @param ident.2 Cell class identity 2. #' @param balanced Option to display pathway enrichments for both negative and #' positive DE genes.If false, only positive DE gene will be displayed. #' @param max.genes Maximum number of genes to use as input to enrichR. #' @param p.val.cutoff Cutoff to select DE genes. #' @param cols A list of colors to use for barplots. #' @param enrich.database Database to use from enrichR. #' @param num.pathway Number of pathways to display in barplot. #' @param return.gene.list Return list of DE genes #' #' @return Returns one (only enriched) or two (both enriched and depleted) #' barplots with the top enriched/depleted GO terms from EnrichR. #' #' @importFrom ggplot2 ggplot geom_bar geom_density coord_flip scale_fill_manual #' ylab ggtitle theme_classic theme element_text #' @importFrom patchwork wrap_plots #' #' @export #' @concept mixscape DEenrichRPlot <- function( object, ident.1 = NULL, ident.2 = NULL, balanced = TRUE, logfc.threshold = 0.25, assay = NULL, max.genes, test.use = 'wilcox', p.val.cutoff = 0.05, cols = NULL, enrich.database = NULL, num.pathway = 10, return.gene.list = FALSE, ... ) { enrichr.installed <- PackageCheck("enrichR", error = FALSE) if (!enrichr.installed[1]) { stop( "Please install the enrichR package to use DEenrichRPlot", "\nThis can be accomplished with the following command: ", "\n----------------------------------------", "\ninstall.packages('enrichR')", "\n----------------------------------------", call. 
= FALSE ) } if (is.null(x = enrich.database)) { stop("Please specify the name of enrichR database to use") } if (!is.numeric(x = max.genes)) { stop("please set max.genes") } assay <- assay %||% DefaultAssay(object = object) DefaultAssay(object = object) <- assay all.markers <- FindMarkers( object = object, ident.1 = ident.1, ident.2 = ident.2, only.pos = FALSE, logfc.threshold = logfc.threshold, test.use = test.use, assay = assay ) pos.markers <- all.markers[all.markers[, 2] > logfc.threshold & all.markers[, 1] < p.val.cutoff, , drop = FALSE] if(nrow(pos.markers) == 0){ message("No positive markers pass the logfc.thershold") pos.er <- c() } else{ pos.markers.list <- rownames(x = pos.markers)[1:min(max.genes, nrow(x = pos.markers))] pos.er <- enrichR::enrichr(genes = pos.markers.list, databases = enrich.database) pos.er <- do.call(what = cbind, args = pos.er) pos.er$log10pval <- -log10(x = pos.er[, paste(enrich.database, sep = ".", "P.value")]) pos.er$term <- pos.er[, paste(enrich.database, sep = ".", "Term")] pos.er <- pos.er[1:num.pathway, ] pos.er$term <- factor(x = pos.er$term, levels = pos.er$term[order(pos.er$log10pval)]) gene.list <- list(pos = pos.er) } if (isTRUE(x = balanced)) { neg.markers <- all.markers[all.markers[, 2] < -logfc.threshold & all.markers[, 1] < p.val.cutoff, , drop = FALSE] neg.markers.list <- rownames(x = neg.markers)[1:min(max.genes, nrow(x = neg.markers))] Sys.sleep(1) neg.er <- enrichR::enrichr(genes = neg.markers.list, databases = enrich.database) neg.er <- do.call(what = cbind, args = neg.er) neg.er$log10pval <- -log10(x = neg.er[, paste(enrich.database, sep = ".", "P.value")]) neg.er$term <- neg.er[, paste(enrich.database, sep = ".", "Term")] neg.er <- neg.er[1:num.pathway, ] neg.er$term <- factor(x = neg.er$term, levels = neg.er$term[order(neg.er$log10pval)]) if(isTRUE(length(neg.er$term) == 0) & isTRUE(length(pos.er == 0))){ stop("No positive or negative marker genes identified") } else{ if(isTRUE(length(neg.er$term) == 0)){ gene.list <- list(pos = pos.er) } else{ gene.list <- list(pos = pos.er, neg = neg.er) } } } if (return.gene.list) { return(gene.list) } if(nrow(pos.markers) == 0){ message("No positive markers to plot") if (isTRUE(x = balanced)) { p2 <- ggplot(data = neg.er, aes_string(x = "term", y = "log10pval")) + geom_bar(stat = "identity", fill = "indianred2") + coord_flip() + xlab("Pathway") + scale_fill_manual(values = cols, drop = FALSE) + ylab("-log10(pval)") + ggtitle(paste(enrich.database, ident.1, sep = "_", "negative markers")) + theme_classic() + geom_text(aes_string(label = "term", y = 0), size = 5, color = "black", position = position_dodge(1), hjust = 0)+ theme(axis.title.y= element_blank(), axis.text.y = element_blank(), axis.ticks.y = element_blank()) p <- p2 } else{ stop("Nothing to plot") } } else { p <- ggplot(data = pos.er, aes_string(x = "term", y = "log10pval")) + geom_bar(stat = "identity", fill = "dodgerblue") + coord_flip() + xlab("Pathway") + scale_fill_manual(values = cols, drop = FALSE) + ylab("-log10(pval)") + ggtitle(paste(enrich.database, ident.1, sep = "_", "positive markers")) + theme_classic() + geom_text(aes_string(label = "term", y = 0), size = 5, color = "black", position = position_dodge(1), hjust = 0)+ theme(axis.title.y= element_blank(), axis.text.y = element_blank(), axis.ticks.y = element_blank()) if (isTRUE(x = balanced)) { p2 <- ggplot(data = neg.er, aes_string(x = "term", y = "log10pval")) + geom_bar(stat = "identity", fill = "indianred2") + coord_flip() + xlab("Pathway") + scale_fill_manual(values = 
cols, drop = FALSE) + ylab("-log10(pval)") + ggtitle(paste(enrich.database, ident.1, sep = "_", "negative markers")) + theme_classic() + geom_text(aes_string(label = "term", y = 0), size = 5, color = "black", position = position_dodge(1), hjust = 0) + theme(axis.title.y = element_blank(), axis.text.y = element_blank(), axis.ticks.y = element_blank()) p <- p + p2 } } return(p) } #' Linear discriminant analysis on pooled CRISPR screen data. #' #' This function performs unsupervised PCA on each mixscape class separately and projects each subspace onto all #' cells in the data. Finally, it uses the first 10 principal components from each projection as input to \code{lda} in the MASS package, together with the mixscape class labels. #' #' @inheritParams PrepLDA #' @inheritParams RunLDA #' #' @return Returns a Seurat object with LDA added in the reduction slot. #' #' @export #' @concept mixscape #' MixscapeLDA <- function( object, assay = NULL, ndims.print = 1:5, nfeatures.print = 30, reduction.key = "LDA_", seed = 42, pc.assay = "PRTB", labels = "gene", nt.label = "NT", npcs = 10, verbose = TRUE, logfc.threshold = 0.25 ) { projected_pcs <- PrepLDA( object = object, de.assay = assay, pc.assay = pc.assay, labels = labels, nt.label = nt.label, npcs = npcs, verbose = verbose ) lda.labels <- object[[labels]][,] object_lda <- RunLDA( object = projected_pcs, labels = lda.labels, assay = assay, verbose = verbose ) object[["lda"]] <- object_lda return(object) } #' Function to prepare data for Linear Discriminant Analysis. #' #' This function performs unsupervised PCA on each mixscape class separately and projects each subspace onto all #' cells in the data. #' #' @param object An object of class Seurat. #' @param de.assay Assay to use for selection of DE genes. #' @param pc.assay Assay to use for running principal component analysis. #' @param labels Metadata column with target gene class labels. #' @param nt.label Name of non-targeting cell class. #' @param npcs Number of principal components to use. #' @param verbose Print progress bar. #' @inheritParams FindMarkers #' @return Returns a list of the first 10 PCs from each projection.
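# Illustrative sketch (placeholders, not package documentation): assumes
# `eccite` is a Seurat object from a pooled CRISPR screen that has already
# been run through CalcPerturbSig() and RunMixscape(), so it holds a "PRTB"
# assay and a "gene" metadata column of target labels, with "NT" marking
# non-targeting cells.
library(Seurat)
eccite <- MixscapeLDA(
  object = eccite,
  assay = "RNA",       # assay used to select DE genes per target class
  pc.assay = "PRTB",   # assay used for the per-class PCA projections
  labels = "gene",
  nt.label = "NT",
  npcs = 10
)
DimPlot(object = eccite, reduction = "lda", group.by = "mixscape_class")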
#' #' @export #' @concept mixscape #' PrepLDA <- function( object, de.assay = "RNA", pc.assay = "PRTB", labels = "gene", nt.label = "NT", npcs = 10, verbose = TRUE, logfc.threshold = 0.25 ) { projected_pcs <- list() gene_list <- setdiff(x = unique(x = object[[labels]][, 1]), y = nt.label) Idents(object = object) <- labels DefaultAssay(object = object) <- pc.assay all_genes <- list() nt.cells <- WhichCells(object = object, idents = nt.label) for (g in gene_list) { if (verbose) { message(g) } gd.cells <- WhichCells(object = object, idents = g) gene_set <- TopDEGenesMixscape( object = object, ident.1 = gd.cells, ident.2 = nt.cells, de.assay = de.assay, logfc.threshold = logfc.threshold, labels = labels, verbose = verbose ) if (length(x = gene_set) < (npcs + 1)) { all_genes[[g]] <- character() next } all_genes[[g]] <- gene_set } all_markers <- unique(x = unlist(x = all_genes)) missing_genes <- all_markers[!all_markers %in% rownames(x = object[[pc.assay]])] object <- GetMissingPerturb(object = object, assay = pc.assay, features = missing_genes, verbose = verbose) for (g in gene_list) { if (verbose) { message(g) } gene_subset <- subset(x = object, idents = c(g, nt.label)) gene_set <- all_genes[[g]] if (length(x = gene_set) == 0) { next } gene_subset <- ScaleData( object = gene_subset, features = gene_set, verbose = FALSE ) gene_subset <- RunPCA( object = gene_subset, features = gene_set, npcs = npcs, verbose = FALSE ) project_pca <- ProjectCellEmbeddings( reference = gene_subset, query = object, dims = 1:npcs, verbose = FALSE ) colnames(x = project_pca) <- paste(g, colnames(x = project_pca), sep = "_") projected_pcs[[g]] <- project_pca } return(projected_pcs) } #' @param object Input values for LDA (numeric), with observations as rows #' @param labels Observation labels for LDA #' @param assay Name of Assay LDA is being run on #' @param ndims.print PCs to print genes for #' @param nfeatures.print Number of genes to print for each PC #' @param reduction.key dimensional reduction key, specifies the string before #' the number for the dimension names. LDA by default #' @param seed Set a random seed. By default, sets the seed to 42. Setting #' NULL will not set a seed. #' #' @importFrom MASS lda #' @importFrom stats predict #' #' @rdname RunLDA #' @concept mixscape #' @export #' @method RunLDA default #' RunLDA.default <- function( object, labels, assay = NULL, verbose = TRUE, ndims.print = 1:5, nfeatures.print = 30, reduction.key = "LDA_", seed = 42, ... 
) { if (!is.null(x = seed)) { set.seed(seed = seed) } object <- data.frame(object) var_names <- colnames(x = object) object$lda_cluster_label <- labels lda_results <- lda(formula = lda_cluster_label ~ ., data = object) lda_predictions <- predict(object = lda_results, newdata = object) lda_cv <-lda( formula = lda_cluster_label ~ ., data = object, CV = TRUE )$posterior feature.loadings <- lda_results$scaling cell.embeddings <- lda_predictions$x lda.assignments <- lda_predictions$class lda.posterior <- lda_predictions$posterior colnames(x = lda.posterior) <- paste0("LDAP_", colnames(x = lda.posterior)) rownames(x = feature.loadings) <- var_names colnames(x = feature.loadings) <- paste0(reduction.key, 1:ncol(x = cell.embeddings)) rownames(x = cell.embeddings) <- rownames(x = object) colnames(x = cell.embeddings) <- colnames(x = feature.loadings) reduction.data <- CreateDimReducObject( embeddings = cell.embeddings, loadings = feature.loadings, assay = assay, key = reduction.key, misc = list( assignments = lda.assignments, posterior = lda.posterior, model = lda_results, cv = lda_cv ) ) if (verbose) { print(x = reduction.data, dims = ndims.print, nfeatures = nfeatures.print) } return(reduction.data) } #' Function to perform Linear Discriminant Analysis. #' #' @param ndims.print Number of LDA dimensions to print. #' @param nfeatures.print Number of features to print for each LDA component. #' @param reduction.key Reduction key name. #' #' @rdname RunLDA #' @concept mixscape #' @export #' @method RunLDA Assay #' RunLDA.Assay <- function( object, assay = NULL, labels, features = NULL, verbose = TRUE, ndims.print = 1:5, nfeatures.print = 30, reduction.key = "LDA_", seed = 42, ... ) { data.use <- PrepDR( object = object, features = features, verbose = verbose ) reduction.data <- RunLDA( object = t(x = data.use), assay = assay, labels = labels, verbose = verbose, ndims.print = ndims.print, nfeatures.print = nfeatures.print, reduction.key = reduction.key, seed = seed, ... ) return(reduction.data) } #' @param object An object of class Seurat. #' @param assay Assay to use for performing Linear Discriminant Analysis (LDA). #' @param labels Meta data column with target gene class labels. #' @param features Features to compute LDA on #' @param reduction.name dimensional reduction name, lda by default #' @param reduction.key Reduction key name. #' @param seed Value for random seed #' @param verbose Print the top genes associated with high/low loadings for #' the PCs #' @param ndims.print Number of LDA dimensions to print. #' @param nfeatures.print Number of features to print for each LDA component. #' #' @rdname RunLDA #' @concept mixscape #' @export #' @method RunLDA Seurat #' RunLDA.Seurat <- function( object, assay = NULL, labels, features = NULL, reduction.name = "lda", reduction.key = "LDA_", seed = 42, verbose = TRUE, ndims.print = 1:5, nfeatures.print = 30, ... ) { assay <- assay %||% DefaultAssay(object = object) reduction.data <- RunLDA( object = object[[assay]], assay = assay, labels = labels, features = features, verbose = verbose, ndims.print = ndims.print, nfeatures.print = nfeatures.print, reduction.key = reduction.key, seed = seed, ... 
) object[[reduction.name]] <- reduction.data object$lda.assignments <- slot(object = object[[reduction.name]], name = "misc")[["assignments"]] object <- AddMetaData( object = object, metadata = as.data.frame( x = slot(object = object[[reduction.name]], name = "misc")[["posterior"]] ) ) object <- LogSeuratCommand(object = object) object <- ProjectDim( object = object, reduction = reduction.name, assay = assay, verbose = verbose, dims.print = ndims.print, nfeatures.print = nfeatures.print ) Loadings(object = object[[reduction.name]]) <- Loadings( object = object[[reduction.name]], projected = TRUE ) return(object) } #' Run Mixscape #' #' Function to identify perturbed and non-perturbed gRNA-expressing cells that #' accounts for multiple treatments/conditions/chemical perturbations. #' #' @importFrom ggplot2 geom_density position_dodge #' @param object An object of class Seurat. #' @param assay Assay to use for mixscape classification. #' @param slot Assay data slot to use. #' @param labels Metadata column with target gene labels. #' @param nt.class.name Classification name of non-targeting gRNA cells. #' @param new.class.name Name of mixscape classification to be stored in #' metadata. #' @param min.de.genes Required number of genes that are differentially #' expressed for the method to separate perturbed and non-perturbed cells. #' @param min.cells Minimum number of cells in target gene class. If fewer than #' this many cells are assigned to a target gene class during classification, #' all are assigned NP. #' @param de.assay Assay to use when performing differential expression analysis. #' Usually RNA. #' @param logfc.threshold Limit testing to genes which show, on average, #' at least X-fold difference (log-scale) between the two groups of cells. #' Default is 0.25. Increasing logfc.threshold speeds up the function, but can miss #' weaker signals. #' @param iter.num Number of normalmixEM iterations to run if convergence does #' not occur. #' @param verbose Display messages #' @param split.by Metadata column with experimental condition/cell type #' classification information. This is meant to be used to account for cases where a #' perturbation is condition- or cell type-specific. #' @param fine.mode When set to TRUE, DE genes for each target gene #' class will be calculated for each gRNA separately and pooled into one DE list #' for calculating the perturbation score of every cell and their subsequent #' classification. #' @param fine.mode.labels Metadata column with gRNA ID labels. #' @param prtb.type Specify the type of CRISPR perturbation expected for labeling mixscape classifications. Default is KO. #' @return Returns a Seurat object with the following information in the #' metadata and tools slots: #' \describe{ #' \item{mixscape_class}{Classification result with cells being either #' classified as perturbed (KO, by default) or non-perturbed (NP) based on their target #' gene class.} #' \item{mixscape_class.global}{Global classification result (perturbed, NP or NT)} #' \item{p_ko}{Posterior probabilities used to determine if a cell is KO (default). Name of this item will change to match the prtb.type parameter setting.
#' (>0.5) or NP} #' \item{perturbation score}{Perturbation scores for every cell calculated in #' the first iteration of the function.} #' } #' #' @export #' @concept mixscape #' RunMixscape <- function( object, assay = "PRTB", slot = "scale.data", labels = "gene", nt.class.name = "NT", new.class.name = "mixscape_class", min.de.genes = 5, min.cells = 5, de.assay = "RNA", logfc.threshold = 0.25, iter.num = 10, verbose = FALSE, split.by = NULL, fine.mode = FALSE, fine.mode.labels = "guide_ID", prtb.type = "KO" ) { mixtools.installed <- PackageCheck("mixtools", error = FALSE) if (!mixtools.installed[1]) { stop("Please install the mixtools package to use RunMixscape", "\nThis can be accomplished with the following command: ", "\n----------------------------------------", "\ninstall.packages('mixtools')", "\n----------------------------------------", call. = FALSE) } assay <- assay %||% DefaultAssay(object = object) if (is.null(x = labels)) { stop("Please specify target gene class metadata name") } prtb_markers <- list() object[[new.class.name]] <- object[[labels]] object[[new.class.name]][, 1] <- as.character(x = object[[new.class.name]][, 1]) object[[paste0(new.class.name, "_p_", tolower(x = prtb.type))]] <- 0 #create list to store perturbation scores. gv.list <- list() if (is.null(x = split.by)) { split.by <- splits <- "con1" } else { splits <- as.character(x = unique(x = object[[split.by]][, 1])) } # determine gene sets across all splits/groups cells.s.list <- list() for (s in splits) { Idents(object = object) <- split.by cells.s <- WhichCells(object = object, idents = s) cells.s.list[[s]] <- cells.s genes <- setdiff(x = unique(x = object[[labels]][cells.s, 1]), y = nt.class.name) Idents(object = object) <- labels for (gene in genes) { if (isTRUE(x = verbose)) { message("Processing ", gene) } orig.guide.cells <- intersect(x = WhichCells(object = object, idents = gene), y = cells.s) nt.cells <- intersect(x = WhichCells(object = object, idents = nt.class.name), y = cells.s) if (isTRUE(x = fine.mode)) { guides <- setdiff(x = unique(x = object[[fine.mode.labels]][orig.guide.cells, 1]), y = nt.class.name) all.de.genes <- c() for (gd in guides) { gd.cells <- rownames(x = object[[]][orig.guide.cells, ])[which(x = object[[]][orig.guide.cells, fine.mode.labels] == gd)] de.genes <- TopDEGenesMixscape( object = object, ident.1 = gd.cells, ident.2 = nt.cells, de.assay = de.assay, logfc.threshold = logfc.threshold, labels = fine.mode.labels, verbose = verbose ) all.de.genes <- c(all.de.genes, de.genes) } all.de.genes <- unique(all.de.genes) } else { all.de.genes <- TopDEGenesMixscape( object = object, ident.1 = orig.guide.cells, ident.2 = nt.cells, de.assay = de.assay, logfc.threshold = logfc.threshold, labels = labels, verbose = verbose ) } prtb_markers[[s]][[gene]] <- all.de.genes if (length(x = all.de.genes) < min.de.genes) { prtb_markers[[s]][[gene]] <- character() } } } all_markers <- unique(x = unlist(x = prtb_markers)) missing_genes <- all_markers[!all_markers %in% rownames(x = object[[assay]])] object <- GetMissingPerturb(object = object, assay = assay, features = missing_genes, verbose = verbose) for (s in splits) { cells.s <- cells.s.list[[s]] genes <- setdiff(x = unique(x = object[[labels]][cells.s, 1]), y = nt.class.name) if (verbose) { message("Classifying cells for: ") } for (gene in genes) { Idents(object = object) <- labels post.prob <- 0 orig.guide.cells <- intersect(x = WhichCells(object = object, idents = gene), y = cells.s) nt.cells <- intersect(x = WhichCells(object = object, idents 
= nt.class.name), y = cells.s) all.cells <- c(orig.guide.cells, nt.cells) if (length(x = prtb_markers[[s]][[gene]]) == 0) { if (verbose) { message(" Fewer than ", min.de.genes, " DE genes for ", gene, ". Assigning cells as NP.") } object[[new.class.name]][orig.guide.cells, 1] <- paste0(gene, " NP") } else { if (verbose) { message(" ", gene) } de.genes <- prtb_markers[[s]][[gene]] dat <- GetAssayData(object = object[[assay]], slot = "data")[de.genes, all.cells, drop = FALSE] if (slot == "scale.data") { dat <- ScaleData(object = dat, features = de.genes, verbose = FALSE) } converged <- FALSE n.iter <- 0 old.classes <- object[[new.class.name]][all.cells, ] while (!converged && n.iter < iter.num) { Idents(object = object) <- new.class.name guide.cells <- intersect(x = WhichCells(object = object, idents = gene), y = cells.s) vec <- rowMeans2(x = dat[, guide.cells, drop = FALSE]) - rowMeans2(x = dat[, nt.cells, drop = FALSE]) pvec <- apply(X = dat, MARGIN = 2, FUN = ProjectVec, v2 = vec) if (n.iter == 0){ #store pvec gv <- as.data.frame(x = pvec) gv[, labels] <- nt.class.name gv[intersect(x = rownames(x = gv), y = guide.cells), labels] <- gene gv.list[[gene]][[s]] <- gv } guide.norm <- DefineNormalMixscape(pvec[guide.cells]) nt.norm <- DefineNormalMixscape(pvec[nt.cells]) mm <- mixtools::normalmixEM( x = pvec, mu = c(nt.norm$mu, guide.norm$mu), sigma = c(nt.norm$sd, guide.norm$sd), k = 2, mean.constr = c(nt.norm$mu, NA), sd.constr = c(nt.norm$sd, NA), verb = FALSE, maxit = 5000, maxrestarts = 100 ) lik.ratio <- dnorm(x = pvec[orig.guide.cells], mean = mm$mu[1], sd = mm$sigma[1]) / dnorm(x = pvec[orig.guide.cells], mean = mm$mu[2], sd = mm$sigma[2]) post.prob <- 1/(1 + lik.ratio) object[[new.class.name]][names(x = which(post.prob > 0.5)), 1] <- gene object[[new.class.name]][names(x = which(post.prob < 0.5)), 1] <- paste(gene, " NP", sep = "") if (length(x = which(x = object[[new.class.name]] == gene & Cells(x = object) %in% cells.s)) < min.de.genes) { if (verbose) { message("Fewer than ", min.cells, " cells assigned as ", gene, "Assigning all to NP.") } object[[new.class.name]][guide.cells, 1] <- "NP" converged <- TRUE } if (all(object[[new.class.name]][all.cells, ] == old.classes)) { converged <- TRUE } old.classes <- object[[new.class.name]][all.cells, ] n.iter <- n.iter + 1 } object[[new.class.name]][which(x = object[[new.class.name]] == gene & Cells(x = object) %in% cells.s), 1] <- paste(gene, prtb.type, sep = " ") } object[[paste0(new.class.name, ".global")]] <- as.character(x = sapply(X = as.character(x = object[[new.class.name]][, 1]), FUN = function(x) {strsplit(x = x, split = " (?=[^ ]+$)", perl = TRUE)[[1]][2]})) object[[paste0(new.class.name, ".global")]][which(x = is.na(x = object[[paste0(new.class.name, ".global")]])), 1] <- nt.class.name object[[paste0(new.class.name,"_p_", tolower(prtb.type))]][names(x = post.prob), 1] <- post.prob } } Tool(object = object) <- gv.list Idents(object = object) <- new.class.name return(object) } #' Differential expression heatmap for mixscape #' #' Draws a heatmap of single cell feature expression with cells ordered by their #' mixscape ko probabilities. #' #' @inheritParams FindMarkers #' @inheritParams DoHeatmap #' @param max.cells.group Number of cells per identity to plot. #' @param max.genes Total number of DE genes to plot. #' @param balanced Plot an equal number of genes with both groups of cells. #' @param order.by.prob Order cells on heatmap based on their mixscape knockout #' probability from highest to lowest score. 
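# Illustrative end-to-end sketch of the mixscape workflow implemented above;
# `eccite` is an assumed Seurat object from a pooled CRISPR screen with a
# "gene" metadata column of gRNA target labels ("NT" = non-targeting) and a
# precomputed PCA; the neighbor and dimension settings are placeholders.
library(Seurat)
eccite <- CalcPerturbSig(
  object = eccite, assay = "RNA", slot = "data",
  gd.class = "gene", nt.cell.class = "NT",
  reduction = "pca", ndims = 15, num.neighbors = 20,
  new.assay.name = "PRTB"
)
eccite <- RunMixscape(
  object = eccite, assay = "PRTB", slot = "scale.data",
  labels = "gene", nt.class.name = "NT",
  min.de.genes = 5, de.assay = "RNA", prtb.type = "KO"
)
table(eccite$mixscape_class.global)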
#' @param group.by (Deprecated) Option to split densities based on mixscape #' classification. Please use mixscape.class instead #' @param mixscape.class metadata column with mixscape classifications. #' @param prtb.type specify type of CRISPR perturbation expected for labeling #' mixscape classifications. Default is KO. #' @param fc.name Name of the fold change, average difference, or custom #' function column in the output data.frame. Default is avg_log2FC #' @param pval.cutoff P-value cut-off for selection of significantly DE genes. #' @return A ggplot object. #' #' @importFrom stats median #' @importFrom scales hue_pal #' @importFrom ggplot2 annotation_raster coord_cartesian ggplot_build aes_string #' @export #' @concept mixscape #' MixscapeHeatmap <- function( object, ident.1 = NULL, ident.2 = NULL, balanced = TRUE, logfc.threshold = 0.25, assay = "RNA", max.genes = 100, test.use ='wilcox', max.cells.group = NULL, order.by.prob = TRUE, group.by = NULL, mixscape.class = "mixscape_class", prtb.type = "KO", fc.name = "avg_log2FC", pval.cutoff = 5e-2, ... ) { if (!is.null(x = group.by)) { message("The group.by parameter is being deprecated. Please use ", "mixscape.class instead. Setting mixscape.class = ", group.by, " and continuing.") mixscape.class <- group.by } DefaultAssay(object = object) <- assay if (is.numeric(x = max.genes)) { all.markers <- FindMarkers( object = object, ident.1 = ident.1, ident.2 = ident.2, only.pos = FALSE, logfc.threshold = logfc.threshold, test.use = test.use ) if (balanced) { pos.markers <- all.markers[which(x = all.markers[,fc.name] > (logfc.threshold)), ] neg.markers <- all.markers[which(x = all.markers[,fc.name] < (-logfc.threshold)), ] if (length(x = rownames(x = subset(x = pos.markers, p_val < pval.cutoff))) < max.genes ) { marker.list <- c(rownames(x = subset(x = pos.markers, p_val < pval.cutoff))) if (length(x = rownames(x = subset(x = neg.markers, p_val < pval.cutoff))) < max.genes){ marker.list <- c(marker.list, rownames(x = subset(x = neg.markers, p_val < pval.cutoff))) } else { marker.list <- c(marker.list, rownames(x = subset(x = neg.markers, p_val < pval.cutoff))[1:max.genes]) } } else { marker.list <- c(rownames(x = subset(x = pos.markers, p_val < pval.cutoff))[1:max.genes]) if (length(x = rownames(x = subset(x = neg.markers, p_val < pval.cutoff))) < max.genes) { marker.list <- c(marker.list, rownames(x = subset(x = neg.markers, p_val < pval.cutoff))) } else { marker.list <- c(marker.list, rownames(x = subset(x = neg.markers, p_val < pval.cutoff))[1:max.genes]) } } } else { pos.markers <- all.markers[which(x = all.markers[, fc.name] > (logfc.threshold)),] if (length(x = rownames(x = subset(x = pos.markers, p_val < pval.cutoff))) < max.genes ){ marker.list <- c(rownames(x = subset(x = pos.markers, p_val < pval.cutoff))) } else { marker.list <- c(rownames(x = subset(x = pos.markers, p_val < pval.cutoff))[1:max.genes]) } } if (is.null(x = max.cells.group)) { if (is.null(x = group.by)) { sub2 <- subset(x = object, idents = c(ident.1, ident.2)) } else{ sub2 <- subset(x = object, idents = c(ident.1, ident.2)) Idents(object = sub2) <- group.by } } else { if (is.null(x = group.by)) { sub2 <- subset(x = object, idents = c(ident.1, ident.2), downsample = max.cells.group) } else { sub <- subset(x = object, idents = c(ident.1, ident.2)) Idents(object = sub) <- group.by sub2 <- subset(x = sub, downsample = max.cells.group) } } sub2 <- ScaleData(object = sub2, features = marker.list, assay = assay) if (isTRUE(x = order.by.prob)) { p_ko <- 
sub2[[paste0(mixscape.class, "_p_", tolower(x = prtb.type) )]][, 1, drop = FALSE] ordered.cells <- rownames(x = p_ko)[order(p_ko[,1], decreasing = TRUE)] p <- DoHeatmap(object = sub2, features = marker.list, label = TRUE, cells = ordered.cells, assay = assay, ...) } else{ p <- DoHeatmap(object = sub2, features = marker.list, label = TRUE, cells = sample(x = Cells(x = sub2)), assay = assay, ...) } return(p) } } #' Function to plot perturbation score distributions. #' #' Density plots to visualize perturbation scores calculated from RunMixscape #' function. #' #' @param object An object of class Seurat. #' @param target.gene.ident Target gene name to visualize perturbation scores for. #' @param target.gene.class meta data column specifying all target gene names in the experiment. #' @param before.mixscape Option to split densities based on mixscape classification (default) or original target gene classification. #' Default is set to NULL and plots cells by original class ID. #' @param col Specify color of target gene class or knockout cell class. For #' control non-targeting and non-perturbed cells, colors are set to different #' shades of grey. #' @param mixscape.class meta data column specifying mixscape classifications. #' @param prtb.type specify type of CRISPR perturbation expected for labeling mixscape classifications. Default is KO. #' @param split.by For datasets with more than one cell type. Set equal TRUE to visualize perturbation scores for each cell type separately. #' @return A ggplot object. #' #' @importFrom stats median #' @importFrom scales hue_pal #' @importFrom ggplot2 annotation_raster coord_cartesian ggplot_build aes_string #' geom_density theme_classic #' @export #' @concept mixscape #' PlotPerturbScore <- function( object, target.gene.class = "gene", target.gene.ident = NULL, mixscape.class = "mixscape_class", col = "orange2", split.by = NULL, before.mixscape = FALSE, prtb.type = "KO" ){ if(is.null(target.gene.ident) == TRUE){ message("Please provide name of target gene class to plot") } prtb_score_list <- Tool(object = object, slot = "RunMixscape")[[target.gene.ident]] for (nm in names(prtb_score_list)){ prtb_score_list[[nm]]['name'] <- nm } prtb_score <- do.call(rbind, prtb_score_list) prtb_score[, 2] <- as.factor(x = prtb_score[, 2]) gd <- setdiff(x = unique(x = prtb_score[, target.gene.class]), y = target.gene.ident) colnames(x = prtb_score)[2] <- "gene" prtb_score$cell.bc <- sapply(rownames(prtb_score), FUN = function(x) substring(x, regexpr("[.]", x) + 1)) if (isTRUE(x = before.mixscape)) { cols <- setNames( object = c("grey49", col), nm = c(gd, target.gene.ident) ) p <- ggplot(data = prtb_score, mapping = aes_string(x = "pvec", color = "gene")) + geom_density() + theme_classic() top_r <- ggplot_build(p)$layout$panel_params[[1]]$y.range[2] prtb_score$y.jitter <- prtb_score$pvec prtb_score$y.jitter[prtb_score[, "gene"] == gd] <- runif( n = prtb_score$y.jitter[prtb_score[, "gene"] == gd], min = 0.001, max = top_r / 10 ) prtb_score$y.jitter[prtb_score[,"gene"] == target.gene.ident] <- runif( n = prtb_score$y.jitter[prtb_score[, "gene"] == target.gene.ident], min = -top_r / 10, max = 0 ) if(is.null(split.by)==FALSE) { prtb_score$split <- as.character(object[[split.by]][prtb_score$cell.bc,1]) p2 <- p + scale_color_manual(values = cols, drop = FALSE) + geom_density(size = 1.5) + geom_point(data = prtb_score, aes_string(x = "pvec", y = "y.jitter"), size = 0.1) + theme(axis.text = element_text(size = 18), axis.title = element_text(size = 20)) + ylab("Cell density") + 
xlab("perturbation score") + theme(legend.key.size = unit(1, "cm"), legend.text = element_text(colour = "black", size = 14), legend.title = element_blank(), plot.title = element_text(size = 16, face = "bold"))+ facet_wrap(vars(split)) } else{ p2 <- p + scale_color_manual(values = cols, drop = FALSE) + geom_density(size = 1.5) + geom_point(data = prtb_score, aes_string(x = "pvec", y = "y.jitter"), size = 0.1) + theme(axis.text = element_text(size = 18), axis.title = element_text(size = 20)) + ylab("Cell density") + xlab("perturbation score") + theme(legend.key.size = unit(1, "cm"), legend.text = element_text(colour = "black", size = 14), legend.title = element_blank(), plot.title = element_text(size = 16, face = "bold")) } } else { cols <- setNames( object = c("grey49", "grey79", col), nm = c(gd, paste0(target.gene.ident, " NP"), paste(target.gene.ident, prtb.type, sep = " ")) ) #add mixscape identities prtb_score$mix <- object[[mixscape.class]][prtb_score$cell.bc,] p <- ggplot(data = prtb_score, aes_string(x = "pvec", color = "mix")) + geom_density() + theme_classic() top_r <- ggplot_build(p)$layout$panel_params[[1]]$y.range[2] prtb_score$y.jitter <- prtb_score$pvec gd2 <- setdiff( x = unique(x = prtb_score[, "mix"]), y = c(paste0(target.gene.ident, " NP"), paste(target.gene.ident, prtb.type, sep = " ")) ) prtb_score$y.jitter[prtb_score[, "mix"] == gd2] <- runif( n = prtb_score$y.jitter[prtb_score[, "mix"] == gd2], min = 0.001, max = top_r / 10 ) prtb_score$y.jitter[prtb_score$mix == paste(target.gene.ident, prtb.type, sep = " ")] <- runif( n = prtb_score$y.jitter[prtb_score[, "mix"] == paste(target.gene.ident, prtb.type, sep = " ")], min = -top_r / 10, max = 0 ) prtb_score$y.jitter[prtb_score$mix == paste0(target.gene.ident, " NP")] <- runif( n = prtb_score$y.jitter[prtb_score[, "mix"] == paste0(target.gene.ident, " NP")], min = -top_r / 10, max = 0 ) prtb_score[, "mix"] <- as.factor(x = prtb_score[,"mix"]) if(is.null(split.by) == FALSE){ prtb_score$split <- as.character(object[[split.by]][prtb_score$cell.bc,1]) p2 <- ggplot(data = prtb_score, aes_string(x = "pvec", color = "mix")) + scale_color_manual(values = cols, drop = FALSE) + geom_density(size = 1.5) + geom_point(aes_string(x = "pvec", y = "y.jitter"), size = 0.1) + theme_classic() + theme(axis.text = element_text(size = 18), axis.title = element_text(size = 20)) + ylab("Cell density") + xlab("perturbation score") + theme(legend.key.size = unit(1, "cm"), legend.text = element_text(colour ="black", size = 14), legend.title = element_blank(), plot.title = element_text(size = 16, face = "bold"))+ facet_wrap(vars(split)) } else{ p2 <- p + scale_color_manual(values = cols, drop = FALSE) + geom_density(size = 1.5) + geom_point(data = prtb_score, aes_string(x = "pvec", y = "y.jitter"), size = 0.1) + theme(axis.text = element_text(size = 18), axis.title = element_text(size = 20)) + ylab("Cell density") + xlab("perturbation score") + theme(legend.key.size = unit(1, "cm"), legend.text = element_text(colour ="black", size = 14), legend.title = element_blank(), plot.title = element_text(size = 16, face = "bold")) } } return(p2) } #%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% # Internal #%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% # Function to define Normal distribution - # returns list with mu (mean) and sd (standard deviation) DefineNormalMixscape <- function(x) { mu <- mean(x) sd <- sd(x) return(list(mu = mu, sd = sd)) } # Get missing perturbation signature for missing 
features # # @param object Seurat object # @param assay Perturbation signature assay name # @param features vector of features to compute for # @param verbose display progress # @return Returns Seurat object with assay updated with new features # GetMissingPerturb <- function(object, assay, features, verbose = TRUE) { if (length(x = features) == 0) { return(object) } if (verbose) { message("Computing perturbation signature for missing features.") } command <- grep(pattern = "CalcPerturbSig", x = Command(object = object), value = TRUE) command.match <- sapply(X = command, FUN = function(x) { Command(object = object, command = x, value = "new.assay.name") == assay }) if (length(x = which(x = command.match)) > 1) { stop("Ambiguous command log.") } if (length(x = which(x = command.match)) == 0) { stop("Cannot find previously run CalcPerturbSig command. Please make sure you've run CalcPerturbSig to create the provided assay.") } command <- names(x = command.match) if ("split.by" %in% names(x = slot(object = Command(object = object, command = command), name = "params"))) { split.by <- Command(object = object, command = command, value = "split.by") } else { split.by <- NULL } gd.class <- Command(object = object, command = command, value = "gd.class") nt.cell.class <- Command(object = object, command = command, value = "nt.cell.class") slot <- Command(object = object, command = command, value = "slot") assay.orig <- Command(object = object, command = command, value = "assay") old.idents <- Idents(object = object) if (!is.null(x = split.by)) { Idents(object = object) <- split.by } else { Idents(object = object) <- "rep1" } replicate <- unique(x = Idents(object = object)) all_diff <- list() all_nt_cells <- Cells(x = object)[which(x = object[[]][gd.class] == nt.cell.class)] features <- setdiff(x = features, y = rownames(x = object[[assay]])) for (r in replicate) { # isolate nt cells all_cells <- WhichCells(object = object, idents = r) nt_cells <- intersect(x = all_nt_cells, all_cells) # pull previously computed neighbors neighbors <- Tool(object = object, slot = command)[[make.names(names = paste0(assay, "_", r))]] diff <- PerturbDiff( object = object, assay = assay.orig, slot = slot, all_cells = all_cells, nt_cells = nt_cells, features = features, neighbors = neighbors, verbose = verbose ) all_diff[[r]] <- diff } all_diff <- do.call(what = cbind, args = all_diff) all_diff <- all_diff[, colnames(x = object[[assay]]), drop = FALSE] new.assay <- CreateAssayObject( data = rbind( GetAssayData(object = object[[assay]], slot = "data"), all_diff ), min.cells = 0, min.features = 0, check.matrix = FALSE ) new.assay <- SetAssayData( object = new.assay, slot = "scale.data", new.data = GetAssayData(object = object[[assay]], slot = "scale.data") ) object[[assay]] <- new.assay Idents(object = object) <- old.idents return(object) } # Helper function to compute the perturbation differences - enables reuse in # GetMissingPerturb # # @param object Seurat object # @param assay assay to use # @param slot slot to use # @param all_cells vector of cell names to compute difference for # @param nt_cells vector of nt cell names # @param features vector of features to compute for # @param neighbors Neighbor object containing indices of nearest NT cells # @param verbose display progress bar # @return returns matrix of perturbation differences # #' @importFrom matrixStats rowMeans2 #' @importFrom Matrix sparseMatrix colSums #' PerturbDiff <- function(object, assay, slot, all_cells, nt_cells, features, neighbors, verbose) { nt_data
<- as.matrix(x = expm1(x = GetAssayData(object = object, assay = assay, slot = slot)[features, nt_cells, drop = FALSE])) mysapply <- ifelse(test = verbose, yes = pbsapply, no = sapply) # new_expr <- mysapply(X = all_cells, FUN = function(i) { # index <- Indices(object = neighbors)[i, ] # nt_cells20 <- nt_cells[index] # avg_nt <- rowMeans2(x = nt_data[, nt_cells20, drop = FALSE]) # avg_nt <- as.matrix(x = avg_nt) # colnames(x = avg_nt) <- i # return(avg_nt) # }) idx <- Indices(object = neighbors)[all_cells,] model.matrix <- sparseMatrix(i = as.vector(idx), j = rep(1:nrow(x = idx), times = ncol(x = idx)), x = 1, dims = c(length(x = nt_cells), nrow(x = idx))) model.matrix <- model.matrix/rep(colSums(model.matrix), each = nrow(x = model.matrix)) new_expr <- nt_data %*% model.matrix new_expr <- matrix(data = new_expr, nrow = length(x = features)) new_expr <- log1p(x = new_expr) rownames(x = new_expr) <- rownames(x = nt_data) colnames(x = new_expr) <- all_cells diff <- new_expr - as.matrix(GetAssayData(object = object, slot = slot, assay = assay)[features, colnames(x = new_expr), drop = FALSE]) return(diff) } # Helper function to project cells onto the perturbation vector # @param v1 vector 1 # @param v2 vector 2 # ProjectVec <- function(v1, v2) { return(as.vector(x = (v1 %*% v2) / (v2 %*% v2))) } # Function to find top DE genes that pass some p value cutoff between cells # with targeting and non-targeting gRNAs. # # @param object An object of class Seurat. # @param ident.1 Target gene class or cells to find DE genes for. # @param ident.2 Non-targetting class or cells # @param labels metadata column with target gene classification. # @param de.assay Name of Assay DE is performed on. # @param test.use Denotes which test to use. See all available tests on # FindMarkers documentation. # @param pval.cutoff P-value cut-off for selection of significantly DE genes. # @param logfc.threshold Limit testing to genes which show, on average, at # least X-fold difference (log-scale) between the two groups of cells. Default # is 0.25 Increasing logfc.threshold speeds up the function, but can miss # weaker signals. # @param verbose Display messages # @return # TopDEGenesMixscape <- function( object, ident.1, ident.2 = NULL, labels = 'gene', de.assay = "RNA", test.use = "wilcox", pval.cutoff = 5e-2, logfc.threshold = 0.25, verbose = TRUE ) { if (verbose) { message("Finding new perturbation gene set") } de.genes <- data.frame() tryCatch( expr = { de.genes <- FindMarkers( object = object, ident.1 = ident.1, ident.2 = ident.2, group.by = labels, assay = de.assay, test.use = test.use, logfc.threshold = logfc.threshold, verbose = verbose, min.pct = 0.1 ) de.genes <- de.genes[de.genes$p_val_adj < pval.cutoff, ] }, error = function(e) {} ) return(rownames(x = de.genes)) } Seurat/R/zzz.R0000644000176200001440000000606014525500037012701 0ustar liggesusers#' @importFrom progressr progressor #' @importFrom methods slot slot<- #' @importFrom lifecycle deprecated deprecate_soft deprecate_stop #' deprecate_warn is_present #' @importFrom rlang !!! #' abort #' arg_match #' arg_match0 #' as_name #' caller_env #' check_installed #' enquo #' inform #' is_integerish #' is_na #' is_quosure #' is_scalar_integerish #' quo_get_env #' quo_get_expr #' warn #' NULL #' @section Package options: #' #' Seurat uses the following [options()] to configure behaviour: #' #' \describe{ #' \item{\code{Seurat.memsafe}}{global option to call gc() after many operations. 
#' This can be helpful in cleaning up the memory status of the R session and #' prevent use of swap space. However, it does add to the computational overhead #' and setting to FALSE can speed things up if you're working in an environment #' where RAM availability is not a concern.} #' \item{\code{Seurat.warn.umap.uwot}}{Show warning about the default backend #' for \code{\link{RunUMAP}} changing from Python UMAP via reticulate to UWOT} #' \item{\code{Seurat.checkdots}}{For functions that have ... as a parameter, #' this controls the behavior when an item isn't used. Can be one of warn, #' stop, or silent.} #' \item{\code{Seurat.limma.wilcox.msg}}{{Show message about more efficient #' Wilcoxon Rank Sum test available via the limma package}} #' \item{\code{Seurat.Rfast2.msg}}{{Show message about more efficient #' Moran's I function available via the Rfast2 package}} #' \item{\code{Seurat.warn.vlnplot.split}}{Show message about changes to #' default behavior of split/multi violin plots} #' } #' #' @docType package #' @rdname Seurat-package #' @name Seurat-package #' "_PACKAGE" #%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% # Options #%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% seurat_default_options <- list( Seurat.memsafe = FALSE, Seurat.warn.umap.uwot = TRUE, Seurat.checkdots = "warn", Seurat.presto.wilcox.msg = TRUE, #CHANGE Seurat.Rfast2.msg = TRUE, Seurat.warn.vlnplot.split = TRUE, Seurat.object.assay.version = "v5" ) #' @importFrom methods setClassUnion #' @importClassesFrom Matrix dgCMatrix #' NULL setClassUnion(name = 'V3Matrix', members = c('matrix', 'dgCMatrix')) AttachDeps <- function(deps) { for (d in deps) { if (!paste0('package:', d) %in% search()) { packageStartupMessage("Attaching ", d) attachNamespace(ns = d) } } } #%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% # Hooks #%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% #' @importFrom SeuratObject AttachDeps #' .onAttach <- function(libname, pkgname) { AttachDeps(deps = c('SeuratObject')) return(invisible(x = NULL)) } .onLoad <- function(libname, pkgname) { toset <- setdiff( x = names(x = seurat_default_options), y = names(x = options()) ) # toset <- names(x = seurat_default_options) if (length(x = toset)) { options(seurat_default_options[toset]) } return(invisible(x = NULL)) } Seurat/R/utilities.R0000644000176200001440000026002614525500056014064 0ustar liggesusers#' @include generics.R #' @importFrom SeuratObject PackageCheck #' NULL #%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% # Functions #%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% #' Add Azimuth Results #' #' Add mapping and prediction scores, UMAP embeddings, and imputed assay (if #' available) #' from Azimuth to an existing or new \code{\link[SeuratObject]{Seurat}} object #' #' @param object A \code{\link[SeuratObject]{Seurat}} object #' @param filename Path to Azimuth mapping scores file #' #' @return \code{object} with Azimuth results added #' #' @examples #' \dontrun{ #' object <- AddAzimuthResults(object, filename = "azimuth_results.Rds") #' } #' #' @export AddAzimuthResults <- function(object = NULL, filename) { if (is.null(x = filename)) { stop("No Azimuth results provided.") } azimuth_results <- readRDS(file = filename) if (!is.list(x = azimuth_results) || any(!(c('umap', 'pred.df') %in% names(x = azimuth_results)))) { stop("Expected following format for 
azimuth_results: `list(umap = , pred.df = [, impADT = ])`") } if (is.null(x = object)) { message("No existing Seurat object provided. Creating new one.") object <- CreateSeuratObject( counts = matrix( nrow = 1, ncol = nrow(x = azimuth_results$umap), dimnames = list( row.names = 'Dummy.feature', col.names = rownames(x = azimuth_results$umap)) ), assay = 'Dummy' ) } else { overlap.cells <- intersect( x = Cells(x = object), y = rownames(x = azimuth_results$umap) ) if (!(all(overlap.cells %in% Cells(x = object)))) { stop("Cells in object do not match cells in download") } else if (length(x = overlap.cells) < length(x = Cells(x = object))) { warning(paste0("Subsetting out ", length(x = Cells(x = object)) - length(x = overlap.cells), " cells that are absent in downloaded results (perhaps filtered by Azimuth)")) object <- subset(x = object, cells = overlap.cells) } } azimuth_results$pred.df$cell <- NULL object <- AddMetaData(object = object, metadata = azimuth_results$pred.df) object[['umap.proj']] <- azimuth_results$umap if ('impADT' %in% names(x = azimuth_results)) { object[['impADT']] <- azimuth_results$impADT if ('Dummy' %in% Assays(object = object)) { DefaultAssay(object = object) <- 'impADT' object[['Dummy']] <- NULL } } return(object) } #' Add Azimuth Scores #' #' Add mapping and prediction scores from Azimuth to a #' \code{\link[SeuratObject]{Seurat}} object #' #' @param object A \code{\link[SeuratObject]{Seurat}} object #' @param filename Path to Azimuth mapping scores file #' #' @return \code{object} with the mapping scores added #' #' @examples #' \dontrun{ #' object <- AddAzimuthScores(object, filename = "azimuth_pred.tsv") #' } #' AddAzimuthScores <- function(object, filename) { if (!file.exists(filename)) { stop("Cannot find Azimuth scores file ", filename, call. = FALSE) } object <- AddMetaData( object = object, metadata = read.delim(file = filename, row.names = 1) ) return(object) } #' Calculate module scores for feature expression programs in single cells #' #' Calculate the average expression levels of each program (cluster) on single #' cell level, subtracted by the aggregated expression of control feature sets. #' All analyzed features are binned based on averaged expression, and the #' control features are randomly selected from each bin. #' #' @param object Seurat object #' @param features A list of vectors of features for expression programs; each #' entry should be a vector of feature names #' @param pool List of features to check expression levels against, defaults to #' \code{rownames(x = object)} #' @param nbin Number of bins of aggregate expression levels for all #' analyzed features #' @param ctrl Number of control features selected from the same bin per #' analyzed feature #' @param k Use feature clusters returned from DoKMeans #' @param assay Name of assay to use #' @param name Name for the expression programs; will append a number to the #' end for each entry in \code{features} (eg. if \code{features} has three #' programs, the results will be stored as \code{name1}, \code{name2}, #' \code{name3}, respectively) #' @param seed Set a random seed. If NULL, seed is not set. #' @param search Search for symbol synonyms for features in \code{features} that #' don't match features in \code{object}? Searches the HGNC's gene names #' database; see \code{\link{UpdateSymbolList}} for more details #' @param slot Slot to calculate score values off of. Defaults to data slot (i.e log-normalized counts) #' @param ... 
Extra parameters passed to \code{\link{UpdateSymbolList}} #' #' @return Returns a Seurat object with module scores added to object meta data; #' each module is stored as \code{name#} for each module program present in #' \code{features} #' #' @importFrom ggplot2 cut_number #' @importFrom Matrix rowMeans colMeans #' #' @references Tirosh et al, Science (2016) #' #' @export #' @concept utilities #' #' @examples #' \dontrun{ #' data("pbmc_small") #' cd_features <- list(c( #' 'CD79B', #' 'CD79A', #' 'CD19', #' 'CD180', #' 'CD200', #' 'CD3D', #' 'CD2', #' 'CD3E', #' 'CD7', #' 'CD8A', #' 'CD14', #' 'CD1C', #' 'CD68', #' 'CD9', #' 'CD247' #' )) #' pbmc_small <- AddModuleScore( #' object = pbmc_small, #' features = cd_features, #' ctrl = 5, #' name = 'CD_Features' #' ) #' head(x = pbmc_small[]) #' } #' AddModuleScore <- function( object, features, pool = NULL, nbin = 24, ctrl = 100, k = FALSE, assay = NULL, name = 'Cluster', seed = 1, search = FALSE, slot = 'data', ... ) { if (!is.null(x = seed)) { set.seed(seed = seed) } assay.old <- DefaultAssay(object = object) assay <- assay %||% assay.old DefaultAssay(object = object) <- assay assay.data <- GetAssayData(object = object, assay = assay, slot = slot) features.old <- features if (k) { .NotYetUsed(arg = 'k') features <- list() for (i in as.numeric(x = names(x = table(object@kmeans.obj[[1]]$cluster)))) { features[[i]] <- names(x = which(x = object@kmeans.obj[[1]]$cluster == i)) } cluster.length <- length(x = features) } else { if (is.null(x = features)) { stop("Missing input feature list") } features <- lapply( X = features, FUN = function(x) { missing.features <- setdiff(x = x, y = rownames(x = object)) if (length(x = missing.features) > 0) { warning( "The following features are not present in the object: ", paste(missing.features, collapse = ", "), ifelse( test = search, yes = ", attempting to find updated synonyms", no = ", not searching for symbol synonyms" ), call. = FALSE, immediate. = TRUE ) if (search) { tryCatch( expr = { updated.features <- UpdateSymbolList(symbols = missing.features, ...) names(x = updated.features) <- missing.features for (miss in names(x = updated.features)) { index <- which(x == miss) x[index] <- updated.features[miss] } }, error = function(...) { warning( "Could not reach HGNC's gene names database", call. = FALSE, immediate. = TRUE ) } ) missing.features <- setdiff(x = x, y = rownames(x = object)) if (length(x = missing.features) > 0) { warning( "The following features are still not present in the object: ", paste(missing.features, collapse = ", "), call. = FALSE, immediate. = TRUE ) } } } return(intersect(x = x, y = rownames(x = object))) } ) cluster.length <- length(x = features) } if (!all(LengthCheck(values = features))) { warning(paste( 'Could not find enough features in the object from the following feature lists:', paste(names(x = which(x = !LengthCheck(values = features)))), 'Attempting to match case...' )) features <- lapply( X = features.old, FUN = CaseMatch, match = rownames(x = object) ) } if (!all(LengthCheck(values = features))) { stop(paste( 'The following feature lists do not have enough features present in the object:', paste(names(x = which(x = !LengthCheck(values = features)))), 'exiting...' 
)) } pool <- pool %||% rownames(x = object) data.avg <- Matrix::rowMeans(x = assay.data[pool, ]) data.avg <- data.avg[order(data.avg)] data.cut <- cut_number(x = data.avg + rnorm(n = length(data.avg))/1e30, n = nbin, labels = FALSE, right = FALSE) #data.cut <- as.numeric(x = Hmisc::cut2(x = data.avg, m = round(x = length(x = data.avg) / (nbin + 1)))) names(x = data.cut) <- names(x = data.avg) ctrl.use <- vector(mode = "list", length = cluster.length) for (i in 1:cluster.length) { features.use <- features[[i]] for (j in 1:length(x = features.use)) { ctrl.use[[i]] <- c( ctrl.use[[i]], names(x = sample( x = data.cut[which(x = data.cut == data.cut[features.use[j]])], size = ctrl, replace = FALSE )) ) } } ctrl.use <- lapply(X = ctrl.use, FUN = unique) ctrl.scores <- matrix( data = numeric(length = 1L), nrow = length(x = ctrl.use), ncol = ncol(x = object) ) for (i in 1:length(ctrl.use)) { features.use <- ctrl.use[[i]] ctrl.scores[i, ] <- Matrix::colMeans(x = assay.data[features.use, ]) } features.scores <- matrix( data = numeric(length = 1L), nrow = cluster.length, ncol = ncol(x = object) ) for (i in 1:cluster.length) { features.use <- features[[i]] data.use <- assay.data[features.use, , drop = FALSE] features.scores[i, ] <- Matrix::colMeans(x = data.use) } features.scores.use <- features.scores - ctrl.scores rownames(x = features.scores.use) <- paste0(name, 1:cluster.length) features.scores.use <- as.data.frame(x = t(x = features.scores.use)) rownames(x = features.scores.use) <- colnames(x = object) object[[colnames(x = features.scores.use)]] <- features.scores.use CheckGC() DefaultAssay(object = object) <- assay.old return(object) } #' Aggregated feature expression by identity class #' #' Returns summed counts ("pseudobulk") for each identity class. #' #' If \code{return.seurat = TRUE}, aggregated values are placed in the 'counts' #' layer of the returned object. The data is then normalized by running \code{\link{NormalizeData}} #' on the aggregated counts. \code{\link{ScaleData}} is then run on the default assay #' before returning the object. #' #' @param object Seurat object #' @param assays Which assays to use. Default is all assays #' @param features Features to analyze. Default is all features in the assay #' @param return.seurat Whether to return the data as a Seurat object. Default is FALSE #' @param group.by Category (or vector of categories) for grouping (e.g, ident, replicate, celltype); 'ident' by default #' To use multiple categories, specify a vector, such as c('ident', 'replicate', 'celltype') #' @param add.ident (Deprecated). Place an additional label on each cell prior to pseudobulking #' @param normalization.method Method for normalization, see \code{\link{NormalizeData}} #' @param scale.factor Scale factor for normalization, see \code{\link{NormalizeData}} #' @param margin Margin to perform CLR normalization, see \code{\link{NormalizeData}} #' @param verbose Print messages and show progress bar #' @param ... Arguments to be passed to methods such as \code{\link{CreateSeuratObject}} #' #' @return Returns a matrix with genes as rows, identity classes as columns. #' If return.seurat is TRUE, returns an object of class \code{\link{Seurat}}. 
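#' When \code{return.seurat = TRUE}, the summed counts can be pulled back out of
#' the returned object. A minimal sketch (assuming the \code{pbmc_small} demo
#' object that ships with \pkg{SeuratObject}; layer accessors may differ for
#' older assay versions):
#' \preformatted{
#' bulk <- AggregateExpression(
#'   object = pbmc_small,
#'   group.by = "ident",
#'   return.seurat = TRUE
#' )
#' head(GetAssayData(object = bulk, assay = "RNA", layer = "counts"))
#' }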
#' @export #' @concept utilities #' #' @examples #' \dontrun{ #' data("pbmc_small") #' head(AggregateExpression(object = pbmc_small)$RNA) #' head(AggregateExpression(object = pbmc_small, group.by = c('ident', 'groups'))$RNA) #' } #' AggregateExpression <- function( object, assays = NULL, features = NULL, return.seurat = FALSE, group.by = 'ident', add.ident = NULL, normalization.method = "LogNormalize", scale.factor = 10000, margin = 1, verbose = TRUE, ... ) { return( PseudobulkExpression( object = object, assays = assays, features = features, return.seurat = return.seurat, group.by = group.by, add.ident = add.ident, layer = 'counts', method = 'aggregate', normalization.method = normalization.method, scale.factor = scale.factor, margin = margin, verbose = verbose, ... ) ) } #' Averaged feature expression by identity class #' #' Returns averaged expression values for each identity class. #' #' If layer is set to 'data', this function assumes that the data has been log #' normalized and therefore feature values are exponentiated prior to averaging #' so that averaging is done in non-log space. Otherwise, if layer is set to #' either 'counts' or 'scale.data', no exponentiation is performed prior to averaging. #' If \code{return.seurat = TRUE} and layer is not 'scale.data', averaged values #' are placed in the 'counts' layer of the returned object and 'log1p' #' is run on the averaged counts and placed in the 'data' layer \code{\link{ScaleData}} #' is then run on the default assay before returning the object. #' If \code{return.seurat = TRUE} and layer is 'scale.data', the 'counts' layer contains #' average counts and 'scale.data' is set to the averaged values of 'scale.data'. #' #' @param object Seurat object #' @param assays Which assays to use. Default is all assays #' @param features Features to analyze. Default is all features in the assay #' @param return.seurat Whether to return the data as a Seurat object. Default is FALSE #' @param group.by Category (or vector of categories) for grouping (e.g, ident, replicate, celltype); 'ident' by default #' To use multiple categories, specify a vector, such as c('ident', 'replicate', 'celltype') #' @param add.ident (Deprecated). Place an additional label on each cell prior to pseudobulking #' @param layer Layer(s) to use; if multiple layers are given, assumed to follow #' the order of 'assays' (if specified) or object's assays #' @param slot (Deprecated). Slots(s) to use #' @param verbose Print messages and show progress bar #' @param ... Arguments to be passed to methods such as \code{\link{CreateSeuratObject}} #' #' @return Returns a matrix with genes as rows, identity classes as columns. #' If return.seurat is TRUE, returns an object of class \code{\link{Seurat}}. #' @export #' @concept utilities #' @importFrom SeuratObject .FilterObjects #' #' @examples #' data("pbmc_small") #' head(AverageExpression(object = pbmc_small)$RNA) #' head(AverageExpression(object = pbmc_small, group.by = c('ident', 'groups'))$RNA) #' AverageExpression <- function( object, assays = NULL, features = NULL, return.seurat = FALSE, group.by = 'ident', add.ident = NULL, layer = 'data', slot = deprecated(), verbose = TRUE, ... ) { return( PseudobulkExpression( object = object, assays = assays, features = features, return.seurat = return.seurat, group.by = group.by, add.ident = add.ident, layer = layer, slot = slot, method = 'average', verbose = verbose, ... 
) ) } #' Match the case of character vectors #' #' @param search A vector of search terms #' @param match A vector of characters whose case should be matched #' #' @return Values from search present in match with the case of match #' #' @export #' @concept utilities #' #' @examples #' data("pbmc_small") #' cd_genes <- c('Cd79b', 'Cd19', 'Cd200') #' CaseMatch(search = cd_genes, match = rownames(x = pbmc_small)) #' CaseMatch <- function(search, match) { search.match <- sapply( X = search, FUN = function(s) { return(grep( pattern = paste0('^', s, '$'), x = match, ignore.case = TRUE, perl = TRUE, value = TRUE )) } ) return(unlist(x = search.match)) } #' Score cell cycle phases #' #' @param object A Seurat object #' @param s.features A vector of features associated with S phase #' @param g2m.features A vector of features associated with G2M phase #' @param ctrl Number of control features selected from the same bin per #' analyzed feature supplied to \code{\link{AddModuleScore}}. #' Defaults to value equivalent to minimum number of features #' present in 's.features' and 'g2m.features'. #' @param set.ident If true, sets identity to phase assignments #' Stashes old identities in 'old.ident' #' @param ... Arguments to be passed to \code{\link{AddModuleScore}} #' #' @return A Seurat object with the following columns added to object meta data: S.Score, G2M.Score, and Phase #' #' @seealso \code{AddModuleScore} #' #' @export #' @concept utilities #' #' @examples #' \dontrun{ #' data("pbmc_small") #' # pbmc_small doesn't have any cell-cycle genes #' # To run CellCycleScoring, please use a dataset with cell-cycle genes #' # An example is available at http://satijalab.org/seurat/cell_cycle_vignette.html #' pbmc_small <- CellCycleScoring( #' object = pbmc_small, #' g2m.features = cc.genes$g2m.genes, #' s.features = cc.genes$s.genes #' ) #' head(x = pbmc_small@meta.data) #' } #' CellCycleScoring <- function( object, s.features, g2m.features, ctrl = NULL, set.ident = FALSE, ... ) { name <- 'Cell.Cycle' features <- list('S.Score' = s.features, 'G2M.Score' = g2m.features) if (is.null(x = ctrl)) { ctrl <- min(vapply(X = features, FUN = length, FUN.VALUE = numeric(length = 1))) } object.cc <- AddModuleScore( object = object, features = features, name = name, ctrl = ctrl, ... ) cc.columns <- grep(pattern = name, x = colnames(x = object.cc[[]]), value = TRUE) cc.scores <- object.cc[[cc.columns]] rm(object.cc) CheckGC() assignments <- apply( X = cc.scores, MARGIN = 1, FUN = function(scores, first = 'S', second = 'G2M', null = 'G1') { if (all(scores < 0)) { return(null) } else { if (length(which(x = scores == max(scores))) > 1) { return('Undecided') } else { return(c(first, second)[which(x = scores == max(scores))]) } } } ) cc.scores <- merge(x = cc.scores, y = data.frame(assignments), by = 0) colnames(x = cc.scores) <- c('rownames', 'S.Score', 'G2M.Score', 'Phase') rownames(x = cc.scores) <- cc.scores$rownames cc.scores <- cc.scores[, c('S.Score', 'G2M.Score', 'Phase')] object[[colnames(x = cc.scores)]] <- cc.scores if (set.ident) { object[['old.ident']] <- Idents(object = object) Idents(object = object) <- 'Phase' } return(object) } #' Slim down a multi-species expression matrix, when only one species is primarily of interenst. #' #' Valuable for CITE-seq analyses, where we typically spike in rare populations of 'negative control' cells from a different species. #' #' @param object A UMI count matrix. 
Should contain rownames that start with #' the ensuing arguments prefix.1 or prefix.2 #' @param prefix The prefix denoting rownames for the species of interest. #' Default is "HUMAN_". These rownames will have this prefix removed in the returned matrix. #' @param controls The prefix denoting rownames for the species of 'negative #' control' cells. Default is "MOUSE_". #' @param ncontrols How many of the most highly expressed (average) negative #' control features (by default, 100 mouse genes), should be kept? All other #' rownames starting with prefix.2 are discarded. #' #' @return A UMI count matrix. Rownames that started with \code{prefix} have this #' prefix discarded. For rownames starting with \code{controls}, only the #' \code{ncontrols} most highly expressed features are kept, and the #' prefix is kept. All other rows are retained. #' #' @importFrom utils head #' @importFrom Matrix rowSums #' #' @export #' @concept utilities #' #' @examples #' \dontrun{ #' cbmc.rna.collapsed <- CollapseSpeciesExpressionMatrix(cbmc.rna) #' } #' CollapseSpeciesExpressionMatrix <- function( object, prefix = "HUMAN_", controls = "MOUSE_", ncontrols = 100 ) { features <- grep(pattern = prefix, x = rownames(x = object), value = TRUE) controls <- grep(pattern = controls, x = rownames(x = object), value = TRUE) others <- setdiff(x = rownames(x = object), y = c(features, controls)) controls <- rowSums(x = object[controls, ]) controls <- names(x = head( x = sort(x = controls, decreasing = TRUE), n = ncontrols )) object <- object[c(features, controls, others), ] rownames(x = object) <- gsub( pattern = prefix, replacement = '', x = rownames(x = object) ) return(object) } # Create an Annoy index # # @note Function exists because it's not exported from \pkg{uwot} # # @param name Distance metric name # @param ndim Number of dimensions # # @return An nn index object # #' @importFrom methods new #' @importFrom RcppAnnoy AnnoyAngular AnnoyManhattan AnnoyEuclidean AnnoyHamming # CreateAnn <- function(name, ndim) { return(switch( EXPR = name, cosine = new(Class = AnnoyAngular, ndim), manhattan = new(Class = AnnoyManhattan, ndim), euclidean = new(Class = AnnoyEuclidean, ndim), hamming = new(Class = AnnoyHamming, ndim), stop("BUG: unknown Annoy metric '", name, "'") )) } #' Run a custom distance function on an input data matrix #' #' @author Jean Fan #' #' @param my.mat A matrix to calculate distance on #' @param my.function A function to calculate distance #' @param ... Extra parameters to my.function #' #' @return A distance matrix #' #' @importFrom stats as.dist #' #' @export #' @concept utilities #' #' @examples #' data("pbmc_small") #' # Define custom distance matrix #' manhattan.distance <- function(x, y) return(sum(abs(x-y))) #' #' input.data <- GetAssayData(pbmc_small, assay.type = "RNA", slot = "scale.data") #' cell.manhattan.dist <- CustomDistance(input.data, manhattan.distance) #' CustomDistance <- function(my.mat, my.function, ...) { CheckDots(..., fxns = my.function) n <- ncol(x = my.mat) mat <- matrix(data = 0, ncol = n, nrow = n) colnames(x = mat) <- rownames(x = mat) <- colnames(x = my.mat) for (i in 1:nrow(x = mat)) { for (j in 1:ncol(x = mat)) { mat[i,j] <- my.function(my.mat[, i], my.mat[, j], ...) } } return(as.dist(m = mat)) } #' Calculate the mean of logged values #' #' Calculate mean of logged values in non-log space (return answer in log-space) #' #' @param x A vector of values #' @param ... 
Other arguments (not used) #' #' @return Returns the mean in log-space #' #' @export #' @concept utilities #' #' @examples #' ExpMean(x = c(1, 2, 3)) #' ExpMean <- function(x, ...) { if (inherits(x = x, what = 'AnyMatrix')) { return(apply(X = x, FUN = function(i) {log(x = mean(x = exp(x = i) - 1) + 1)}, MARGIN = 1)) } else { return(log(x = mean(x = exp(x = x) - 1) + 1)) } } #' Calculate the standard deviation of logged values #' #' Calculate SD of logged values in non-log space (return answer in log-space) #' #' @param x A vector of values #' #' @return Returns the standard deviation in log-space #' #' @importFrom stats sd #' #' @export #' @concept utilities #' #' @examples #' ExpSD(x = c(1, 2, 3)) #' ExpSD <- function(x) { return(log1p(x = sd(x = expm1(x = x)))) } #' Calculate the variance of logged values #' #' Calculate variance of logged values in non-log space (return answer in #' log-space) #' #' @param x A vector of values #' #' @return Returns the variance in log-space #' #' @importFrom stats var #' #' @export #' @concept utilities #' #' @examples #' ExpVar(x = c(1, 2, 3)) #' ExpVar <- function(x) { return(log1p(x = var(x = expm1(x = x)))) } #' Scale and/or center matrix rowwise #' #' Performs row scaling and/or centering. Equivalent to using t(scale(t(mat))) #' in R except in the case of NA values. #' #' @param mat A matrix #' @param center a logical value indicating whether to center the rows #' @param scale a logical value indicating whether to scale the rows #' @param scale_max clip all values greater than scale_max to scale_max. Don't #' clip if Inf. #' @return Returns the center/scaled matrix #' #' @importFrom matrixStats rowMeans2 rowSds rowSums2 #' #' @export #' @concept utilities #' FastRowScale <- function( mat, center = TRUE, scale = TRUE, scale_max = 10 ) { # inspired by https://www.r-bloggers.com/a-faster-scale-function/ if (center) { rm <- rowMeans2(x = mat, na.rm = TRUE) } if (scale) { if (center) { rsd <- rowSds(mat, center = rm) } else { rsd <- sqrt(x = rowSums2(x = mat^2)/(ncol(x = mat) - 1)) } } if (center) { mat <- mat - rm } if (scale) { mat <- mat / rsd } if (scale_max != Inf) { mat[mat > scale_max] <- scale_max } return(mat) } #' Get updated synonyms for gene symbols #' #' Find current gene symbols based on old or alias symbols using the gene #' names database from the HUGO Gene Nomenclature Committee (HGNC) #' #' @details For each symbol passed, we query the HGNC gene names database for #' current symbols that have the provided symbol as either an alias #' (\code{alias_symbol}) or old (\code{prev_symbol}) symbol. All other queries #' are \strong{not} supported. #' #' @note This function requires internet access #' #' @param symbols A vector of gene symbols #' @param timeout Time to wait before canceling query in seconds #' @param several.ok Allow several current gene symbols for each #' provided symbol #' @param search.types Type of query to perform: #' \describe{ #' \item{\dQuote{\code{alias_symbol}}}{Find alternate symbols for the genes #' described by \code{symbols}} #' \item{\dQuote{\code{prev_symbol}}}{Find new new symbols for the genes #' described by \code{symbols}} #' } #' This parameter accepts multiple options and short-hand options #' (eg. \dQuote{\code{prev}} for \dQuote{\code{prev_symbol}}) #' @param verbose Show a progress bar depicting search progress #' @param ... 
Extra parameters passed to \code{\link[httr]{GET}} #' #' @return \code{GeneSymbolThesarus}:, if \code{several.ok}, a named list #' where each entry is the current symbol found for each symbol provided and #' the names are the provided symbols. Otherwise, a named vector with the #' same information. #' #' @source \url{https://www.genenames.org/} \url{https://www.genenames.org/help/rest/} #' #' @importFrom utils txtProgressBar setTxtProgressBar #' @importFrom httr GET accept_json timeout status_code content #' #' @rdname UpdateSymbolList #' @name UpdateSymbolList #' #' @export #' @concept utilities #' #' @seealso \code{\link[httr]{GET}} #' #' @examples #' \dontrun{ #' GeneSybmolThesarus(symbols = c("FAM64A")) #' } #' GeneSymbolThesarus <- function( symbols, timeout = 10, several.ok = FALSE, search.types = c('alias_symbol', 'prev_symbol'), verbose = TRUE, ... ) { db.url <- 'http://rest.genenames.org/fetch' # search.types <- c('alias_symbol', 'prev_symbol') search.types <- match.arg(arg = search.types, several.ok = TRUE) synonyms <- vector(mode = 'list', length = length(x = symbols)) not.found <- vector(mode = 'logical', length = length(x = symbols)) multiple.found <- vector(mode = 'logical', length = length(x = symbols)) names(x = multiple.found) <- names(x = not.found) <- names(x = synonyms) <- symbols if (verbose) { pb <- txtProgressBar(max = length(x = symbols), style = 3, file = stderr()) } for (symbol in symbols) { sym.syn <- character() for (type in search.types) { response <- GET( url = paste(db.url, type, symbol, sep = '/'), config = c(accept_json(), timeout(seconds = timeout)), ... ) if (!identical(x = status_code(x = response), y = 200L)) { next } response <- content(x = response) if (response$response$numFound != 1) { if (response$response$numFound > 1) { warning( "Multiple hits found for ", symbol, " as ", type, ", skipping", call. = FALSE, immediate. = TRUE ) } next } sym.syn <- c(sym.syn, response$response$docs[[1]]$symbol) } not.found[symbol] <- length(x = sym.syn) < 1 multiple.found[symbol] <- length(x = sym.syn) > 1 if (length(x = sym.syn) == 1 || (length(x = sym.syn) > 1 && several.ok)) { synonyms[[symbol]] <- sym.syn } if (verbose) { setTxtProgressBar(pb = pb, value = pb$getVal() + 1) } } if (verbose) { close(con = pb) } if (sum(not.found) > 0) { warning( "The following symbols had no synonyms: ", paste(names(x = which(x = not.found)), collapse = ', '), call. = FALSE, immediate. = TRUE ) } if (sum(multiple.found) > 0) { msg <- paste( "The following symbols had multiple synonyms:", paste(names(x = which(x = multiple.found)), sep = ', ') ) if (several.ok) { message(msg) message("Including anyways") } else { warning(msg, call. = FALSE, immediate. = TRUE) } } synonyms <- Filter(f = Negate(f = is.null), x = synonyms) if (!several.ok) { synonyms <- unlist(x = synonyms) } return(synonyms) } #' Compute the correlation of features broken down by groups with another #' covariate #' #' @param object Seurat object #' @param assay Assay to pull the data from #' @param slot Slot in the assay to pull feature expression data from (counts, #' data, or scale.data) #' @param var Variable with which to correlate the features #' @param group.assay Compute the gene groups based off the data in this assay. 
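#'
#' @examples
#' \dontrun{
#' # Sketch of a typical call: bin genes by average expression in the RNA assay
#' # and correlate each gene's scaled expression with total counts. The object
#' # name `seu` is an illustrative assumption and ScaleData() is assumed to
#' # have been run; the grouped correlations are stored as assay meta features
#' # ("nCount_RNA_cor" and "feature.grp").
#' seu <- GroupCorrelation(
#'   object = seu,
#'   assay = "RNA",
#'   slot = "scale.data",
#'   var = "nCount_RNA",
#'   ngroups = 6,
#'   do.plot = TRUE
#' )
#' }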
#' @param min.cells Only compute for genes in at least this many cells #' @param ngroups Number of groups to split into #' @param do.plot Display the group correlation boxplot (via #' \code{GroupCorrelationPlot}) #' #' @return A Seurat object with the correlation stored in metafeatures #' #' @export #' @concept utilities #' GroupCorrelation <- function( object, assay = NULL, slot = "scale.data", var = NULL, group.assay = NULL, min.cells = 5, ngroups = 6, do.plot = TRUE ) { assay <- assay %||% DefaultAssay(object = object) group.assay <- group.assay %||% assay var <- var %||% paste0("nCount_", group.assay) gene.grp <- GetFeatureGroups( object = object, assay = group.assay, min.cells = min.cells, ngroups = ngroups ) data <- as.matrix(x = GetAssayData(object = object[[assay]], slot = slot)) data <- data[rowMeans(x = data) != 0, ] grp.cors <- apply( X = data, MARGIN = 1, FUN = function(x) { cor(x = x, y = object[[var]]) } ) grp.cors <- grp.cors[names(x = gene.grp)] grp.cors <- as.data.frame(x = grp.cors[which(x = !is.na(x = grp.cors))]) grp.cors$gene_grp <- gene.grp[rownames(x = grp.cors)] colnames(x = grp.cors) <- c(paste0(var, "_cor"), "feature.grp") object[[assay]][] <- grp.cors if (isTRUE(x = do.plot)) { print(GroupCorrelationPlot( object = object, assay = assay, feature.group = "feature.grp", cor = paste0(var, "_cor") )) } return(object) } #' Load the Annoy index file #' #' @param object Neighbor object #' @param file Path to file with annoy index #' #' @return Returns the Neighbor object with the index stored #' @export #' @concept utilities #' LoadAnnoyIndex <- function(object, file){ metric <- slot(object = object, name = "alg.info")$metric ndim <- slot(object = object, name = "alg.info")$ndim if (is.null(x = metric)) { stop("Provided Neighbor object wasn't generated with annoy") } annoy.idx <- CreateAnn(name = metric, ndim = ndim) annoy.idx$load(path.expand(path = file)) Index(object = object) <- annoy.idx return(object) } #' Calculate the variance to mean ratio of logged values #' #' Calculate the variance to mean ratio (VMR) in non-logspace (return answer in #' log-space) #' #' @param x A vector of values #' @param ... Other arguments (not used) #' #' @return Returns the VMR in log-space #' #' @importFrom stats var #' #' @export #' @concept utilities #' #' @examples #' LogVMR(x = c(1, 2, 3)) #' LogVMR <- function(x, ...) { if (inherits(x = x, what = 'AnyMatrix')) { return(apply(X = x, FUN = function(i) {log(x = var(x = exp(x = i) - 1) / mean(x = exp(x = i) - 1))}, MARGIN = 1)) } else { return(log(x = var(x = exp(x = x) - 1) / mean(x = exp(x = x) - 1))) } } #' Aggregate expression of multiple features into a single feature #' #' Calculates relative contribution of each feature to each cell #' for given set of features. 
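#'
#' Concretely, for the default \code{slot = "data"} each feature is first divided
#' by its total expression across the selected cells, and the metafeature value
#' for a cell is the mean of these relative contributions; for
#' \code{slot = "scale.data"} it is simply the column mean. Roughly,
#' \code{colMeans(x / rowSums(x))} versus \code{colMeans(x)} for the feature
#' submatrix \code{x} (this restates the implementation below; it adds no new
#' behaviour).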
#' #' @param object A Seurat object #' @param features List of features to aggregate #' @param meta.name Name of column in metadata to store metafeature #' @param cells List of cells to use (default all cells) #' @param assay Which assay to use #' @param slot Which slot to take data from (default data) #' #' @return Returns a \code{Seurat} object with metafeature stored in object metadata #' #' @importFrom Matrix rowSums colMeans #' #' @export #' @concept utilities #' #' @examples #' data("pbmc_small") #' pbmc_small <- MetaFeature( #' object = pbmc_small, #' features = c("LTB", "EAF2"), #' meta.name = 'var.aggregate' #' ) #' head(pbmc_small[[]]) #' MetaFeature <- function( object, features, meta.name = 'metafeature', cells = NULL, assay = NULL, slot = 'data' ) { cells <- cells %||% colnames(x = object) assay <- assay %||% DefaultAssay(object = object) newmat <- GetAssayData(object = object, assay = assay, slot = slot) newmat <- newmat[features, cells] if (slot == 'scale.data') { newdata <- Matrix::colMeans(newmat) } else { rowtotals <- Matrix::rowSums(newmat) newmat <- newmat / rowtotals newdata <- Matrix::colMeans(newmat) } object[[meta.name]] <- newdata return(object) } #' Apply a ceiling and floor to all values in a matrix #' #' @param data Matrix or data frame #' @param min all values below this min value will be replaced with min #' @param max all values above this max value will be replaced with max #' @return Returns matrix after performing these floor and ceiling operations #' @export #' @concept utilities #' #' @examples #' mat <- matrix(data = rbinom(n = 25, size = 20, prob = 0.2), nrow = 5) #' mat #' MinMax(data = mat, min = 4, max = 5) #' MinMax <- function(data, min, max) { data2 <- data data2[data2 > max] <- max data2[data2 < min] <- min return(data2) } #' Calculate the percentage of a vector above some threshold #' #' @param x Vector of values #' @param threshold Threshold to use when calculating percentage #' #' @return Returns the percentage of \code{x} values above the given threshold #' #' @export #' @concept utilities #' #' @examples #' set.seed(42) #' PercentAbove(sample(1:100, 10), 75) #' PercentAbove <- function(x, threshold) { return(length(x = x[x > threshold]) / length(x = x)) } #' Calculate the percentage of all counts that belong to a given set of features #' #' This function enables you to easily calculate the percentage of all the counts belonging to a #' subset of the possible features for each cell. This is useful when trying to compute the percentage #' of transcripts that map to mitochondrial genes for example. The calculation here is simply the #' column sum of the matrix present in the counts slot for features belonging to the set divided by #' the column sum for all features times 100. #' #' @param object A Seurat object #' @param pattern A regex pattern to match features against #' @param features A defined feature set. If features are provided, the pattern matching is ignored #' @param col.name Name in meta.data column to assign. If this is not null, returns a Seurat object #' with the proportion of the feature set stored in metadata. #' @param assay Assay to use #' #' @return Returns a vector with the proportion of the feature set or, if col.name is set, returns a #' Seurat object with the proportion of the feature set stored in metadata.
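#'
#' A sketch of the explicit feature-set form (the hemoglobin gene names below are
#' illustrative and are not expected to be present in \code{pbmc_small}):
#' \preformatted{
#' pbmc_small[["percent.hb"]] <- PercentageFeatureSet(
#'   object = pbmc_small,
#'   features = c("HBA1", "HBA2", "HBB"),
#'   assay = "RNA"
#' )
#' }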
#' @importFrom Matrix colSums #' @export #' @concept utilities #' #' @examples #' data("pbmc_small") #' # Calculate the proportion of transcripts mapping to mitochondrial genes #' # NOTE: The pattern provided works for human gene names. You may need to adjust depending on your #' # system of interest #' pbmc_small[["percent.mt"]] <- PercentageFeatureSet(object = pbmc_small, pattern = "^MT-") #' PercentageFeatureSet <- function( object, pattern = NULL, features = NULL, col.name = NULL, assay = NULL ) { assay <- assay %||% DefaultAssay(object = object) if (!is.null(x = features) && !is.null(x = pattern)) { warn(message = "Both pattern and features provided. Pattern is being ignored.") } percent.featureset <- list() layers <- Layers(object = object, search = "counts") for (i in seq_along(along.with = layers)) { layer <- layers[i] features.layer <- features %||% grep( pattern = pattern, x = rownames(x = object[[assay]][layer]), value = TRUE) layer.data <- LayerData(object = object, assay = assay, layer = layer) layer.sums <- colSums(x = layer.data[features.layer, , drop = FALSE]) layer.perc <- layer.sums / object[[]][colnames(layer.data), paste0("nCount_", assay)] * 100 percent.featureset[[i]] <- layer.perc } percent.featureset <- unlist(percent.featureset) if (!is.null(x = col.name)) { object <- AddMetaData(object = object, metadata = percent.featureset, col.name = col.name) return(object) } return(percent.featureset) } # Pseudobulk feature expression by identity class # # Returns a representative expression value for each identity class # # @param object Seurat object # @param method Whether to 'average' (default) or 'aggregate' expression levels # @param assays Which assays to use. Default is all assays # @param features Features to analyze. Default is all features in the assay # @param return.seurat Whether to return the data as a Seurat object. Default is FALSE # @param group.by Categories for grouping (e.g, ident, replicate, celltype); 'ident' by default # @param add.ident (Deprecated) Place an additional label on each cell prior to pseudobulking # (very useful if you want to observe cluster pseudobulk values, separated by replicate, for example) # @param slot Slot(s) to use; if multiple slots are given, assumed to follow # the order of 'assays' (if specified) or object's assays # @param verbose Print messages and show progress bar # @param ... Arguments to be passed to methods such as \code{\link{CreateSeuratObject}} # # @return Returns a matrix with genes as rows, identity classes as columns. # If return.seurat is TRUE, returns an object of class \code{\link{Seurat}}. #' @method PseudobulkExpression Assay #' @importFrom SeuratObject .IsFutureSeurat #' @export # # PseudobulkExpression.Assay <- function( object, assay, category.matrix, features = NULL, layer = 'data', slot = deprecated(), verbose = TRUE, ... ) { if (is_present(arg = slot)) { f <- if (.IsFutureSeurat(version = '5.1.0')) { deprecate_stop } else if (.IsFutureSeurat(version = '5.0.0')) { deprecate_warn } else { deprecate_soft } f( when = '5.0.0', what = 'GetAssayData(slot = )', with = 'GetAssayData(layer = )' ) layer <- slot } data.use <- GetAssayData( object = object, layer = layer ) features.to.avg <- features %||% rownames(x = data.use) if (IsMatrixEmpty(x = data.use)) { warning( "The ", layer, " layer for the ", assay, " assay is empty. Skipping assay.", immediate. = TRUE, call. 
= FALSE) return(NULL) } bad.features <- setdiff(x = features.to.avg, y = rownames(x = data.use)) if (length(x = bad.features) > 0) { warning( "The following ", length(x = bad.features), " features were not found in the ", assay, " assay: ", paste(bad.features, collapse = ", "), call. = FALSE, immediate. = TRUE) } features.assay <- intersect(x = features.to.avg, y = rownames(x = data.use)) if (length(x = features.assay) > 0) { data.use <- data.use[features.assay, ] } else { warning("None of the features specified were found in the ", assay, " assay.", call. = FALSE, immediate. = TRUE) return(NULL) } if (layer == 'data') { data.use <- expm1(x = data.use) if (any(data.use == Inf)) { warning("Exponentiation yielded infinite values. `data` may not be log-normed.") } } data.return <- data.use %*% category.matrix return(data.return) } #' @method PseudobulkExpression StdAssay #' @export # # PseudobulkExpression.StdAssay <- function( object, assay, category.matrix, features = NULL, layer = 'data', slot = deprecated(), verbose = TRUE, ... ) { if (is_present(arg = slot)) { f <- if (.IsFutureSeurat(version = '5.1.0')) { deprecate_stop } else if (.IsFutureSeurat(version = '5.0.0')) { deprecate_warn } else { deprecate_soft } f( when = '5.0.0', what = 'GetAssayData(slot = )', with = 'GetAssayData(layer = )' ) layer <- slot } layers.set <- Layers(object = object, search = layer) features.to.avg <- features %||% rownames(x = object) bad.features <- setdiff(x = features.to.avg, y = rownames(x = object)) if (length(x = bad.features) > 0) { warning( "The following ", length(x = bad.features), " features were not found in the ", assay, " assay: ", paste(bad.features, collapse = ", "), call. = FALSE, immediate. = TRUE) } features.assay <- Reduce( f = intersect, x = c(list(features.to.avg), lapply(X = layers.set, FUN = function(l) rownames(object[l])) ) ) if (length(x = features.assay) == 0) { warning("None of the features specified were found in the ", assay, " assay.", call. = FALSE, immediate. = TRUE) return(NULL) } data.return <- as.sparse( x = matrix( data = 0, nrow = length(x = features.assay), ncol = ncol(x = category.matrix) ) ) for (i in seq_along(layers.set)) { data.i <- LayerData(object = object, layer = layers.set[i], features = features.assay ) if (layers.set[i] == "data") { data.use.i <- expm1(x = data.i) if (any(data.use.i == Inf)) { warning("Exponentiation yielded infinite values. `data` may not be log-normed.") } } else { data.use.i <- data.i } category.matrix.i <- category.matrix[colnames(x = data.i),] if (inherits(x = data.i, what = 'DelayedArray')) { stop("PseudobulkExpression does not support DelayedArray objects") } else { data.return.i <- as.sparse(x = data.use.i %*% category.matrix.i) } data.return <- data.return + data.return.i } return(data.return) } #' @method PseudobulkExpression Seurat #' @importFrom SeuratObject .IsFutureSeurat #' @export PseudobulkExpression.Seurat <- function( object, assays = NULL, features = NULL, return.seurat = FALSE, group.by = 'ident', add.ident = NULL, layer = 'data', slot = deprecated(), method = 'average', normalization.method = "LogNormalize", scale.factor = 10000, margin = 1, verbose = TRUE, ... ) { CheckDots(..., fxns = 'CreateSeuratObject') if (!is.null(x = add.ident)) { .Deprecated(msg = "'add.ident' is a deprecated argument. 
Please see documentation to see how to pass a vector to the 'group.by' argument to specify multiple grouping variables") group.by <- c('ident', add.ident) } if (!(method %in% c('average', 'aggregate'))) { stop("'method' must be either 'average' or 'aggregate'") } if (is_present(arg = slot)) { f <- if (.IsFutureSeurat(version = '5.1.0')) { deprecate_stop } else if (.IsFutureSeurat(version = '5.0.0')) { deprecate_warn } else { deprecate_soft } f( when = '5.0.0', what = 'AverageExpression(slot = )', with = 'AverageExpression(layer = )' ) layer <- slot } if (method == "average") { inform( message = "As of Seurat v5, we recommend using AggregateExpression to perform pseudo-bulk analysis.", .frequency = "once", .frequency_id = "AverageExpression" ) } object.assays <- .FilterObjects(object = object, classes.keep = c('Assay', 'Assay5')) assays <- assays %||% object.assays if (!all(assays %in% object.assays)) { assays <- assays[assays %in% object.assays] if (length(x = assays) == 0) { stop("None of the requested assays are present in the object") } else { warning("Requested assays that do not exist in object. Proceeding with existing assays only.") } } if (length(x = layer) == 1) { layer <- rep_len(x = layer, length.out = length(x = assays)) } else if (length(x = layer) != length(x = assays)) { stop("Number of layers provided does not match number of assays") } data <- FetchData(object = object, vars = rev(x = group.by)) #only keep meta-data columns that are in object group.by <- intersect(group.by, colnames(data)) data <- data[which(rowSums(x = is.na(x = data)) == 0), , drop = F] if (nrow(x = data) < ncol(x = object)) { inform("Removing cells with NA for 1 or more grouping variables") object <- subset(x = object, cells = rownames(x = data)) } for (i in 1:ncol(x = data)) { data[, i] <- as.factor(x = data[, i]) } num.levels <- sapply( X = 1:ncol(x = data), FUN = function(i) { length(x = levels(x = data[, i])) } ) if (any(num.levels == 1)) { message( paste0( "The following grouping variables have 1 value and will be ignored: ", paste0(colnames(x = data)[which(num.levels <= 1)], collapse = ", ") ) ) group.by <- colnames(x = data)[which(num.levels > 1)] data <- data[, which(num.levels > 1), drop = F] } category.matrix <- CreateCategoryMatrix(labels = data, method = method) #check if column names are numeric col.names <- colnames(category.matrix) if (any(!(grepl("^[a-zA-Z]|^\\.[^0-9]", col.names)))) { col.names <- ifelse( !(grepl("^[a-zA-Z]|^\\.[^0-9]", col.names)), paste0("g", col.names), col.names ) colnames(category.matrix) <- col.names inform( message = paste0("First group.by variable `", group.by[1], "` starts with a number, appending `g` to ensure valid variable names"), .frequency = "regularly", .frequency_id = "PseudobulkExpression" ) } data.return <- list() for (i in 1:length(x = assays)) { if (inherits(x = features, what = "list")) { features.i <- features[[i]] } else { features.i <- features } data.return[[assays[i]]] <- PseudobulkExpression( object = object[[assays[i]]], assay = assays[i], category.matrix = category.matrix, features = features.i, layer = layer[i], verbose = verbose, ... 
) } if (return.seurat) { op <- options(Seurat.object.assay.version = "v5", Seurat.object.assay.calcn = FALSE) on.exit(expr = options(op), add = TRUE) if (layer[1] == 'scale.data') { na.matrix <- as.matrix(x = data.return[[1]]) na.matrix[1:length(x = na.matrix)] <- NA #sum up counts to make seurat object summed.counts <- PseudobulkExpression( object = object[[assays[1]]], assay = assays[1], category.matrix = category.matrix, features = features[[1]], layer = "counts" ) toRet <- CreateSeuratObject( counts = summed.counts, project = if (method == "average") "Average" else "Aggregate", assay = names(x = data.return)[1], ... ) LayerData( object = toRet, layer = "scale.data", assay = names(x = data.return)[i] ) <- data.return[[1]] } else { toRet <- CreateSeuratObject( counts = data.return[[1]], project = if (method == "average") "Average" else "Aggregate", assay = names(x = data.return)[1], ... ) if (method == "aggregate") { LayerData( object = toRet, layer = "data", assay = names(x = data.return)[1] ) <- NormalizeData( as.matrix(x = data.return[[1]]), normalization.method = normalization.method, verbose = verbose ) } else { LayerData(object = toRet, layer = "data", assay = names(x = data.return)[1] ) <- log1p(x = as.matrix(x = data.return[[1]])) } } #for multimodal data if (length(x = data.return) > 1) { for (i in 2:length(x = data.return)) { if (layer[i] == 'scale.data') { summed.counts <- PseudobulkExpression( object = object[[assays[i]]], assay = assays[i], category.matrix = category.matrix, features = features[[i]], layer = "counts" ) toRet[[names(x = data.return)[i]]] <- CreateAssayObject(counts = summed.counts) LayerData( object = toRet, layer = "scale.data", assay = names(x = data.return)[i] ) <- data.return[[i]] } else { toRet[[names(x = data.return)[i]]] <- CreateAssayObject( counts = data.return[[i]], check.matrix = FALSE ) if (method == "aggregate") { LayerData( object = toRet, layer = "data", assay = names(x = data.return)[i] ) <- NormalizeData( as.matrix(x = data.return[[i]]), normalization.method = normalization.method, scale.factor = scale.factor, margin = margin, verbose = verbose ) } else { LayerData( object = toRet, layer = "data", assay = names(x = data.return)[i] ) <- log1p(x = as.matrix(x = data.return[[i]])) } } } } if (DefaultAssay(object = object) %in% names(x = data.return)) { DefaultAssay(object = toRet) <- DefaultAssay(object = object) if (layer[which(DefaultAssay(object = object) %in% names(x = data.return))[1]] != 'scale.data') { toRet <- ScaleData(object = toRet, verbose = verbose) } } #add meta-data based on group.by variables cells <- Cells(toRet) for (i in 1:length(group.by)) { if (group.by[i] != "ident") { v <- sapply( strsplit(cells, "_"), function(x) {return(x[i])} ) names(v) <- cells toRet <- AddMetaData(toRet, metadata = v, col.name = group.by[i] ) } } #set idents to pseudobulk variables Idents(toRet) <- cells #make orig.ident variable #orig.ident = ident if group.by includes `ident` #if not, orig.ident is equal to pseudobulk cell names if(any(group.by == "ident")) { i = which(group.by == "ident") v <- sapply( strsplit(cells, "_"), function(x) {return(x[i])} ) names(v) <- cells toRet <- AddMetaData(toRet, metadata = v, col.name = "orig.ident" ) } else { toRet$orig.ident <- cells } return(toRet) } else { return(data.return) } } #' Regroup idents based on meta.data info #' #' For cells in each ident, set a new identity based on the most common value #' of a specified metadata column. 
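#'
#' @details
#' Conceptually (a simplified sketch of the idea rather than the exact
#' implementation; \code{cells.in.ident} is a placeholder for the cells of one
#' identity class), the new identity is the majority vote over the chosen
#' metadata column:
#' \preformatted{
#' new.ident <- names(which.max(table(object[[metadata]][cells.in.ident, ])))
#' }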
#' #' @param object Seurat object #' @param metadata Name of metadata column #' @return A Seurat object with the active idents regrouped #' #' @export #' @concept utilities #' #' @examples #' data("pbmc_small") #' pbmc_small <- RegroupIdents(pbmc_small, metadata = "groups") #' RegroupIdents <- function(object, metadata) { for (ii in levels(x = object)) { ident.cells <- WhichCells(object = object, idents = ii) if (length(x = ident.cells) == 0) { next } new.ident <- names(x = which.max(x = table(object[[metadata]][ident.cells, ]))) if (is.null(x = new.ident)) { stop("Cluster ", ii, " contains only cells with NA values in the '", metadata, "' metadata column.") } Idents(object = object, cells = ident.cells) <- new.ident } return(object) } #' Save the Annoy index #' #' @param object A Neighbor object with the annoy index stored #' @param file Path to file to write index to #' #' @export #' @concept utilities #' SaveAnnoyIndex <- function( object, file ) { index <- Index(object = object) if (is.null(x = index)) { stop("Index for provided Neighbor object is NULL") } index$save(path.expand(path = file)) } #' Find the Quantile of Data #' #' Converts a quantile in character form to a number with respect to some data. #' String form for a quantile is represented as a number prefixed with #' \dQuote{q}; for example, the 10th quantile is \dQuote{q10} while the 2nd quantile is #' \dQuote{q2}. Will only take a quantile of non-zero data values. #' #' @param cutoff The cutoff to turn into a quantile #' @param data The data to find the quantile of #' #' @return The numerical representation of the quantile #' #' @importFrom stats quantile #' #' @export #' @concept utilities #' #' @examples #' set.seed(42) #' SetQuantile('q10', sample(1:100, 10)) #' SetQuantile <- function(cutoff, data) { if (grepl(pattern = '^q[0-9]{1,2}$', x = as.character(x = cutoff), perl = TRUE)) { this.quantile <- as.numeric(x = sub( pattern = 'q', replacement = '', x = as.character(x = cutoff) )) / 100 data <- unlist(x = data) data <- data[data > 0] cutoff <- quantile(x = data, probs = this.quantile) } return(as.numeric(x = cutoff)) } #' @rdname UpdateSymbolList #' #' @return \code{UpdateSymbolList}: \code{symbols} with updated symbols from #' HGNC's gene names database #' #' @export #' @concept utilities #' #' @examples #' \dontrun{ #' UpdateSymbolList(symbols = cc.genes$s.genes) #' } #' UpdateSymbolList <- function( symbols, timeout = 10, several.ok = FALSE, verbose = TRUE, ... ) { new.symbols <- suppressWarnings(expr = GeneSymbolThesarus( symbols = symbols, timeout = timeout, several.ok = several.ok, search.types = 'prev_symbol', verbose = verbose, ... )) if (length(x = new.symbols) < 1) { warning("No updated symbols found", call. = FALSE, immediate.
= TRUE) } else { if (verbose) { message("Found updated symbols for ", length(x = new.symbols), " symbols") x <- sapply(X = new.symbols, FUN = paste, collapse = ', ') message(paste(names(x = x), x, sep = ' -> ', collapse = '\n')) } for (sym in names(x = new.symbols)) { index <- which(x = symbols == sym) symbols <- append( x = symbols[-index], values = new.symbols[[sym]], after = index - 1 ) } } return(symbols) } #%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% # Methods for Seurat-defined generics #%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% #%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% # Methods for R-defined generics #%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% #' @inheritParams base::as.data.frame #' #' @return \code{as.data.frame.Matrix}: A data frame representation of the S4 Matrix #' #' @importFrom Matrix as.matrix #' #' @rdname as.sparse #' @concept utilities #' @export #' @method as.data.frame Matrix #' as.data.frame.Matrix <- function( x, row.names = NULL, optional = FALSE, ..., stringsAsFactors = getOption(x = "stringsAsFactors", default = FALSE) ) { return(as.data.frame( x = as.matrix(x = x), row.names = row.names, optional = optional, stringsAsFactors = stringsAsFactors, ... )) } #%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% # Internal #%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% #' Create Abbreviations #' #' @param x A character vector #' @param digits Include digits in the abbreviation #' #' @return Abbreviated versions of \code{x} #' #' @keywords internal #' #' @examples #' .Abbrv(c('HelloWorld, 'LetsGo3', 'tomato')) #' .Abbrv(c('HelloWorld, 'LetsGo3', 'tomato'), digits = FALSE) #' .Abbrv('Wow3', digits = FALSE) #' #' @noRd #' .Abbrv <- function(x, digits = TRUE) { pattern <- ifelse(test = isTRUE(x = digits), yes = '[A-Z0-9]+', no = '[A-Z]+') y <- vapply( X = regmatches(x = x, m = gregexec(pattern = pattern, text = x)), FUN = paste, FUN.VALUE = character(length = 1L), collapse = '' ) na <- nchar(x = y) <= 1L y[na] <- x[na] return(tolower(x = y)) } .AsList <- function(x) { x <- as.list(x = x) return(sapply( X = unique(x = names(x = x)), FUN = function(i) { return(unlist( x = x[which(x = names(x = x) == i)], recursive = FALSE, use.names = FALSE )) }, simplify = FALSE, USE.NAMES = TRUE )) } #' @importFrom ggplot2 cut_number #' .Cut <- function(min, max, n) { breaks <- levels(x = cut_number(x = c(min, max), n = n)) breaks <- gsub(pattern = '.*,', replacement = '', x = breaks) breaks <- gsub(pattern = ']$', replacement = '', x = breaks) as.numeric(x = breaks) } .FindE <- function(x) { x <- as.character(x = x) if (grepl(pattern = 'e', x = x)) { return(as.integer(x = gsub(pattern = '.*e', replacement = '', x = x))) } else if (grepl(pattern = '^0\\.', x = x)) { x <- unlist(x = strsplit( x = gsub(pattern = '.*\\.', replacement = '', x = x), split = '' )) idx <- which(x = x != '0') return(-idx) } stop("Invalid format") } #' @importFrom SeuratObject Boundaries #' .BoundariesByImage <- function(object, fov, boundaries) { if (!is.list(x = boundaries)) { if (is.null(x = names(x = boundaries))) { boundaries <- rep_len(x = list(boundaries), length.out = length(x = fov)) names(x = boundaries) <- fov } else { boundaries <- .AsList(x = boundaries) } } if (any(!nchar(x = names(x = boundaries)))) { missing <- setdiff(x = fov, y = names(x = boundaries)) idx <- which(x = !nchar(x = names(x = 
boundaries))) boundaries <- c( boundaries[intersect(x = names(x = boundaries), y = fov)], rep_len(x = boundaries[idx], length.out = length(x = missing)) ) names(x = boundaries)[!nchar(x = names(x = boundaries))] <- missing } if (any(!fov %in% names(x = boundaries))) { for (i in setdiff(x = fov, y = names(x = boundaries))) { boundaries[[i]] <- Boundaries(object = object[[i]])[1L] } } fov <- union(x = fov, y = names(x = boundaries)) if (length(x = boundaries) != length(x = fov)) { fov <- intersect(x = fov, y = names(x = boundaries)) } boundaries <- boundaries[fov] for (i in fov) { boundaries[[i]] <- Filter( f = function(x) { return(x %in% Boundaries(object = object[[i]]) || is_na(x = x)) }, x = boundaries[[i]] ) } boundaries <- Filter(f = length, x = boundaries) return(boundaries) } # Generate chunk points # # @param dsize How big is the data being chunked # @param csize How big should each chunk be # # @return A matrix where each column is a chunk, row 1 is start points, row 2 is end points # ChunkPoints <- function(dsize, csize) { return(vapply( X = 1L:ceiling(x = dsize / csize), FUN = function(i) { return(c( start = (csize * (i - 1L)) + 1L, end = min(csize * i, dsize) )) }, FUN.VALUE = numeric(length = 2L) )) } # L2 normalize the columns (or rows) of a given matrix # @param mat Matrix to cosine normalize # @param MARGIN Perform normalization over rows (1) or columns (2) # # # @return returns l2-normalized matrix # # L2Norm <- function(mat, MARGIN = 1){ normalized <- Sweep( x = mat, MARGIN = MARGIN, STATS = apply( X = mat, MARGIN = MARGIN, FUN = function(x){ sqrt(x = sum(x ^ 2)) } ), FUN = "/" ) normalized[!is.finite(x = normalized)] <- 0 return(normalized) } # Check the use of ... # # @param ... Arguments passed to a function that fall under ... # @param fxns A list/vector of functions or function names # # @return ... # # @importFrom utils argsAnywhere getAnywhere #' @importFrom utils isS3stdGeneric methods argsAnywhere isS3method # # @examples # CheckDots <- function(..., fxns = NULL) { args.names <- names(x = list(...)) if (length(x = list(...)) == 0) { return(invisible(x = NULL)) } if (is.null(x = args.names)) { stop("No named arguments passed") } if (length(x = fxns) == 1) { fxns <- list(fxns) } for (f in fxns) { if (!(is.character(x = f) || is.function(x = f))) { stop("CheckDots only works on characters or functions, not ", class(x = f)) } } fxn.args <- suppressWarnings(expr = sapply( X = fxns, FUN = function(x) { x <- tryCatch( expr = if (isS3stdGeneric(f = x)) { as.character(x = methods(generic.function = x)) } else { x }, error = function(...) { return(x) } ) x <- if (is.character(x = x)) { sapply(X = x, FUN = argsAnywhere, simplify = FALSE, USE.NAMES = TRUE) } else if (length(x = x) <= 1) { list(x) } return(sapply( X = x, FUN = function(f) { return(names(x = formals(fun = f))) }, simplify = FALSE, USE.NAMES = TRUE )) }, simplify = FALSE, USE.NAMES = TRUE )) fxn.args <- unlist(x = fxn.args, recursive = FALSE) fxn.null <- vapply(X = fxn.args, FUN = is.null, FUN.VALUE = logical(length = 1L)) if (all(fxn.null) && !is.null(x = fxns)) { stop("None of the functions passed could be found") } else if (any(fxn.null)) { warning( "The following functions passed could not be found: ", paste(names(x = which(x = fxn.null)), collapse = ', '), call. = FALSE, immediate. 
= TRUE ) fxn.args <- Filter(f = Negate(f = is.null), x = fxn.args) } dfxns <- vector(mode = 'logical', length = length(x = fxn.args)) names(x = dfxns) <- names(x = fxn.args) for (i in 1:length(x = fxn.args)) { dfxns[i] <- any(grepl(pattern = '...', x = fxn.args[[i]], fixed = TRUE)) } if (any(dfxns)) { dfxns <- names(x = which(x = dfxns)) if (any(nchar(x = dfxns) > 0)) { fx <- vapply( X = Filter(f = nchar, x = dfxns), FUN = function(x) { if (isS3method(method = x)) { x <- unlist(x = strsplit(x = x, split = '\\.')) x <- x[length(x = x) - 1L] } return(x) }, FUN.VALUE = character(length = 1L) ) message( "The following functions and any applicable methods accept the dots: ", paste(unique(x = fx), collapse = ', ') ) if (any(nchar(x = dfxns) < 1)) { message( "In addition, there is/are ", length(x = Filter(f = Negate(f = nchar), x = dfxns)), " other function(s) that accept(s) the dots" ) } } else { message("There is/are ", length(x = dfxns), 'function(s) that accept(s) the dots') } } else { unused <- Filter( f = function(x) { return(!x %in% unlist(x = fxn.args)) }, x = args.names ) if (length(x = unused) > 0) { msg <- paste0( "The following arguments are not used: ", paste(unused, collapse = ', ') ) switch( EXPR = getOption(x = "Seurat.checkdots"), "warn" = warning(msg, call. = FALSE, immediate. = TRUE), "stop" = stop(msg), "silent" = NULL, stop("Invalid Seurat.checkdots option. Please choose one of warn, stop, silent") ) unused.hints <- sapply(X = unused, FUN = OldParamHints) names(x = unused.hints) <- unused unused.hints <- na.omit(object = unused.hints) if (length(x = unused.hints) > 0) { message( "Suggested parameter: ", paste(unused.hints, "instead of", names(x = unused.hints), collapse = '; '), "\n" ) } } } } # Call gc() to perform garbage collection # CheckGC <- function() { if (getOption(x = "Seurat.memsafe")) { gc(verbose = FALSE) } } # Check a list of objects for duplicate cell names # # @param object.list List of Seurat objects # @param verbose Print message about renaming # @param stop Error out if any duplicate names exist # # @return Returns list of objects with duplicate cells renamed to be unique # # @keywords internal # # @noRd # CheckDuplicateCellNames <- function(object.list, verbose = TRUE, stop = FALSE) { cell.names <- unlist(x = lapply(X = object.list, FUN = colnames)) if (any(duplicated(x = cell.names))) { if (stop) { stop("Duplicate cell names present across objects provided.") } if (verbose) { warning("Some cell names are duplicated across objects provided. Renaming to enforce unique cell names.") } object.list <- lapply( X = 1:length(x = object.list), FUN = function(x) { return(RenameCells( object = object.list[[x]], new.names = paste0(Cells(x = object.list[[x]]), "_", x) )) } ) } return(object.list) } # Create an empty dummy assay to replace existing assay #' @importFrom Matrix sparseMatrix CreateDummyAssay <- function(assay) { cm <- sparseMatrix( i = {}, j = {}, dims = c(nrow(x = assay), ncol(x = assay)) ) cm <- as.sparse(x = cm) rownames(x = cm) <- rownames(x = assay) colnames(x = cm) <- colnames(x = assay) return(CreateAssayObject( counts = cm, check.matrix = FALSE )) } # Extract delimiter information from a string. # # Parses a string (usually a cell name) and extracts fields based on a delimiter # # @param string String to parse. # @param field Integer(s) indicating which field(s) to extract. Can be a vector multiple numbers. # @param delim Delimiter to use, set to underscore by default. 
# # @return A new string, that parses out the requested fields, and (if multiple), rejoins them with the same delimiter # # @export # # @examples # ExtractField(string = 'Hello World', field = 1, delim = '_') # ExtractField <- function(string, field = 1, delim = "_") { fields <- as.numeric(x = unlist(x = strsplit(x = as.character(x = field), split = ","))) if (length(x = fields) == 1) { return(strsplit(x = string, split = delim)[[1]][field]) } return(paste(strsplit(x = string, split = delim)[[1]][fields], collapse = delim)) } # Resize GenomicRanges upstream and or downstream # from https://support.bioconductor.org/p/78652/ # Extend <- function(x, upstream = 0, downstream = 0) { if (any(GenomicRanges::strand(x = x) == "*")) { warning("'*' ranges were treated as '+'") } on_plus <- GenomicRanges::strand(x = x) == "+" | GenomicRanges::strand(x = x) == "*" new_start <- GenomicRanges::start(x = x) - ifelse(test = on_plus, yes = upstream, no = downstream) new_end <- GenomicRanges::end(x = x) + ifelse(test = on_plus, yes = downstream, no = upstream) IRanges::ranges(x = x) <- IRanges::IRanges(start = new_start, end = new_end) x <- GenomicRanges::trim(x = x) return(x) } # Interleave vectors together # # @param ... Vectors to be interleaved # # @return A vector with the values from each vector in ... interleaved # Interleave <- function(...) { return(as.vector(x = t(x = as.data.frame(x = list(...))))) } # Check if a matrix is empty # # Takes a matrix and asks if it's empty (either 0x0 or 1x1 with a value of NA) # # @param x A matrix # # @return Whether or not \code{x} is empty # IsMatrixEmpty <- function(x) { matrix.dims <- dim(x = x) matrix.na <- all(matrix.dims == 1) && all(is.na(x = x)) return(all(matrix.dims == 0) || matrix.na) } # Check if externalptr is null # From https://stackoverflow.com/questions/26666614/how-do-i-check-if-an-externalptr-is-null-from-within-r # is.null.externalptr <- function(pointer) { stopifnot(is(pointer, "externalptr")) .Call("isnull", pointer) } # Check whether an assay has been processed by sctransform # # @param assay assay to check # # @return Boolean # IsSCT <- function(assay) { if (is.list(x = assay)) { sct.check <- lapply(X = assay, FUN = function(x) { return(!is.null(x = Misc(object = x, slot = 'vst.out')) | !is.null(x = Misc(object = x, slot = 'vst.set')) | inherits(x = x, what = "SCTAssay")) }) return(unlist(x = sct.check)) } return(!is.null(x = Misc(object = assay, slot = 'vst.out')) | !is.null(x = Misc(object = assay, slot = 'vst.set')) | inherits(x = assay, what = "SCTAssay")) } # Check whether a vst.out is from sctransform # # @param vst.out a sct model from sctransform # # @return Boolean # IsVSTout <- function(vst.out) { vst.element <- c("model_str", "model_pars_fit", "cell_attr" ) vst.check <- setdiff(x = vst.element, y = names(x = vst.out)) if (length(x = setdiff(x = vst.element, y = names(x = vst.out))) == 0) { vst.check <- TRUE } else { vst.check <- FALSE } return(vst.check) } # Calculate euclidean distance the x and y, # and subtract the nearest neighbors of x distance to keep local connectivity # It is used in FindModalityWeights to calculate the with and cross modality distance impute_dist <- function(x, y, nearest.dist) { dist <- sqrt(x = rowSums(x = (x - y)**2)) - nearest.dist dist <- ReLu(x = dist) return(dist) } # Check the length of components of a list # # @param values A list whose components should be checked # @param cutoff A minimum value to check for # # @return a vector of logicals # LengthCheck <- function(values, cutoff = 0) { 
return(vapply( X = values, FUN = function(x) { return(length(x = x) > cutoff) }, FUN.VALUE = logical(1) )) } # Function to map values in a vector `v` as defined in `from`` to the values # defined in `to`. # # @param v vector of values to map # @param from vector of original values # @param to vector of values to map original values to (should be of equal # length as from) # @return returns vector of mapped values # MapVals <- function(v, from, to) { if (length(x = from) != length(x = to)) { stop("from and to vectors are not the equal length.") } vals.to.match <- match(x = v, table = from) vals.to.match.idx <- !is.na(x = vals.to.match) v[vals.to.match.idx] <- to[vals.to.match[vals.to.match.idx]] return(v) } # Independently shuffle values within each row of a matrix # # Creates a matrix where correlation structure has been removed, but overall values are the same # # @param x Matrix to shuffle # # @return Returns a scrambled matrix, where each row is shuffled independently # #' @importFrom stats runif # # @export # # @examples # mat <- matrix(data = rbinom(n = 25, size = 20, prob = 0.2 ), nrow = 5) # mat # MatrixRowShuffle(x = mat) # MatrixRowShuffle <- function(x) { x2 <- x x2 <- t(x = x) ind <- order(c(col(x = x2)), runif(n = length(x = x2))) x2 <- matrix( data = x2[ind], nrow = nrow(x = x), ncol = ncol(x = x), byrow = TRUE ) return(x2) } # Reverse the vector x and return the value at the Nth index. If N is larger # than the length of the vector, return the last value in the reversed vector. # # @param x vector of interest # @param N index in reversed vector # # @return returns element at given index # MaxN <- function(x, N = 2){ len <- length(x) if (N > len) { warning('N greater than length(x). Setting N=length(x)') N <- length(x) } sort(x, partial = len - N + 1)[len - N + 1] } # Given a range from cut, compute the mean # # @x range from cut as a string (e.g. (10, 20] ) # @return returns a numeric with the mean of the range # MeanRange <- function(x) { left <- gsub(pattern = "\\]", replacement = "", x = sub(pattern = "\\([[:digit:]\\.e+]*,", x = x, replacement = "")) right <- gsub(pattern = "\\(", replacement = "", x = sub(pattern = ",[[:digit:]\\.e+]*]", x = x, replacement = "")) return(mean(c(as.numeric(x = left), as.numeric(x = right)))) } # Melt a data frame # # @param x A data frame # # @return A molten data frame # Melt <- function(x) { if (!is.data.frame(x = x)) { x <- as.data.frame(x = x) } return(data.frame( rows = rep.int(x = rownames(x = x), times = ncol(x = x)), cols = unlist(x = lapply(X = colnames(x = x), FUN = rep.int, times = nrow(x = x))), vals = unlist(x = x, use.names = FALSE) )) } # Modify parameters in calling environment # # Used exclusively for helper parameter validation functions # # @param param name of parameter to change # @param value new value for parameter # ModifyParam <- function(param, value) { # modify in original function environment env1 <- sys.frame(which = length(x = sys.frames()) - 2) env1[[param]] <- value # also modify in validation function environment env2 <- sys.frame(which = length(x = sys.frames()) - 1) env2[[param]] <- value } # Give hints for old parameters and their newer counterparts # # This is a non-exhaustive list. 
If your function isn't working properly based # on the parameters you give it, please read the documentation for your function # # @param param A vector of parameters to get hints for # # @return Parameter hints for the specified parameters # OldParamHints <- function(param) { param.conversion <- c( 'raw.data' = 'counts', 'min.genes' = 'min.features', 'features.plot' = 'features', 'pc.genes' = 'features', 'do.print' = 'verbose', 'genes.print' = 'nfeatures.print', 'pcs.print' = 'ndims.print', 'pcs.use' = 'dims', 'reduction.use' = 'reduction', 'cells.use' = 'cells', 'do.balanced' = 'balanced', 'display.progress' = 'verbose', 'print.output' = 'verbose', 'dims.use' = 'dims', 'reduction.type' = 'reduction', 'y.log' = 'log', 'cols.use' = 'cols', 'assay.use' = 'assay' ) return(param.conversion[param]) } # Check if a web resource is available # # @param url A URL # @param strict Perform a strict web availability test # @param seconds Timeout in seconds # # @return \code{TRUE} if \url{is available} otherwise \code{FALSE} # #' @importFrom httr GET status_code timeout # # @keywords internal # Online <- function(url, strict = FALSE, seconds = 5L) { if (isTRUE(x = strict)) { code <- 200L comp <- identical } else { code <- 404L comp <- Negate(f = identical) } request <- tryCatch( expr = GET(url = url, timeout(seconds = seconds)), error = function(err) { code <- if (grepl(pattern = '^Timeout was reached', x = err$message)) { 408L } else { 404L } return(code) } ) return(comp(x = status_code(x = request), y = code)) } # Parenting parameters from one environment to the next # # This function allows one to modify a parameter in a parent environment # The primary use of this is to ensure logging functions store correct parameters # if they've been modified by a child function or method # # @param parent.find Regex pattern of name of parent function call to modify. # For example, this can be the class name for a method that was dispatched previously # @param ... Parameter names and values to parent; both name and value must be supplied # in the standard \code{name = value} format; any number of name/value pairs can be specified # # @return No return, modifies parent environment directly # # @examples # Parenting(parent.find = 'Seurat', features = features[features > 7]) # Parenting <- function(parent.find = 'Seurat', ...) { calls <- as.character(x = sys.calls()) calls <- lapply( X = strsplit(x = calls, split = '(', fixed = TRUE), FUN = '[', 1 ) parent.index <- grep(pattern = parent.find, x = calls) if (length(x = parent.index) != 1) { warning( "Cannot find a parent environment called ", parent.find, immediate. = TRUE, call. = FALSE ) } else { to.parent <- list(...) if (length(x = to.parent) == 0) { warning("Nothing to parent", immediate. = TRUE, call. = FALSE) } else if (is.null(x = names(x = to.parent))) { stop("All input must be in a key = value pair") } else if (length(x = Filter(f = nchar, x = names(x = to.parent))) != length(x = to.parent)) { stop("All inputs must be named") } else { parent.environ <- sys.frame(which = parent.index) for (i in 1:length(x = to.parent)) { parent.environ[[names(x = to.parent)[i]]] <- to.parent[[i]] } } } } # Generate a random name # # Make a name from randomly sampled lowercase letters, # pasted together with no spaces or other characters # # @param length How long should the name be # @param ... 
Extra parameters passed to sample # # @return A character with nchar == length of randomly sampled letters # # @seealso \code{\link{sample}} # RandomName <- function(length = 5L, ...) { CheckDots(..., fxns = 'sample') return(paste(sample(x = letters, size = length, ...), collapse = '')) } # Rectified linear units function. Calculate positive part of its argument # The input can be a vector and a matrix ReLu <- function(x) { x[x < 0] <- 0 return(x) } # Remove the last field from a string # # Parses a string (usually a cell name) and removes the last field based on a delimter # # @param string String to parse # @param delim Delimiter to use, set to underscore by default. # # @return A new string sans the last field # RemoveLastField <- function(string, delim = "_") { ss <- strsplit(x = string, split = delim)[[1]] if (length(x = ss) == 1) { return(string) } else { return(paste(ss[1:(length(x = ss)-1)], collapse = delim)) } } # Calculate row mean of a sparse matrix # @param mat sparse matrix # @return A vector of row mean # RowMeanSparse <- function(mat) { mat <- RowSparseCheck(mat = mat) output <- row_mean_dgcmatrix( x = slot(object = mat, name = "x"), i = slot(object = mat, name = "i"), rows = nrow(x = mat), cols = ncol(x = mat) ) names(x = output) <- rownames(x = mat) return(output) } # Calculate row sum of a sparse matrix # # @param mat sparse matrix # @return A vector of row sum # RowSumSparse <- function(mat) { mat <- RowSparseCheck(mat = mat) output <- row_sum_dgcmatrix( x = slot(object = mat, name = "x"), i = slot(object = mat, name = "i"), rows = nrow(x = mat), cols = ncol(x = mat) ) names(x = output) <- rownames(x = mat) return(output) } # Calculate row variance of a sparse matrix # # @param mat sparse matrix # @return A vector of row variance # RowVarSparse <- function(mat) { mat <- RowSparseCheck(mat = mat) output <- row_var_dgcmatrix( x = slot(object = mat, name = "x"), i = slot(object = mat, name = "i"), rows = nrow(x = mat), cols = ncol(x = mat) ) names(x = output) <- rownames(x = mat) return(output) } # Check if the input matrix is dgCMatrix # # @param mat sparse matrix # @return A dgCMatrix # RowSparseCheck <- function(mat) { if (!inherits(x = mat, what = "sparseMatrix")) { stop("Input should be sparse matrix") } else if (!is(object = mat, class2 = "dgCMatrix")) { warning("Input matrix is converted to dgCMatrix.") mat <- as.sparse(x = mat) } return(mat) } # Sweep out array summaries # # Reimplmentation of \code{\link[base]{sweep}} to maintain compatability with # both R 3.X and 4.X # # @inheritParams base::sweep # @param x an array. # # @seealso \code{\link[base]{sweep}} # Sweep <- function(x, MARGIN, STATS, FUN = '-', check.margin = TRUE, ...) { if (any(grepl(pattern = 'X', x = names(x = formals(fun = sweep))))) { return(sweep( X = x, MARGIN = MARGIN, STATS = STATS, FUN = FUN, check.margin = check.margin, ... )) } else { return(sweep( x = x, MARGIN = MARGIN, STATS = STATS, FUN = FUN, check.margin = check.margin, ... 
)) } } # Get program paths in a system-agnostic way # # @param progs A vector of program names # @param error Throw an error if any programs are not found # @param add.exe Add '.exe' extension to program names that don't have it # # @return A named vector of program paths; missing programs are returned as # \code{NA} if \code{error = FALSE} # #' @importFrom tools file_ext # SysExec <- function( progs, error = ifelse(test = length(x = progs) == 1, yes = TRUE, no = FALSE), add.exe = .Platform$OS.type == 'windows' ) { cmd <- ifelse( test = .Platform$OS.type == 'windows', yes = 'where.exe', no = 'which' ) if (add.exe) { missing.exe <- file_ext(x = progs) != 'exe' progs[missing.exe] <- paste0(progs[missing.exe], '.exe') } paths <- sapply( X = progs, FUN = function(x) { return(tryCatch( expr = system2(command = cmd, args = x, stdout = TRUE)[1], warning = function(...) { return(NA_character_) } )) } ) if (error && any(is.na(x = paths))) { stop( "Could not find the following programs: ", paste(names(x = paths[is.na(x = paths)]), collapse = ', '), call. = FALSE ) } return(paths) } # Try to convert x to numeric, if NA's introduced return x as is # ToNumeric <- function(x){ # check for x:y range if (is.numeric(x = x)) { return(x) } if (length(x = unlist(x = strsplit(x = x, split = ":"))) == 2) { num <- unlist(x = strsplit(x = x, split = ":")) return(num[1]:num[2]) } num <- suppressWarnings(expr = as.numeric(x = x)) if (!is.na(x = num)) { return(num) } return(x) } # Merge a list of sparse matrixes #' @importFrom Matrix summary sparseMatrix MergeSparseMatrices <- function(...) { colname.new <- character() rowname.new <- character() x <- vector() i <- numeric() j <- numeric() for (mat in list(...)) { colname.old <- colnames(x = mat) rowname.old <- rownames(x = mat) # does not check if there are overlapping cells colname.new <- union(x = colname.new, y = colname.old) rowname.new <- union(x = rowname.new, y = rowname.old) colindex.new <- match(x = colname.old, table = colname.new) rowindex.new <- match(x = rowname.old, table = rowname.new) ind <- summary(object = mat) # Expand the list of indices and x i <- c(i, rowindex.new[ind[,1]]) j <- c(j, colindex.new[ind[,2]]) x <- c(x, ind[,3]) } merged.mat <- sparseMatrix(i=i, j=j, x=x, dims=c(length(rowname.new), length(colname.new)), dimnames=list(rowname.new, colname.new)) return (merged.mat) } # cross product from delayed array # crossprod_DelayedAssay <- function(x, y, block.size = 1e8) { # perform t(x) %*% y in blocks for y if (!inherits(x = y, 'DelayedMatrix')) { stop('y should a DelayedMatrix') } if (nrow(x) != nrow(y)) { stop('row of x and y should be the same') } sparse <- DelayedArray::is_sparse(x = y) suppressMessages(expr = DelayedArray::setAutoBlockSize(size = block.size)) cells.grid <- DelayedArray::colAutoGrid(x = y) product.list <- list() for (i in seq_len(length.out = length(x = cells.grid))) { vp <- cells.grid[[i]] block <- DelayedArray::read_block(x = y, viewport = vp, as.sparse = sparse) if (sparse) { block <- as(object = block, Class = 'dgCMatrix') } else { block <- as(object = block, Class = 'Matrix') } product.list[[i]] <- as.matrix(t(x) %*% block) } product.mat <- matrix(data = unlist(product.list), nrow = ncol(x) , ncol = ncol(y)) colnames(product.mat) <- colnames(y) rownames(product.mat) <- colnames(x) return(product.mat) } # cross product from BPCells # crossprod_BPCells <- function(x, y) { # perform t(x) %*% y, y is from BPCells product.mat <- t(x) %*% y colnames(product.mat) <- colnames(y) rownames(product.mat) <- colnames(x) 
return(product.mat) } # Nonzero-element version of sweep # SweepNonzero <- function( x, MARGIN, STATS, FUN = "/" ) { if (!inherits(x = x, what = 'dgCMatrix')) { stop('input should be dgCMatrix. eg: x <- as(x, "CsparseMatrix")') } if (dim(x = x)[MARGIN] != length(STATS)){ warning("Length of STATS is not equal to dim(x)[MARGIN]") } fun <- match.fun(FUN) if (MARGIN == 1) { idx <- x@i + 1 x@x <- fun(x@x, STATS[idx]) } else if (MARGIN == 2) { x <- as(x, "RsparseMatrix") idx <- x@j + 1 x@x <- fun(x@x, STATS[idx]) x <- as(x, "CsparseMatrix") } return(x) } #' Create a one-hot matrix for a given set of labels #' #' @param labels A vector of labels #' @param method Method to aggregate cells with the same label. Either 'aggregate' or 'average' #' @param cells.name A vector of cell names #' #' @importFrom Matrix colSums sparse.model.matrix #' @importFrom stats as.formula #' @export #' CreateCategoryMatrix <- function( labels, method = c('aggregate', 'average'), cells.name = NULL ) { method <- match.arg(arg = method) if (is.null(dim(labels))) { if (length(x = unique(x = labels)) == 1) { data <- matrix(nrow = length(x = labels), ncol = 0) } else { data <- cbind(labels = labels) } } else { data <- labels } cells.name <- cells.name %||% rownames(data) if (!is.null(cells.name) & length(cells.name) != nrow(data)) { stop('length of cells.name should be equal to the number of rows of the input labels') } if (ncol(x = data) == 0) { message("All grouping variables have 1 value only. Computing across all cells.") category.matrix <- matrix( data = 1, nrow = nrow(x = data), dimnames = list(cells.name, 'all') ) if (method == 'average') { category.matrix <- category.matrix / sum(category.matrix) } return(category.matrix) } group.by <- colnames(x = data) category.matrix <- sparse.model.matrix(object = as.formula( object = paste0( '~0+', paste0( "data[,", 1:length(x = group.by), "]", collapse = ":" ) ) )) colsums <- colSums(x = category.matrix) category.matrix <- category.matrix[, colsums > 0] colsums <- colsums[colsums > 0] if (method =='average') { category.matrix <- SweepNonzero( x = category.matrix, MARGIN = 2, STATS = colsums, FUN = "/") } if (any(grepl(pattern = "_", x = colnames(x = category.matrix) ))) { inform( message = "Names of identity class contain underscores ('_'), replacing with dashes ('-')", .frequency = "regularly", .frequency_id = "CreateCategoryMatrix" ) colnames(x = category.matrix) <- gsub(pattern = '_', replacement = '-', x = colnames(x = category.matrix) ) } colnames(x = category.matrix) <- unname(sapply( X = colnames(x = category.matrix), FUN = function(name) { name <- gsub(pattern = "data\\[, [1-9]*\\]", replacement = "", x = name) return(paste0(rev(x = unlist(x = strsplit(x = name, split = ":"))), collapse = "_")) })) rownames(category.matrix) <- cells.name return(category.matrix) } #' Construct an assay for spatial niche analysis #' #' This function will construct a new assay where each feature is a #' cell label. The values represent the sum of a particular cell label #' neighboring a given cell.
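#'
#' @details
#' As a simplified sketch of the idea (placeholder names, not the exact
#' internal code): with \code{nn.adjacency} a binary k-nearest-neighbor
#' adjacency matrix over cells and \code{onehot.labels} a cell-by-label
#' one-hot matrix, the niche counts correspond to
#' \preformatted{
#' niche.counts <- as.matrix(nn.adjacency \%*\% onehot.labels)
#' }
#' so that entry (cell, label) gives the number of the cell's spatial
#' neighbors carrying that label; cells are then clustered on these
#' profiles (here, with k-means) to define niches.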
#' #' @param object A Seurat object #' @param fov FOV object to gather cell positions from #' @param group.by Cell classifications to count in spatial neighborhood #' @param assay Name for spatial neighborhoods assay #' @param neighbors.k Number of neighbors to consider for each cell #' @param niches.k Number of clusters to return based on the niche assay #' #' @importFrom stats kmeans #' @return Seurat object containing a new assay #' @concept clustering #' @export #' BuildNicheAssay <- function( object, fov, group.by, assay = "niche", neighbors.k = 20, niches.k = 4 ) { # find neighbors based on tissue position coords <- GetTissueCoordinates(object[[fov]], which = "centroids") cells <- coords$cell rownames(coords) <- cells coords <- as.matrix(coords[ , c("x", "y")]) neighbors <- FindNeighbors(coords, k.param = neighbors.k) neighbors$nn <- neighbors$nn[Cells(object), Cells(object)] # build cell x cell type matrix ct.mtx <- matrix( data = 0, nrow = length(cells), ncol = length(unlist(unique(object[[group.by]]))) ) rownames(ct.mtx) <- cells colnames(ct.mtx) <- unique(unlist(object[[group.by]])) cts <- object[[group.by]] for (i in 1:length(cells)) { ct <- as.character(cts[cells[[i]], ]) ct.mtx[cells[[i]], ct] <- 1 } # create niche assay sum.mtx <- as.matrix(neighbors$nn %*% ct.mtx) niche.assay <- CreateAssayObject(counts = t(sum.mtx)) object[[assay]] <- niche.assay DefaultAssay(object) <- assay # cluster niches assay object <- ScaleData(object) results <- kmeans( x = t(object[[assay]]@scale.data), centers = niches.k, nstart = 30 ) object$niches <- results[["cluster"]] return(object) } Seurat/R/generics.R0000644000176200001440000006470114525500037013651 0ustar liggesusers#' @include reexports.R #' NULL #%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% # Generics #%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% #' Add info to anchor matrix #' #' @param anchors An \code{\link{AnchorSet}} object #' @param vars Variables to pull for each object via FetchData #' @param slot Slot to pull feature data for #' @param assay Specify the Assay per object if annotating with expression data #' @param ... Arguments passed to other methods # #' @return Returns the anchor dataframe with additional columns for annotation #' metadata #' #' @export #' AnnotateAnchors <- function(anchors, vars, slot, ...) { UseMethod(generic = 'AnnotateAnchors', object = anchors) } #' Convert objects to CellDataSet objects #' #' @param x An object to convert to class \code{CellDataSet} #' @param ... Arguments passed to other methods #' #' @rdname as.CellDataSet #' @export as.CellDataSet #' as.CellDataSet <- function(x, ...) { UseMethod(generic = 'as.CellDataSet', object = x) } #' Convert objects to SingleCellExperiment objects #' #' @param x An object to convert to class \code{SingleCellExperiment} #' @param ... Arguments passed to other methods #' #' @rdname as.SingleCellExperiment #' @export as.SingleCellExperiment #' as.SingleCellExperiment <- function(x, ...) { UseMethod(generic = 'as.SingleCellExperiment', object = x) } #' Cluster Determination #' #' Identify clusters of cells by a shared nearest neighbor (SNN) modularity #' optimization based clustering algorithm. First calculate k-nearest neighbors #' and construct the SNN graph. Then optimize the modularity function to #' determine clusters. For a full description of the algorithms, see Waltman and #' van Eck (2013) \emph{The European Physical Journal B}. 
Thanks to Nigel #' Delaney (evolvedmicrobe@github) for the rewrite of the Java modularity #' optimizer code in Rcpp! #' #' To run Leiden algorithm, you must first install the leidenalg python #' package (e.g. via pip install leidenalg), see Traag et al (2018). #' #' @param object An object #' @param ... Arguments passed to other methods #' #' @return Returns a Seurat object where the idents have been updated with new cluster info; #' latest clustering results will be stored in object metadata under 'seurat_clusters'. #' Note that 'seurat_clusters' will be overwritten everytime FindClusters is run #' #' @export #' #' @rdname FindClusters #' @export FindClusters #' FindClusters <- function(object, ...) { UseMethod(generic = 'FindClusters', object = object) } #' Gene expression markers of identity classes #' #' Finds markers (differentially expressed genes) for identity classes #' #' @param object An object #' @param ... Arguments passed to other methods and to specific DE methods #' @return data.frame with a ranked list of putative markers as rows, and associated #' statistics as columns (p-values, ROC score, etc., depending on the test used (\code{test.use})). The following columns are always present: #' \itemize{ #' \item \code{avg_logFC}: log fold-chage of the average expression between the two groups. Positive values indicate that the gene is more highly expressed in the first group #' \item \code{pct.1}: The percentage of cells where the gene is detected in the first group #' \item \code{pct.2}: The percentage of cells where the gene is detected in the second group #' \item \code{p_val_adj}: Adjusted p-value, based on bonferroni correction using all genes in the dataset #' } #' #' @details p-value adjustment is performed using bonferroni correction based on #' the total number of genes in the dataset. Other correction methods are not #' recommended, as Seurat pre-filters genes using the arguments above, reducing #' the number of tests performed. Lastly, as Aaron Lun has pointed out, p-values #' should be interpreted cautiously, as the genes used for clustering are the #' same genes tested for differential expression. #' #' @references McDavid A, Finak G, Chattopadyay PK, et al. Data exploration, #' quality control and testing in single-cell qPCR-based gene expression experiments. #' Bioinformatics. 2013;29(4):461-467. doi:10.1093/bioinformatics/bts714 #' @references Trapnell C, et al. The dynamics and regulators of cell fate #' decisions are revealed by pseudotemporal ordering of single cells. Nature #' Biotechnology volume 32, pages 381-386 (2014) #' @references Andrew McDavid, Greg Finak and Masanao Yajima (2017). MAST: Model-based #' Analysis of Single Cell Transcriptomics. R package version 1.2.1. #' https://github.com/RGLab/MAST/ #' @references Love MI, Huber W and Anders S (2014). "Moderated estimation of #' fold change and dispersion for RNA-seq data with DESeq2." Genome Biology. 
#' https://bioconductor.org/packages/release/bioc/html/DESeq2.html #' #' @export #' #' @examples #' \dontrun{ #' data("pbmc_small") #' # Find markers for cluster 2 #' markers <- FindMarkers(object = pbmc_small, ident.1 = 2) #' head(x = markers) #' #' # Take all cells in cluster 2, and find markers that separate cells in the 'g1' group (metadata #' # variable 'group') #' markers <- FindMarkers(pbmc_small, ident.1 = "g1", group.by = 'groups', subset.ident = "2") #' head(x = markers) #' #' # Pass 'clustertree' or an object of class phylo to ident.1 and #' # a node to ident.2 as a replacement for FindMarkersNode #' if (requireNamespace("ape", quietly = TRUE)) { #' pbmc_small <- BuildClusterTree(object = pbmc_small) #' markers <- FindMarkers(object = pbmc_small, ident.1 = 'clustertree', ident.2 = 5) #' head(x = markers) #' } #' } #' #' @rdname FindMarkers #' @export FindMarkers #' #' @aliases FindMarkersNode #' @seealso \code{FoldChange} #' FindMarkers <- function(object, ...) { UseMethod(generic = 'FindMarkers', object = object) } #' (Shared) Nearest-neighbor graph construction #' #' Computes the \code{k.param} nearest neighbors for a given dataset. Can also #' optionally (via \code{compute.SNN}), construct a shared nearest neighbor #' graph by calculating the neighborhood overlap (Jaccard index) between every #' cell and its \code{k.param} nearest neighbors. #' #' @param object An object #' @param ... Arguments passed to other methods #' #' @return This function can either return a \code{\link{Neighbor}} object #' with the KNN information or a list of \code{\link{Graph}} objects with #' the KNN and SNN depending on the settings of \code{return.neighbor} and #' \code{compute.SNN}. When running on a \code{\link{Seurat}} object, this #' returns the \code{\link{Seurat}} object with the Graphs or Neighbor objects #' stored in their respective slots. Names of the Graph or Neighbor object can #' be found with \code{\link{Graphs}} or \code{\link{Neighbors}}. #' #' @examples #' data("pbmc_small") #' pbmc_small #' # Compute an SNN on the gene expression level #' pbmc_small <- FindNeighbors(pbmc_small, features = VariableFeatures(object = pbmc_small)) #' #' # More commonly, we build the SNN on a dimensionally reduced form of the data #' # such as the first 10 principle components. #' #' pbmc_small <- FindNeighbors(pbmc_small, reduction = "pca", dims = 1:10) #' #' @rdname FindNeighbors #' @export FindNeighbors #' FindNeighbors <- function(object, ...) { UseMethod(generic = 'FindNeighbors', object = object) } #' Find variable features #' #' Identifies features that are outliers on a 'mean variability plot'. #' #' For the mean.var.plot method: #' Exact parameter settings may vary empirically from dataset to dataset, and #' based on visual inspection of the plot. Setting the y.cutoff parameter to 2 #' identifies features that are more than two standard deviations away from the #' average dispersion within a bin. The default X-axis function is the mean #' expression level, and for Y-axis it is the log(Variance/mean). All mean/variance #' calculations are not performed in log-space, but the results are reported in #' log-space - see relevant functions for exact details. #' #' @param object An object #' @param ... Arguments passed to other methods #' #' @rdname FindVariableFeatures #' @export FindVariableFeatures #' #' @aliases FindVariableGenes #' FindVariableFeatures <- function(object, ...) 
{ UseMethod(generic = 'FindVariableFeatures', object = object) } #' Find spatially variable features #' #' Identify features whose variability in expression can be explained to some #' degree by spatial location. #' #' @param object An object #' @param ... Arguments passed to other methods #' #' @rdname FindSpatiallyVariableFeatures #' @export FindSpatiallyVariableFeatures #' FindSpatiallyVariableFeatures <- function(object, ...) { UseMethod(generic = 'FindSpatiallyVariableFeatures', object = object) } #' Fold Change #' #' Calculate log fold change and percentage of cells expressing each feature #' for different identity classes. #' #' If the slot is \code{scale.data} or a reduction is specified, average difference #' is returned instead of log fold change and the column is named "avg_diff". #' Otherwise, log2 fold change is returned with column named "avg_log2_FC". #' #' @examples #' \dontrun{ #' data("pbmc_small") #' FoldChange(pbmc_small, ident.1 = 1) #' } #' #' @param object A Seurat object #' @param ... Arguments passed to other methods #' @rdname FoldChange #' @export FoldChange #' @return Returns a data.frame #' @seealso \code{FindMarkers} FoldChange <- function(object, ...) { UseMethod(generic = 'FoldChange', object = object) } #' Get an Assay object from a given Seurat object. #' #' @param object An object #' @param ... Arguments passed to other methods #' #' @return Returns an Assay object #' #' @rdname GetAssay #' @export GetAssay #' GetAssay <- function(object, ...) { UseMethod(generic = 'GetAssay', object = object) } #' Integrate low dimensional embeddings #' #' Perform dataset integration using a pre-computed Anchorset of specified low #' dimensional representations. #' #' The main steps of this procedure are identical to \code{\link{IntegrateData}} #' with one key distinction. When computing the weights matrix, the distance #' calculations are performed in the full space of integrated embeddings when #' integrating more than two datasets, as opposed to a reduced PCA space which #' is the default behavior in \code{\link{IntegrateData}}. #' #' @param anchorset An AnchorSet object #' @param new.reduction.name Name for new integrated dimensional reduction. #' @param reductions Name of reductions to be integrated. For a #' TransferAnchorSet, this should be the name of a reduction present in the #' anchorset object (for example, "pcaproject"). For an IntegrationAnchorSet, #' this should be a \code{\link{DimReduc}} object containing all cells present #' in the anchorset object. #' @param dims.to.integrate Number of dimensions to return integrated values for #' @param weight.reduction Dimension reduction to use when calculating anchor #' weights. This can be one of: #' \itemize{ #' \item{A string, specifying the name of a dimension reduction present in #' all objects to be integrated} #' \item{A vector of strings, specifying the name of a dimension reduction to #' use for each object to be integrated} #' \item{A vector of \code{\link{DimReduc}} objects, specifying the object to #' use for each object in the integration} #' \item{NULL, in which case the full corrected space is used for computing #' anchor weights.} #' } #' @param ... Reserved for internal use #' #' @return When called on a TransferAnchorSet (from FindTransferAnchors), this #' will return the query object with the integrated embeddings stored in a new #' reduction. When called on an IntegrationAnchorSet (from IntegrateData), this #' will return a merged object with the integrated reduction stored. 
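#'
#' @examples
#' \dontrun{
#' # Hedged sketch of the query-mapping use case; `ref` and `query` are
#' # placeholder Seurat objects, and the arguments shown reflect one common
#' # workflow rather than the only supported call.
#' anchors <- FindTransferAnchors(reference = ref, query = query)
#' query <- IntegrateEmbeddings(
#'   anchorset = anchors,
#'   reference = ref,
#'   query = query,
#'   reductions = "pcaproject",
#'   new.reduction.name = "integrated.dr"
#' )
#' }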
#' #' @rdname IntegrateEmbeddings #' @export IntegrateEmbeddings #' IntegrateEmbeddings <- function(anchorset, ...) { UseMethod(generic = "IntegrateEmbeddings", object = anchorset) } #' Leverage Score Calculation #' #' This function computes the leverage scores for a given object. #' It uses the concept of sketching and random projections. The function provides an approximation #' to the leverage scores using a scalable method suitable for large matrices. #' #' @param object A matrix-like object #' @param ... Arguments passed to other methods #' #' @references Clarkson, K. L. & Woodruff, D. P. #' Low-rank approximation and regression in input sparsity time. #' JACM 63, 1–45 (2017). \url{https://dl.acm.org/doi/10.1145/3019134}; #' #' @export #' #' LeverageScore <- function(object, ...) { UseMethod(generic = 'LeverageScore', object = object) } #' Normalize Raw Data #' #' @param data Matrix with the raw count data #' @param scale.factor Scale the data; default is \code{1e4} #' @param margin Margin to normalize over #' @param verbose Print progress #' #' @return A matrix with the normalized and log-transformed data #' #' @template param-dotsm #' #' @export #' @concept preprocessing #' #' @examples #' mat <- matrix(data = rbinom(n = 25, size = 5, prob = 0.2), nrow = 5) #' mat #' mat_norm <- LogNormalize(data = mat) #' mat_norm #' LogNormalize <- function( data, scale.factor = 1e4, margin = 2L, verbose = TRUE, ... ) { UseMethod(generic = 'LogNormalize', object = data) } #' Metric for evaluating mapping success #' #' This metric was designed to help identify query cells that aren't well #' represented in the reference dataset. The intuition for the score is that we #' are going to project the query cells into a reference-defined space and then #' project them back onto the query. By comparing the neighborhoods before and #' after projection, we identify cells whose local neighborhoods are the most #' affected by this transformation. This could be because there is a population #' of query cells that aren't present in the reference, or because the state of the cells #' in the query is significantly different from the equivalent cell type in the #' reference. #' #' @param anchors Set of anchors #' @param ... Arguments passed to other methods #' #' @rdname MappingScore #' @export MappingScore #' MappingScore <- function(anchors, ...) { UseMethod(generic = "MappingScore", object = anchors) } #' Normalize Data #' #' Normalize the count data present in a given assay. #' #' @param object An object #' @param ... Arguments passed to other methods #' #' @return Returns object after normalization #' #' @rdname NormalizeData #' @export NormalizeData #' NormalizeData <- function(object, ...)
{ UseMethod(generic = 'NormalizeData', object = object) } #' Project query data to the reference dimensional reduction #' #' #' @param query An object for query cells #' @param reference An object for reference cells #' @param query.assay Assay name for query object #' @param reference.assay Assay name for reference object #' @param reduction Name of dimensional reduction from reference object #' @param dims Dimensions used for reference dimensional reduction #' @param scale Determine if scale query data based on reference data variance #' @param verbose Print progress #' @param feature.mean Mean of features in reference #' @param feature.sd Standard variance of features in reference #' #' @return A matrix with projected cell embeddings #' #' @rdname ProjectCellEmbeddings #' @export ProjectCellEmbeddings #' #' @keywords internal #' ProjectCellEmbeddings <- function( query, ... ) { UseMethod(generic = 'ProjectCellEmbeddings', object = query) } #' Project query into UMAP coordinates of a reference #' #' This function will take a query dataset and project it into the coordinates #' of a provided reference UMAP. This is essentially a wrapper around two steps: #' \itemize{ #' \item{FindNeighbors - Find the nearest reference cell neighbors and their #' distances for each query cell.} #' \item{RunUMAP - Perform umap projection by providing the neighbor set #' calculated above and the umap model previously computed in the reference.} #' } #' #' @param query Query dataset #' #' @rdname ProjectUMAP #' @export ProjectUMAP #' ProjectUMAP <- function(query, ...) { UseMethod(generic = "ProjectUMAP", object = query) } #' Pseudobulk Expression #' #' Normalize the count data present in a given assay. #' #' @param object An assay #' @param ... Arguments passed to other methods #' #' @return Returns object after normalization #' #' @rdname PseudobulkExpression #' @export PseudobulkExpression #' PseudobulkExpression <- function(object, ...) { UseMethod(generic = "PseudobulkExpression", object = object) } #' Perform Canonical Correlation Analysis #' #' Runs a canonical correlation analysis using a diagonal implementation of CCA. #' For details about stored CCA calculation parameters, see #' \code{PrintCCAParams}. #' @param object1 First Seurat object #' @param object2 Second Seurat object. # @param ... Arguments passed to other methods #' #' @return Returns a combined Seurat object with the CCA results stored. #' #' @seealso \code{\link{merge.Seurat}} #' #' @examples #' \dontrun{ #' data("pbmc_small") #' pbmc_small #' # As CCA requires two datasets, we will split our test object into two just for this example #' pbmc1 <- subset(pbmc_small, cells = colnames(pbmc_small)[1:40]) #' pbmc2 <- subset(pbmc_small, cells = colnames(x = pbmc_small)[41:80]) #' pbmc1[["group"]] <- "group1" #' pbmc2[["group"]] <- "group2" #' pbmc_cca <- RunCCA(object1 = pbmc1, object2 = pbmc2) #' # Print results #' print(x = pbmc_cca[["cca"]]) #' } #' #' @rdname RunCCA #' @export RunCCA #' RunCCA <- function(object1, object2, ...) { UseMethod(generic = 'RunCCA', object = object1) } #' Run Graph Laplacian Eigendecomposition #' #' Run a graph laplacian dimensionality reduction. It is used as a low #' dimensional representation for a cell-cell graph. The input graph #' should be symmetric #' #' @param object A Seurat object #' @param ... 
Arguments passed to #' \code{\link[RSpectra:eigs_sym]{RSpectra::eigs_sym}} #' #' @return Returns Seurat object with the Graph laplacian eigenvector #' calculation stored in the reductions slot #' #' @rdname RunGraphLaplacian #' @export RunGraphLaplacian #' RunGraphLaplacian <- function(object, ...) { UseMethod(generic = 'RunGraphLaplacian', object = object) } #' Run Independent Component Analysis on gene expression #' #' Run fastica algorithm from the ica package for ICA dimensionality reduction. #' For details about stored ICA calculation parameters, see #' \code{PrintICAParams}. #' #' @param object Seurat object #' #' @rdname RunICA #' @export RunICA #' RunICA <- function(object, ...) { UseMethod(generic = "RunICA", object = object) } #' Run Linear Discriminant Analysis #' #' #' @param object An object #' @param ... Arguments passed to other methods #' #' @rdname RunLDA #' @export RunLDA #' #' @aliases RunLDA #' RunLDA <- function(object, ...) { UseMethod(generic = 'RunLDA', object = object) } #' Run Principal Component Analysis #' #' Run a PCA dimensionality reduction. For details about stored PCA calculation #' parameters, see \code{PrintPCAParams}. #' #' @param object An object #' @param ... Arguments passed to other methods and IRLBA #' #' @return Returns Seurat object with the PCA calculation stored in the reductions slot #' #' @export #' #' @rdname RunPCA #' @export RunPCA #' RunPCA <- function(object, ...) { UseMethod(generic = 'RunPCA', object = object) } #' Run Supervised Latent Semantic Indexing #' #' Run a supervised LSI (SLSI) dimensionality reduction supervised by a #' cell-cell kernel. SLSI is used to capture a linear transformation of peaks #' that maximizes its dependency to the given cell-cell kernel. #' #' @param object An object #' @param ... Arguments passed to IRLBA irlba #' #' @return Returns Seurat object with the SLSI calculation stored in the #' reductions slot #' #' @export #' #' @rdname RunSLSI #' @export RunSLSI #' RunSLSI <- function(object, ...) { UseMethod(generic = 'RunSLSI', object = object) } #' Run Supervised Principal Component Analysis #' #' Run a supervised PCA (SPCA) dimensionality reduction supervised by a cell-cell kernel. #' SPCA is used to capture a linear transformation which maximizes its dependency to #' the given cell-cell kernel. We use SNN graph as the kernel to supervise the linear #' matrix factorization. #' #' @param object An object #' @param ... Arguments passed to other methods and IRLBA #' #' @return Returns Seurat object with the SPCA calculation stored in the reductions slot #' @references Barshan E, Ghodsi A, Azimifar Z, Jahromi MZ. #' Supervised principal component analysis: Visualization, classification and #' regression on subspaces and submanifolds. #' Pattern Recognition. 2011 Jul 1;44(7):1357-71. \url{https://www.sciencedirect.com/science/article/pii/S0031320310005819?casa_token=AZMFg5OtPnAAAAAA:_Udu7GJ7G2ed1-XSmr-3IGSISUwcHfMpNtCj-qacXH5SBC4nwzVid36GXI3r8XG8dK5WOQui}; #' @export #' #' @rdname RunSPCA #' @export RunSPCA #' RunSPCA <- function(object, ...) { UseMethod(generic = 'RunSPCA', object = object) } #' Run t-distributed Stochastic Neighbor Embedding #' #' Run t-SNE dimensionality reduction on selected features. Has the option of #' running in a reduced dimensional space (i.e. spectral tSNE, recommended), #' or running based on a set of genes. For details about stored TSNE calculation #' parameters, see \code{PrintTSNEParams}. #' #' @param object Seurat object #' @param ... 
Arguments passed to other methods and to t-SNE call (most commonly used is perplexity) #' #' @rdname RunTSNE #' @export RunTSNE #' RunTSNE <- function(object, ...) { UseMethod(generic = 'RunTSNE', object = object) } #' Run UMAP #' #' Runs the Uniform Manifold Approximation and Projection (UMAP) dimensional #' reduction technique. To run using \code{umap.method="umap-learn"}, you must #' first install the umap-learn python package (e.g. via #' \code{pip install umap-learn}). Details on this package can be #' found here: \url{https://github.com/lmcinnes/umap}. For a more in depth #' discussion of the mathematics underlying UMAP, see the ArXiv paper here: #' \url{https://arxiv.org/abs/1802.03426}. #' #' @param object An object #' @param ... Arguments passed to other methods and UMAP #' #' @return Returns a Seurat object containing a UMAP representation #' #' @references McInnes, L, Healy, J, UMAP: Uniform Manifold Approximation and #' Projection for Dimension Reduction, ArXiv e-prints 1802.03426, 2018 #' #' @export #' #' @examples #' \dontrun{ #' data("pbmc_small") #' pbmc_small #' # Run UMAP map on first 5 PCs #' pbmc_small <- RunUMAP(object = pbmc_small, dims = 1:5) #' # Plot results #' DimPlot(object = pbmc_small, reduction = 'umap') #' } #' #' @rdname RunUMAP #' @export RunUMAP #' RunUMAP <- function(object, ...) { UseMethod(generic = 'RunUMAP', object = object) } #' Scale and center the data. #' #' Scales and centers features in the dataset. If variables are provided in vars.to.regress, #' they are individually regressed against each feature, and the resulting residuals are #' then scaled and centered. #' #' ScaleData now incorporates the functionality of the function formerly known #' as RegressOut (which regressed out given the effects of provided variables #' and then scaled the residuals). To make use of the regression functionality, #' simply pass the variables you want to remove to the vars.to.regress parameter. #' #' Setting center to TRUE will center the expression for each feature by subtracting #' the average expression for that feature. Setting scale to TRUE will scale the #' expression level for each feature by dividing the centered feature expression #' levels by their standard deviations if center is TRUE and by their root mean #' square otherwise. #' #' @param object An object #' @param ... Arguments passed to other methods #' #' @rdname ScaleData #' @export ScaleData #' ScaleData <- function(object, ...) { UseMethod(generic = 'ScaleData', object = object) } #' Get image scale factors #' #' @param object An object to get scale factors from #' @param ... Arguments passed to other methods #' #' @return An object of class \code{scalefactors} #' #' @rdname ScaleFactors #' @export ScaleFactors #' ScaleFactors <- function(object, ...) { UseMethod(generic = 'ScaleFactors', object = object) } #' Compute Jackstraw scores significance. #' #' Significant PCs should show a p-value distribution that is #' strongly skewed to the left compared to the null distribution. #' The p-value for each PC is based on a proportion test comparing the number #' of features with a p-value below a particular threshold (score.thresh), compared with the #' proportion of features expected under a uniform distribution of p-values. #' #' @param object An object #' @param ... Arguments passed to other methods #' #' @return Returns a Seurat object #' #' @author Omri Wurtzel #' @seealso \code{\link{JackStrawPlot}} #' #' @rdname ScoreJackStraw #' @export ScoreJackStraw #' ScoreJackStraw <- function(object, ...) 
{ UseMethod(generic = 'ScoreJackStraw', object = object) } #' Perform sctransform-based normalization #' @param object An object #' @param ... Arguments passed to other methods (not used) #' #' @rdname SCTransform #' @export SCTransform #' SCTransform <- function(object, ...) { UseMethod(generic = 'SCTransform', object = object) } #' Get SCT results from an Assay #' #' Pull the \code{\link{SCTResults}} information from an \code{\link{SCTAssay}} #' object. #' #' @param object An object #' @param ... Arguments passed to other methods (not used) #' #' @rdname SCTResults #' @export SCTResults #' SCTResults <- function(object, ...) { UseMethod(generic = 'SCTResults', object = object) } #' @param value new data to set #' #' @rdname SCTResults #' @export SCTResults<- #' "SCTResults<-" <- function(object, ..., value) { UseMethod(generic = 'SCTResults<-', object = object) } #' Variance Stabilizing Transformation #' #' Apply variance stabilizing transformation for selection of variable features #' #' @inheritParams stats::loess #' @param data A matrix-like object #' @param margin Unused #' @param nselect Number of of features to select #' @param clip Upper bound for values post-standardization; defaults to the #' square root of the number of cells #' @param verbose ... #' #' @template param-dotsm #' #' @return A data frame with the following columns: #' \itemize{ #' \item \dQuote{\code{mean}}: ... #' \item \dQuote{\code{variance}}: ... #' \item \dQuote{\code{variance.expected}}: ... #' \item \dQuote{\code{variance.standardized}}: ... #' \item \dQuote{\code{variable}}: \code{TRUE} if the feature selected as #' variable, otherwise \code{FALSE} #' \item \dQuote{\code{rank}}: If the feature is selected as variable, then how #' it compares to other variable features with lower ranks as more variable; #' otherwise, \code{NA} #' } #' #' @rdname VST #' @export VST #' #' @keywords internal #' VST <- function( data, margin = 1L, nselect = 2000L, span = 0.3, clip = NULL, ... 
) { UseMethod(generic = 'VST', object = data) } Seurat/R/data.R0000644000176200001440000000274314525500037012761 0ustar liggesusers#' Cell cycle genes #' #' A list of genes used in cell-cycle regression #' #' @format A list of two vectors #' \describe{ #' \item{s.genes}{Genes associated with S-phase} #' \item{g2m.genes}{Genes associated with G2M-phase} #' } #' @concept data #' @source \url{https://www.science.org/doi/abs/10.1126/science.aad0501} #' "cc.genes" #' Cell cycle genes: 2019 update #' #' A list of genes used in cell-cycle regression, updated with 2019 symbols #' #' @section Updated symbols: #' The following symbols were updated from \code{\link{cc.genes}} #' \describe{ #' \item{s.genes}{ #' \itemize{ #' \item \emph{MCM2}: \emph{MCM7} #' \item \emph{MLF1IP}: \emph{CENPU} #' \item \emph{RPA2}: \emph{POLR1B} #' \item \emph{BRIP1}: \emph{MRPL36} #' } #' } #' \item{g2m.genes}{ #' \itemize{ #' \item \emph{FAM64A}: \emph{PIMREG} #' \item \emph{HN1}: \emph{JPT1} #' } #' } #' } #' #' @format A list of two vectors #' \describe{ #' \item{s.genes}{Genes associated with S-phase} #' \item{g2m.genes}{Genes associated with G2M-phase} #' } #' @concept data #' @source \url{https://www.science.org/doi/abs/10.1126/science.aad0501} #' #' @seealso \code{\link{cc.genes}} #' #' @examples #' \dontrun{ #' cc.genes.updated.2019 <- cc.genes #' cc.genes.updated.2019$s.genes <- UpdateSymbolList(symbols = cc.genes.updated.2019$s.genes) #' cc.genes.updated.2019$g2m.genes <- UpdateSymbolList(symbols = cc.genes.updated.2019$g2m.genes) #' } #' "cc.genes.updated.2019" Seurat/R/roxygen.R0000644000176200001440000000276014525500037013542 0ustar liggesusers#' @include zzz.R #' NULL #' @importFrom utils lsf.str #' @importFrom rlang is_scalar_character #' .rd_methods <- function(method = 'integration') { methods <- sapply( X = grep(pattern = '^package:', x = search(), value = TRUE), FUN = function(x) { fxns <- as.character(x = lsf.str(pos = x)) attrs <- vector(mode = 'logical', length = length(x = fxns)) for (i in seq_along(along.with = fxns)) { mthd <- attr(x = get(x = fxns[i], pos = x), which = 'Seurat.method') attrs[i] <- is_scalar_character(x = mthd) && isTRUE(x = mthd == method) } return(fxns[attrs]) }, simplify = FALSE, USE.NAMES = TRUE ) methods <- Filter(f = length, x = methods) names(x = methods) <- gsub( pattern = '^package:', replacement = '', x = names(x = methods) ) if (!length(x = methods)) { return('') } ret <- vector( mode = 'character', length = sum(vapply( X = methods, FUN = length, FUN.VALUE = integer(length = 1L) )) ) j <- 1L for (pkg in names(x = methods)) { for (fxn in methods[[pkg]]) { ret[j] <- ifelse( test = pkg == 'Seurat', yes = paste0('\\item \\code{\\link{', fxn, '}}'), no = paste0( '\\item \\code{\\link[', pkg, ':', fxn, ']{', pkg, '::', fxn, '}}' ) ) j <- j + 1L } } return(paste('\\itemize{', paste0(' ', ret, collapse = '\n'), '}', sep = '\n')) } Seurat/R/RcppExports.R0000644000176200001440000001174414525500037014342 0ustar liggesusers# Generated by using Rcpp::compileAttributes() -> do not edit by hand # Generator token: 10BE3573-1514-4C36-9D1C-5A225CD40393 RunModularityClusteringCpp <- function(SNN, modularityFunction, resolution, algorithm, nRandomStarts, nIterations, randomSeed, printOutput, edgefilename) { .Call('_Seurat_RunModularityClusteringCpp', PACKAGE = 'Seurat', SNN, modularityFunction, resolution, algorithm, nRandomStarts, nIterations, randomSeed, printOutput, edgefilename) } RunUMISampling <- function(data, sample_val, upsample = FALSE, display_progress = TRUE) { 
.Call('_Seurat_RunUMISampling', PACKAGE = 'Seurat', data, sample_val, upsample, display_progress) } RunUMISamplingPerCell <- function(data, sample_val, upsample = FALSE, display_progress = TRUE) { .Call('_Seurat_RunUMISamplingPerCell', PACKAGE = 'Seurat', data, sample_val, upsample, display_progress) } RowMergeMatrices <- function(mat1, mat2, mat1_rownames, mat2_rownames, all_rownames) { .Call('_Seurat_RowMergeMatrices', PACKAGE = 'Seurat', mat1, mat2, mat1_rownames, mat2_rownames, all_rownames) } LogNorm <- function(data, scale_factor, display_progress = TRUE) { .Call('_Seurat_LogNorm', PACKAGE = 'Seurat', data, scale_factor, display_progress) } Standardize <- function(mat, display_progress = TRUE) { .Call('_Seurat_Standardize', PACKAGE = 'Seurat', mat, display_progress) } FastSparseRowScale <- function(mat, scale = TRUE, center = TRUE, scale_max = 10, display_progress = TRUE) { .Call('_Seurat_FastSparseRowScale', PACKAGE = 'Seurat', mat, scale, center, scale_max, display_progress) } FastSparseRowScaleWithKnownStats <- function(mat, mu, sigma, scale = TRUE, center = TRUE, scale_max = 10, display_progress = TRUE) { .Call('_Seurat_FastSparseRowScaleWithKnownStats', PACKAGE = 'Seurat', mat, mu, sigma, scale, center, scale_max, display_progress) } FastCov <- function(mat, center = TRUE) { .Call('_Seurat_FastCov', PACKAGE = 'Seurat', mat, center) } FastCovMats <- function(mat1, mat2, center = TRUE) { .Call('_Seurat_FastCovMats', PACKAGE = 'Seurat', mat1, mat2, center) } FastRBind <- function(mat1, mat2) { .Call('_Seurat_FastRBind', PACKAGE = 'Seurat', mat1, mat2) } FastExpMean <- function(mat, display_progress) { .Call('_Seurat_FastExpMean', PACKAGE = 'Seurat', mat, display_progress) } SparseRowVar2 <- function(mat, mu, display_progress) { .Call('_Seurat_SparseRowVar2', PACKAGE = 'Seurat', mat, mu, display_progress) } SparseRowVarStd <- function(mat, mu, sd, vmax, display_progress) { .Call('_Seurat_SparseRowVarStd', PACKAGE = 'Seurat', mat, mu, sd, vmax, display_progress) } FastLogVMR <- function(mat, display_progress) { .Call('_Seurat_FastLogVMR', PACKAGE = 'Seurat', mat, display_progress) } RowVar <- function(x) { .Call('_Seurat_RowVar', PACKAGE = 'Seurat', x) } SparseRowVar <- function(mat, display_progress) { .Call('_Seurat_SparseRowVar', PACKAGE = 'Seurat', mat, display_progress) } ReplaceColsC <- function(mat, col_idx, replacement) { .Call('_Seurat_ReplaceColsC', PACKAGE = 'Seurat', mat, col_idx, replacement) } GraphToNeighborHelper <- function(mat) { .Call('_Seurat_GraphToNeighborHelper', PACKAGE = 'Seurat', mat) } fast_dist <- function(x, y, n) { .Call('_Seurat_fast_dist', PACKAGE = 'Seurat', x, y, n) } FindWeightsC <- function(cells2, distances, anchor_cells2, integration_matrix_rownames, cell_index, anchor_score, min_dist, sd, display_progress) { .Call('_Seurat_FindWeightsC', PACKAGE = 'Seurat', cells2, distances, anchor_cells2, integration_matrix_rownames, cell_index, anchor_score, min_dist, sd, display_progress) } IntegrateDataC <- function(integration_matrix, weights, expression_cells2) { .Call('_Seurat_IntegrateDataC', PACKAGE = 'Seurat', integration_matrix, weights, expression_cells2) } ScoreHelper <- function(snn, query_pca, query_dists, corrected_nns, k_snn, subtract_first_nn, display_progress) { .Call('_Seurat_ScoreHelper', PACKAGE = 'Seurat', snn, query_pca, query_dists, corrected_nns, k_snn, subtract_first_nn, display_progress) } ComputeSNN <- function(nn_ranked, prune) { .Call('_Seurat_ComputeSNN', PACKAGE = 'Seurat', nn_ranked, prune) } WriteEdgeFile <- function(snn, 
filename, display_progress) { invisible(.Call('_Seurat_WriteEdgeFile', PACKAGE = 'Seurat', snn, filename, display_progress)) } DirectSNNToFile <- function(nn_ranked, prune, display_progress, filename) { .Call('_Seurat_DirectSNNToFile', PACKAGE = 'Seurat', nn_ranked, prune, display_progress, filename) } SNN_SmallestNonzero_Dist <- function(snn, mat, n, nearest_dist) { .Call('_Seurat_SNN_SmallestNonzero_Dist', PACKAGE = 'Seurat', snn, mat, n, nearest_dist) } row_sum_dgcmatrix <- function(x, i, rows, cols) { .Call('_Seurat_row_sum_dgcmatrix', PACKAGE = 'Seurat', x, i, rows, cols) } row_mean_dgcmatrix <- function(x, i, rows, cols) { .Call('_Seurat_row_mean_dgcmatrix', PACKAGE = 'Seurat', x, i, rows, cols) } row_var_dgcmatrix <- function(x, i, rows, cols) { .Call('_Seurat_row_var_dgcmatrix', PACKAGE = 'Seurat', x, i, rows, cols) } Seurat/R/preprocessing5.R0000644000176200001440000020115014525500056015012 0ustar liggesusers#' @include generics.R #' @include preprocessing.R #' @importFrom stats loess #' @importFrom methods slot #' @importFrom SeuratObject .MARGIN .SparseSlots #' @importFrom utils txtProgressBar setTxtProgressBar #' NULL hvf.methods <- list() #%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% # Generics #%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% #%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% # Functions #%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% #%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% # Methods for Seurat-defined generics #%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% #' @method FindVariableFeatures default #' @export #' FindVariableFeatures.default <- function( object, method = VST, nfeatures = 2000L, verbose = TRUE, selection.method = selection.method, ... ) { if (is_quosure(x = method)) { method <- eval( expr = quo_get_expr(quo = method), envir = quo_get_env(quo = method) ) } if (is.character(x = method)) { method <- get(x = method) } if (!is.function(x = method)) { stop( "'method' must be a function for calculating highly variable features", call. = FALSE ) } var.gene.ouput <- method( data = object, nselect = nfeatures, verbose = verbose, ... ) rownames(x = var.gene.ouput) <- rownames(x = object) return(var.gene.ouput) } #' @importFrom SeuratObject DefaultLayer Features Key Layers #' #' @method FindVariableFeatures StdAssay #' @export #' FindVariableFeatures.StdAssay <- function( object, method = NULL, nfeatures = 2000L, layer = NULL, span = 0.3, clip = NULL, key = NULL, verbose = TRUE, selection.method = 'vst', ... ) { if (selection.method == 'vst') { layer <- layer%||%'counts' method <- VST key <- 'vst' } else if (selection.method %in% c('mean.var.plot', 'mvp')) { layer <- layer%||%'data' method <- MVP key <- 'mvp' } else if (selection.method %in% c('dispersion', 'disp')) { layer <- layer%||%'data' method <- DISP key <- 'disp' } else if (is.null(x = method) || is.null(x = layer)){ stop('Custome functions and layers are both required') } else { key <- NULL } layer <- Layers(object = object, search = layer) if (is.null(x = key)) { false <- function(...) 
{ return(FALSE) } key <- if (tryCatch(expr = is_quosure(x = method), error = false)) { method } else if (is.function(x = method)) { substitute(expr = method) } else if (is.call(x = enquo(arg = method))) { enquo(arg = method) } else if (is.character(x = method)) { method } else { parse(text = method) } key <- .Abbrv(x = as_name(x = key)) } warn.var <- warn.rank <- TRUE for (i in seq_along(along.with = layer)) { if (isTRUE(x = verbose)) { message("Finding variable features for layer ", layer[i]) } data <- LayerData(object = object, layer = layer[i], fast = TRUE) hvf.function <- if (inherits(x = data, what = 'V3Matrix')) { FindVariableFeatures.default } else { FindVariableFeatures } hvf.info <- hvf.function( object = data, method = method, nfeatures = nfeatures, span = span, clip = clip, verbose = verbose, ... ) if (warn.var) { if (!'variable' %in% colnames(x = hvf.info) || !is.logical(x = hvf.info$variable)) { warning( "No variable feature indication in HVF info for method ", key, ", `VariableFeatures` will not work", call. = FALSE, immediate. = TRUE ) warn.var <- FALSE } } else if (warn.rank && !'rank' %in% colnames(x = hvf.info)) { warning( "No variable feature rank in HVF info for method ", key, ", `VariableFeatures` will return variable features in assay order", call. = FALSE, immediate. = TRUE ) warn.rank <- FALSE } colnames(x = hvf.info) <- paste( 'vf', key, layer[i], colnames(x = hvf.info), sep = '_' ) rownames(x = hvf.info) <- Features(x = object, layer = layer[i]) object[["var.features"]] <- NULL object[["var.features.rank"]] <- NULL object[[names(x = hvf.info)]] <- NULL object[[names(x = hvf.info)]] <- hvf.info } VariableFeatures(object) <- VariableFeatures(object, nfeatures=nfeatures,method = key) return(object) } #' @param layer Layer in the Assay5 to pull data from #' @param features If provided, only compute on given features. Otherwise, #' compute for all features. #' @param nfeatures Number of features to mark as the top spatially variable. #' #' @method FindSpatiallyVariableFeatures StdAssay #' @rdname FindSpatiallyVariableFeatures #' @concept preprocessing #' @concept spatial #' @export #' FindSpatiallyVariableFeatures.StdAssay <- function( object, layer = "scale.data", spatial.location, selection.method = c('markvariogram', 'moransi'), features = NULL, r.metric = 5, x.cuts = NULL, y.cuts = NULL, nfeatures = nfeatures, verbose = TRUE, ... ) { features <- features %||% rownames(x = object) if (selection.method == "markvariogram" && "markvariogram" %in% names(x = Misc(object = object))) { features.computed <- names(x = Misc(object = object, slot = "markvariogram")) features <- features[! features %in% features.computed] } data <- GetAssayData(object = object, layer = layer) data <- as.matrix(x = data[features, ]) data <- data[RowVar(x = data) > 0, ] if (nrow(x = data) != 0) { svf.info <- FindSpatiallyVariableFeatures( object = data, spatial.location = spatial.location, selection.method = selection.method, r.metric = r.metric, x.cuts = x.cuts, y.cuts = y.cuts, verbose = verbose, ... 
) } else { svf.info <- c() } if (selection.method == "markvariogram") { if ("markvariogram" %in% names(x = Misc(object = object))) { svf.info <- c(svf.info, Misc(object = object, slot = "markvariogram")) } suppressWarnings(expr = Misc(object = object, slot = "markvariogram") <- svf.info) svf.info <- ComputeRMetric(mv = svf.info, r.metric) svf.info <- svf.info[order(svf.info[, 1]), , drop = FALSE] } if (selection.method == "moransi") { colnames(x = svf.info) <- paste0("MoransI_", colnames(x = svf.info)) svf.info <- svf.info[order(svf.info[, 2], -abs(svf.info[, 1])), , drop = FALSE] } var.name <- paste0(selection.method, ".spatially.variable") var.name.rank <- paste0(var.name, ".rank") svf.info[[var.name]] <- FALSE svf.info[[var.name]][1:(min(nrow(x = svf.info), nfeatures))] <- TRUE svf.info[[var.name.rank]] <- 1:nrow(x = svf.info) object[names(x = svf.info)] <- svf.info return(object) } #' @rdname LogNormalize #' @method LogNormalize default #' #' @param margin Margin to normalize over #' @importFrom SeuratObject .CheckFmargin #' #' @export #' LogNormalize.default <- function( data, scale.factor = 1e4, margin = 2L, verbose = TRUE, ... ) { margin <- .CheckFmargin(fmargin = margin) ncells <- dim(x = data)[margin] if (isTRUE(x = verbose)) { pb <- txtProgressBar(file = stderr(), style = 3) } for (i in seq_len(length.out = ncells)) { x <- if (margin == 1L) { data[i, ] } else { data[, i] } xnorm <- log1p(x = x / sum(x) * scale.factor) if (margin == 1L) { data[i, ] <- xnorm } else { data[, i] <- xnorm } if (isTRUE(x = verbose)) { setTxtProgressBar(pb = pb, value = i / ncells) } } if (isTRUE(x = verbose)) { close(con = pb) } return(data) } #' @method LogNormalize IterableMatrix #' @export #' LogNormalize.IterableMatrix <- function( data, scale.factor = 1e4, margin = 2L, verbose = TRUE, ... ) { data <- BPCells::t(BPCells::t(data) / colSums(data)) # Log normalization data <- log1p(data * scale.factor) return(data) } #' @importFrom SeuratObject IsSparse #' #' @method NormalizeData default #' @export #' NormalizeData.default <- function( object, normalization.method = c('LogNormalize', 'CLR', 'RC'), scale.factor = 1e4, cmargin = 2L, margin = 1L, verbose = TRUE, ... ) { normalization.method <- normalization.method[1L] normalization.method <- match.arg(arg = normalization.method) # TODO: enable parallelization via future normalized <- switch( EXPR = normalization.method, 'LogNormalize' = { if (IsSparse(x = object) && .MARGIN(object = object) == cmargin) { .SparseNormalize( data = object, scale.factor = scale.factor, verbose = verbose ) } else { LogNormalize( data = object, scale.factor = scale.factor, margin = cmargin, verbose = verbose, ... 
) } }, 'CLR' = { if (inherits(x = object, what = 'dgTMatrix')) { warning('Convert input dgTMatrix into dgCMatrix') object <- as(object = object, Class = 'dgCMatrix') } if (!inherits(x = object, what = 'dgCMatrix') && !inherits(x = object, what = 'matrix')) { stop('CLR normalization is only supported for dense and dgCMatrix') } CustomNormalize( data = object, custom_function = function(x) { return(log1p(x = x/(exp(x = sum(log1p(x = x[x > 0]), na.rm = TRUE)/length(x = x))))) }, margin = margin, verbose = verbose ) }, 'RC' = { if (!inherits(x = object, what = 'dgCMatrix') && !inherits(x = object, what = 'matrix')) { stop('RC normalization is only supported for dense and dgCMatrix') } RelativeCounts(data = object, scale.factor = scale.factor, verbose = verbose) } ) return(normalized) } #' @importFrom SeuratObject Cells DefaultLayer DefaultLayer<- Features #' LayerData LayerData<- #' #' @method NormalizeData StdAssay #' @export #' NormalizeData.StdAssay <- function( object, normalization.method = 'LogNormalize', scale.factor = 1e4, margin = 1L, layer = 'counts', save = 'data', verbose = TRUE, ... ) { olayer <- layer <- unique(x = layer) layer <- Layers(object = object, search = layer) if (length(x = save) != length(x = layer)) { save <- make.unique(names = gsub( pattern = olayer, replacement = save, x = layer )) } for (i in seq_along(along.with = layer)) { l <- layer[i] if (isTRUE(x = verbose)) { message("Normalizing layer: ", l) } LayerData( object = object, layer = save[i], features = Features(x = object, layer = l), cells = Cells(x = object, layer = l) ) <- NormalizeData( object = LayerData(object = object, layer = l, fast = NA), normalization.method = normalization.method, scale.factor = scale.factor, margin = margin, verbose = verbose, ... ) } gc(verbose = FALSE) return(object) } #' @importFrom SeuratObject StitchMatrix #' #' @method ScaleData StdAssay #' @export #' ScaleData.StdAssay <- function( object, features = NULL, layer = 'data', vars.to.regress = NULL, latent.data = NULL, by.layer = FALSE, split.by = NULL, model.use = 'linear', use.umi = FALSE, do.scale= TRUE, do.center = TRUE, scale.max = 10, block.size = 1000, min.cells.to.block = 3000, save = 'scale.data', verbose = TRUE, ... ) { use.umi <- ifelse(test = model.use != 'linear', yes = TRUE, no = use.umi) olayer <- layer <- unique(x = layer) layer <- Layers(object = object, search = layer) if (is.null(layer)) { abort(paste0("No layer matching pattern '", olayer, "' found. Please run NormalizeData and retry")) } if (isTRUE(x = use.umi)) { layer <- "counts" inform( message = "'use.umi' is TRUE, please make sure 'layer' specifies raw counts" ) } features <- features %||% VariableFeatures(object = object) if (!length(x = features)) { features <- Features(x = object, layer = layer) } if (isTRUE(x = by.layer)) { if (length(x = save) != length(x = layer)) { save <- make.unique(names = gsub( pattern = olayer, replacement = save, x = layer )) } for (i in seq_along(along.with = layer)) { lyr <- layer[i] if (isTRUE(x = verbose)) { inform(message = paste("Scaling data for layer", sQuote(x = lyr))) } LayerData(object = object, layer = save[i], ...) <- ScaleData( object = LayerData( object = object, layer = lyr, features = features, fast = NA ), features = features, vars.to.regress = vars.to.regress, latent.data = latent.data, split.by = split.by, model.use = model.use, use.umi = use.umi, do.scale = do.scale, do.center = do.center, scale.max = scale.max, block.size = block.size, min.cells.to.block = min.cells.to.block, verbose = verbose, ... 
) } } else { ldata <- if (length(x = layer) > 1L) { StitchMatrix( x = LayerData(object = object, layer = layer[1L], features = features), y = lapply( X = layer[2:length(x = layer)], FUN = LayerData, object = object, features = features ), rowmap = slot(object = object, name = 'features')[features, layer], colmap = slot(object = object, name = 'cells')[, layer] ) } else { LayerData(object = object, layer = layer, features = features) } ldata <- ScaleData( object = ldata, features = features, vars.to.regress = vars.to.regress, latent.data = latent.data, split.by = split.by, model.use = model.use, use.umi = use.umi, do.scale = do.scale, do.center = do.center, scale.max = scale.max, block.size = block.size, min.cells.to.block = min.cells.to.block, verbose = verbose, ... ) LayerData(object = object, layer = save, features = rownames(ldata)) <- ldata } return(object) } #' @rdname VST #' @method VST default #' @export #' VST.default <- function( data, margin = 1L, nselect = 2000L, span = 0.3, clip = NULL, ... ) { .NotYetImplemented() } #' @rdname VST #' @method VST IterableMatrix #' @importFrom SeuratObject EmptyDF #' @export #' VST.IterableMatrix <- function( data, margin = 1L, nselect = 2000L, span = 0.3, clip = NULL, verbose = TRUE, ... ) { nfeatures <- nrow(x = data) hvf.info <- EmptyDF(n = nfeatures) hvf.stats <- BPCells::matrix_stats( matrix = data, row_stats = 'variance')$row_stats # Calculate feature means hvf.info$mean <- hvf.stats['mean', ] # Calculate feature variance hvf.info$variance <- hvf.stats['variance', ] hvf.info$variance.expected <- 0L not.const <- hvf.info$variance > 0 fit <- loess( formula = log10(x = variance) ~ log10(x = mean), data = hvf.info[not.const, , drop = TRUE], span = span ) hvf.info$variance.expected[not.const] <- 10 ^ fit$fitted feature.mean <- hvf.info$mean feature.sd <- sqrt(x = hvf.info$variance.expected) standard.max <- clip %||% sqrt(x = ncol(x = data)) feature.mean[feature.mean == 0] <- 0.1 data <- BPCells::min_by_row(mat = data, vals = standard.max*feature.sd + feature.mean) data.standard <- (data - feature.mean) / feature.sd hvf.info$variance.standardized <- BPCells::matrix_stats( matrix = data.standard, row_stats = 'variance' )$row_stats['variance', ] # Set variable features hvf.info$variable <- FALSE hvf.info$rank <- NA vf <- head( x = order(hvf.info$variance.standardized, decreasing = TRUE), n = nselect ) hvf.info$variable[vf] <- TRUE hvf.info$rank[vf] <- seq_along(along.with = vf) rownames(x = hvf.info) <- rownames(x = data) return(hvf.info) } #' @importFrom Matrix rowMeans #' @importFrom SeuratObject EmptyDF #' #' @rdname VST #' @method VST dgCMatrix #' @export #' VST.dgCMatrix <- function( data, margin = 1L, nselect = 2000L, span = 0.3, clip = NULL, verbose = TRUE, ... 
) { nfeatures <- nrow(x = data) hvf.info <- EmptyDF(n = nfeatures) # Calculate feature means hvf.info$mean <- Matrix::rowMeans(x = data) # Calculate feature variance hvf.info$variance <- SparseRowVar2( mat = data, mu = hvf.info$mean, display_progress = verbose ) hvf.info$variance.expected <- 0L not.const <- hvf.info$variance > 0 fit <- loess( formula = log10(x = variance) ~ log10(x = mean), data = hvf.info[not.const, , drop = TRUE], span = span ) hvf.info$variance.expected[not.const] <- 10 ^ fit$fitted hvf.info$variance.standardized <- SparseRowVarStd( mat = data, mu = hvf.info$mean, sd = sqrt(x = hvf.info$variance.expected), vmax = clip %||% sqrt(x = ncol(x = data)), display_progress = verbose ) # Set variable features hvf.info$variable <- FALSE hvf.info$rank <- NA vf <- head( x = order(hvf.info$variance.standardized, decreasing = TRUE), n = nselect ) hvf.info$variable[vf] <- TRUE hvf.info$rank[vf] <- seq_along(along.with = vf) return(hvf.info) } #' @rdname VST #' @method VST matrix #' @export #' VST.matrix <- function( data, margin = 1L, nselect = 2000L, span = 0.3, clip = NULL, ... ) { return(VST( data = as.sparse(x = data), margin = margin, nselect = nselect, span = span, clip = clip, ... )) } #%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% # Methods for R-defined generics #%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% #%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% # Internal #%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% #' Calculate dispersion of features #' #' @param object Data matrix #' @param mean.function Function to calculate mean #' @param dispersion.function Function to calculate dispersion #' @param num.bin Number of bins to use #' @param binning.method Method to use for binning. Options are 'equal_width' or 'equal_frequency' #' @param verbose Display progress #' @keywords internal #' CalcDispersion <- function( object, mean.function = FastExpMean, dispersion.function = FastLogVMR, num.bin = 20, binning.method = "equal_width", verbose = TRUE, ... 
) { if (!inherits(x = object, what = c('dgCMatrix', 'matrix'))) { stop('mean.var.plot and dispersion methods only \ support dense and sparse matrix input') } if (inherits(x = object, what = 'matrix')) { object <- as.sparse(x = object) } feature.mean <- mean.function(object, verbose) feature.dispersion <- dispersion.function(object, verbose) names(x = feature.mean) <- names( x = feature.dispersion) <- rownames(x = object) feature.dispersion[is.na(x = feature.dispersion)] <- 0 feature.mean[is.na(x = feature.mean)] <- 0 data.x.breaks <- switch( EXPR = binning.method, 'equal_width' = num.bin, 'equal_frequency' = c( quantile( x = feature.mean[feature.mean > 0], probs = seq.int(from = 0, to = 1, length.out = num.bin) ) ), stop("Unknown binning method: ", binning.method) ) data.x.bin <- cut(x = feature.mean, breaks = data.x.breaks, include.lowest = TRUE) names(x = data.x.bin) <- names(x = feature.mean) mean.y <- tapply(X = feature.dispersion, INDEX = data.x.bin, FUN = mean) sd.y <- tapply(X = feature.dispersion, INDEX = data.x.bin, FUN = sd) feature.dispersion.scaled <- (feature.dispersion - mean.y[as.numeric(x = data.x.bin)]) / sd.y[as.numeric(x = data.x.bin)] names(x = feature.dispersion.scaled) <- names(x = feature.mean) hvf.info <- data.frame( feature.mean, feature.dispersion, feature.dispersion.scaled) rownames(x = hvf.info) <- rownames(x = object) colnames(x = hvf.info) <- paste0( 'mvp.', c('mean', 'dispersion', 'dispersion.scaled')) return(hvf.info) } #' @importFrom SeuratObject .CalcN #' CalcN <- function(object, ...) { return(.CalcN(object, ...)) } #' @method .CalcN IterableMatrix #' @export #' .CalcN.IterableMatrix <- function(object, ...) { col_stat <- BPCells::matrix_stats(matrix = object, col_stats = 'mean')$col_stats return(list( nCount = round(col_stat['mean', ] * nrow(object)), nFeature = col_stat['nonzero', ] )) } #' Find variable features based on dispersion #' #' @param data Data matrix #' @param nselect Number of top features to select based on dispersion values #' @param verbose Display progress #' @keywords internal #' DISP <- function( data, nselect = 2000L, verbose = TRUE, ... ) { hvf.info <- CalcDispersion(object = data, verbose = verbose, ...) 
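  # Flag the top `nselect` features, ranked by raw dispersion (`mvp.dispersion`),
  # as variable and record their rank; all other features keep variable = FALSE
  # and rank = NA.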
hvf.info$variable <- FALSE hvf.info$rank <- NA vf <- head( x = order(hvf.info$mvp.dispersion, decreasing = TRUE), n = nselect ) hvf.info$variable[vf] <- TRUE hvf.info$rank[vf] <- seq_along(along.with = vf) return(hvf.info) } #' @importFrom SeuratObject .CheckFmargin #' .FeatureVar <- function( data, mu, fmargin = 1L, standardize = FALSE, sd = NULL, clip = NULL, verbose = TRUE ) { fmargin <- .CheckFmargin(fmargin = fmargin) ncells <- dim(x = data)[-fmargin] nfeatures <- dim(x = data)[fmargin] fvars <- vector(mode = 'numeric', length = nfeatures) if (length(x = mu) != nfeatures) { stop("Wrong number of feature means provided") } if (isTRUE(x = standardize)) { clip <- clip %||% sqrt(x = ncells) if (length(x = sd) != nfeatures) { stop("Wrong number of standard deviations") } } if (isTRUE(x = verbose)) { msg <- 'Calculating feature variances' if (isTRUE(x = standardize)) { msg <- paste(msg, 'of standardized and clipped values') } message(msg) pb <- txtProgressBar(style = 3, file = stderr()) } for (i in seq_len(length.out = nfeatures)) { if (isTRUE(x = standardize) && sd[i] == 0) { if (isTRUE(x = verbose)) { setTxtProgressBar(pb = pb, value = i / nfeatures) } next } x <- if (fmargin == 1L) { data[i, , drop = TRUE] } else { data[, i, drop = TRUE] } x <- x - mu[i] if (isTRUE(x = standardize)) { x <- x / sd[i] x[x > clip] <- clip } fvars[i] <- sum(x ^ 2) / (ncells - 1L) if (isTRUE(x = verbose)) { setTxtProgressBar(pb = pb, value = i / nfeatures) } } if (isTRUE(x = verbose)) { close(con = pb) } return(fvars) } .Mean <- function(data, margin = 1L) { nout <- dim(x = data)[margin] nobs <- dim(x = data)[-margin] means <- vector(mode = 'numeric', length = nout) for (i in seq_len(length.out = nout)) { x <- if (margin == 1L) { data[i, , drop = TRUE] } else { data[, i, drop = TRUE] } means[i] <- sum(x) / nobs } return(means) } .SparseNormalize <- function(data, scale.factor = 1e4, verbose = TRUE) { entryname <- .SparseSlots(x = data, type = 'entries') p <- slot(object = data, name = .SparseSlots(x = data, type = 'pointers')) if (p[1L] == 0) { p <- p + 1L } np <- length(x = p) - 1L if (isTRUE(x = verbose)) { pb <- txtProgressBar(style = 3L, file = stderr()) } for (i in seq_len(length.out = np)) { idx <- seq.int(from = p[i], to = p[i + 1] - 1L) xidx <- slot(object = data, name = entryname)[idx] slot(object = data, name = entryname)[idx] <- log1p( x = xidx / sum(xidx) * scale.factor ) if (isTRUE(x = verbose)) { setTxtProgressBar(pb = pb, value = i / np) } } if (isTRUE(x = verbose)) { close(con = pb) } return(data) } #' @param data A sparse matrix #' @param mu A vector of feature means #' @param fmargin Feature margin #' @param standardize Standardize matrix rows prior to calculating variances #' @param sd If standardizing, a vector of standard deviations to #' standardize with #' @param clip Set upper bound for standardized variances; defaults to the #' square root of the number of cells #' @param verbose Show progress updates #' #' @keywords internal #' @importFrom SeuratObject .CheckFmargin #' #' @noRd #' .SparseFeatureVar <- function( data, mu, fmargin = 1L, standardize = FALSE, sd = NULL, clip = NULL, verbose = TRUE ) { fmargin <- .CheckFmargin(fmargin = fmargin) if (fmargin != .MARGIN(object = data)) { data <- t(x = data) fmargin <- .MARGIN(object = data) } entryname <- .SparseSlots(x = data, type = 'entries') p <- slot(object = data, name = .SparseSlots(x = data, type = 'pointers')) if (p[1L] == 0) { p <- p + 1L } np <- length(x = p) - 1L ncells <- dim(x = data)[-fmargin] fvars <- vector(mode = 
'numeric', length = np) if (length(x = mu) != np) { stop("Wrong number of feature means provided") } if (isTRUE(x = standardize)) { clip <- clip %||% sqrt(x = ncells) if (length(x = sd) != np) { stop("Wrong number of standard deviations provided") } } if (isTRUE(x = verbose)) { msg <- 'Calculating feature variances' if (isTRUE(x = standardize)) { msg <- paste(msg, 'of standardized and clipped values') } message(msg) pb <- txtProgressBar(style = 3, file = stderr()) } for (i in seq_len(length.out = np)) { if (isTRUE(x = standardize) && sd[i] == 0) { if (isTRUE(x = verbose)) { setTxtProgressBar(pb = pb, value = i / np) } next } idx <- seq.int(from = p[i], to = p[i + 1L] - 1L) xidx <- slot(object = data, name = entryname)[idx] - mu[i] nzero <- ncells - length(x = xidx) csum <- nzero * ifelse( test = isTRUE(x = standardize), yes = ((0 - mu[i]) / sd[i]) ^ 2, no = mu[i] ^ 2 ) if (isTRUE(x = standardize)) { xidx <- xidx / sd[i] xidx[xidx > clip] <- clip } fsum <- sum(xidx ^ 2) + csum fvars[i] <- fsum / (ncells - 1L) if (isTRUE(x = verbose)) { setTxtProgressBar(pb = pb, value = i / np) } } if (isTRUE(x = verbose)) { close(con = pb) } return(fvars) } #' @importFrom SeuratObject .CheckFmargin .SparseMean <- function(data, margin = 1L) { margin <- .CheckFmargin(fmargin = margin) if (margin != .MARGIN(object = data)) { data <- t(x = data) margin <- .MARGIN(object = data) } entryname <- .SparseSlots(x = data, type = 'entries') p <- slot(object = data, name = .SparseSlots(x = data, type = 'pointers')) if (p[1L] == 0) { p <- p + 1L } np <- length(x = p) - 1L nobs <- dim(x = data)[-margin] means <- vector(mode = 'numeric', length = np) for (i in seq_len(length.out = np)) { idx <- seq.int(from = p[i], to = p[i + 1L] - 1L) means[i] <- sum(slot(object = data, name = entryname)[idx]) / nobs } return(means) } #' @inheritParams stats::loess #' @param data A matrix #' @param fmargin Feature margin #' @param nselect Number of features to select #' @param clip After standardization values larger than \code{clip} will be set #' to \code{clip}; default is \code{NULL} which sets this value to the square #' root of the number of cells #' #' @importFrom Matrix rowMeans #' @importFrom SeuratObject .CheckFmargin #' #' @keywords internal #' #' @noRd #' .VST <- function( data, fmargin = 1L, nselect = 2000L, span = 0.3, clip = NULL, verbose = TRUE, ... 
) { fmargin <- .CheckFmargin(fmargin = fmargin) nfeatures <- dim(x = data)[fmargin] # TODO: Support transposed matrices # nfeatures <- nrow(x = data) if (IsSparse(x = data)) { mean.func <- .SparseMean var.func <- .SparseFeatureVar } else { mean.func <- .Mean var.func <- .FeatureVar } hvf.info <- SeuratObject::EmptyDF(n = nfeatures) # hvf.info$mean <- mean.func(data = data, margin = fmargin) hvf.info$mean <- rowMeans(x = data) hvf.info$variance <- var.func( data = data, mu = hvf.info$mean, fmargin = fmargin, verbose = verbose ) hvf.info$variance.expected <- 0L not.const <- hvf.info$variance > 0 fit <- loess( formula = log10(x = variance) ~ log10(x = mean), data = hvf.info[not.const, , drop = TRUE], span = span ) hvf.info$variance.expected[not.const] <- 10 ^ fit$fitted hvf.info$variance.standardized <- var.func( data = data, mu = hvf.info$mean, standardize = TRUE, sd = sqrt(x = hvf.info$variance.expected), clip = clip, verbose = verbose ) hvf.info$variable <- FALSE hvf.info$rank <- NA vs <- hvf.info$variance.standardized vs[vs == 0] <- NA vf <- head( x = order(hvf.info$variance.standardized, decreasing = TRUE), n = nselect ) hvf.info$variable[vf] <- TRUE hvf.info$rank[vf] <- seq_along(along.with = vf) # colnames(x = hvf.info) <- paste0('vst.', colnames(x = hvf.info)) return(hvf.info) } # hvf.methods$vst <- VST #%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% # S4 Methods #%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% ################################################################################ ################################# SCTransform ################################## ################################################################################ #' @importFrom SeuratObject Cells as.sparse #' #' @method SCTransform IterableMatrix #' @rdname SCTransform #' @concept preprocessing #' @export SCTransform.IterableMatrix <- function( object, cell.attr, reference.SCT.model = NULL, do.correct.umi = TRUE, ncells = 5000, residual.features = NULL, variable.features.n = 3000, variable.features.rv.th = 1.3, vars.to.regress = NULL, do.scale = FALSE, do.center = TRUE, clip.range = c(-sqrt(x = ncol(x = object) / 30), sqrt(x = ncol(x = object) / 30)), vst.flavor = 'v2', conserve.memory = FALSE, return.only.var.genes = TRUE, seed.use = 1448145, verbose = TRUE, ... ) { if (!is.null(x = seed.use)) { set.seed(seed = seed.use) } if (!is.null(reference.SCT.model)){ do.correct.umi <- FALSE do.center <- FALSE } sampled_cells <- sample.int(n = ncol(x = object), size = min(ncells, ncol(x = object))) umi <- as.sparse(x = object[, sampled_cells]) cell.attr <- cell.attr[colnames(x = umi),,drop=FALSE] vst.out <- SCTransform(object = umi, cell.attr = cell.attr, reference.SCT.model = reference.SCT.model, do.correct.umi = do.correct.umi, ncells = ncells, residual.features = residual.features, variable.features.n = variable.features.n, variable.features.rv.th = variable.features.rv.th, vars.to.regress = vars.to.regress, do.scale = do.scale, do.center = do.center, clip.range = clip.range, vst.flavor = vst.flavor, conserve.memory = conserve.memory, return.only.var.genes = return.only.var.genes, seed.use = seed.use, verbose = verbose, ...) 
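  # If UMI correction was skipped (do.correct.umi is forced to FALSE above when a
  # reference SCT model is supplied), keep the sampled raw UMI matrix so that
  # downstream SCT assay construction still has a counts matrix available.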
if (!do.correct.umi) { vst.out$umi_corrected <- umi } return(vst.out) } #' @importFrom SeuratObject CreateAssayObject SetAssayData GetAssayData CreateSCTAssay <- function(vst.out, do.correct.umi, residual.type, clip.range){ residual.type <- vst.out[['residual_type']] %||% 'pearson' sct.method <- vst.out[['sct.method']] assay.out <- CreateAssayObject(counts = vst.out$umi_corrected) # set the variable genes VariableFeatures(object = assay.out) <- vst.out$variable_features # put log1p transformed counts in data assay.out <- SetAssayData( object = assay.out, slot = 'data', new.data = log1p(x = GetAssayData(object = assay.out, slot = 'counts')) ) scale.data <- vst.out$y assay.out <- SetAssayData( object = assay.out, slot = 'scale.data', new.data = scale.data ) vst.out$y <- NULL # save clip.range into vst model vst.out$arguments$sct.clip.range <- clip.range vst.out$arguments$sct.method <- sct.method Misc(object = assay.out, slot = 'vst.out') <- vst.out assay.out <- as(object = assay.out, Class = "SCTAssay") return (assay.out) } #' @importFrom SeuratObject Cells DefaultLayer DefaultLayer<- Features #' LayerData LayerData<- as.sparse #' #' @method SCTransform StdAssay #' @export #' SCTransform.StdAssay <- function( object, layer = 'counts', cell.attr = NULL, reference.SCT.model = NULL, do.correct.umi = TRUE, ncells = 5000, residual.features = NULL, variable.features.n = 3000, variable.features.rv.th = 1.3, vars.to.regress = NULL, do.scale = FALSE, do.center = TRUE, clip.range = c(-sqrt(x = ncol(x = object) / 30), sqrt(x = ncol(x = object) / 30)), vst.flavor = 'v2', conserve.memory = FALSE, return.only.var.genes = TRUE, seed.use = 1448145, verbose = TRUE, ...) { if (!is.null(x = seed.use)) { set.seed(seed = seed.use) } if (!is.null(reference.SCT.model)){ do.correct.umi <- FALSE do.center <- FALSE } olayer <- layer <- unique(x = layer) layers <- Layers(object = object, search = layer) dataset.names <- gsub(pattern = paste0(layer, "."), replacement = "", x = layers) # loop over layers performing SCTransform() on individual layers sct.assay.list <- list() # Keep a tab of variable features per chunk variable.feature.list <- list() for (dataset.index in seq_along(along.with = layers)) { l <- layers[dataset.index] if (isTRUE(x = verbose)) { message("Running SCTransform on layer: ", l) } all_cells <- Cells(x = object, layer = l) all_features <- Features(x = object, layer = l) layer.data <- LayerData( object = object, layer = l, features = all_features, cells = all_cells ) local.reference.SCT.model <- NULL set.seed(seed = seed.use) do.correct.umi.chunk <- FALSE sct.function <- if (inherits(x = layer.data, what = 'V3Matrix')) { SCTransform.default } else { SCTransform } if (is.null(x = cell.attr) && is.null(x = reference.SCT.model)){ calcn <- CalcN(object = layer.data) cell.attr.layer <- data.frame(umi = calcn$nCount, log_umi = log10(x = calcn$nCount)) rownames(cell.attr.layer) <- colnames(x = layer.data) } else { cell.attr.layer <- cell.attr[colnames(x = layer.data),, drop=FALSE] } if (!"umi" %in% cell.attr.layer && is.null(x = reference.SCT.model)){ calcn <- CalcN(object = layer.data) cell.attr.tmp <- data.frame(umi = calcn$nCount) rownames(cell.attr.tmp) <- colnames(x = layer.data) cell.attr.layer$umi <- NA cell.attr.layer$log_umi <- NA cell.attr.layer[rownames(cell.attr.tmp), "umi"] <- cell.attr.tmp$umi cell.attr.layer[rownames(cell.attr.tmp), "log_umi"] <- log10(x = cell.attr.tmp$umi) } # Step 1: Learn model vst.out <- sct.function(object = layer.data, do.correct.umi = TRUE, cell.attr = 
cell.attr.layer, reference.SCT.model = reference.SCT.model, ncells = ncells, residual.features = residual.features, variable.features.n = variable.features.n, variable.features.rv.th = variable.features.rv.th, vars.to.regress = vars.to.regress, do.scale = do.scale, do.center = do.center, clip.range = clip.range, vst.flavor = vst.flavor, conserve.memory = conserve.memory, return.only.var.genes = return.only.var.genes, seed.use = seed.use, verbose = verbose, ...) min_var <- vst.out$arguments$min_variance residual.type <- vst.out[['residual_type']] %||% 'pearson' assay.out <- CreateSCTAssay(vst.out = vst.out, do.correct.umi = do.correct.umi, residual.type = residual.type, clip.range = clip.range) # If there is no reference model, use the model learned on subset of cells to calculate residuals # by setting the learned model as the reference model (local.reference.SCT.model) if (is.null(x = reference.SCT.model)) { local.reference.SCT.model <- assay.out@SCTModel.list[[1]] } else { local.reference.SCT.model <- reference.SCT.model } variable.features <- VariableFeatures(assay.out) # once we have the model, just calculate residuals for all cells # local.reference.SCT.model set to reference.model if it is non null vst_out.reference <- SCTModel_to_vst(SCTModel = local.reference.SCT.model) vst_out.reference$gene_attr <- local.reference.SCT.model@feature.attributes min_var <- vst_out.reference$arguments$min_variance if (min_var == "umi_median"){ counts.x <- as.sparse(x = layer.data[, sample.int(n = ncol(x = layer.data), size = min(ncells, ncol(x = layer.data)) )]) min_var <- (median(counts.x@x)/5)^2 } res_clip_range <- vst_out.reference$arguments$res_clip_range # Step 2: Use learned model to calculate residuals in chunks cells.vector <- 1:ncol(x = layer.data) cells.grid <- split(x = cells.vector, f = ceiling(x = seq_along(along.with = cells.vector)/ncells)) # Single block residuals <- list() corrected_counts <- list() cell_attrs <- list() if (length(x = cells.grid) == 1){ merged.assay <- assay.out corrected_counts[[1]] <- GetAssayData(object = assay.out, slot = "data") residuals[[1]] <- GetAssayData(object = assay.out, slot = "scale.data") cell_attrs[[1]] <- vst_out.reference$cell_attr sct.assay.list[[dataset.names[dataset.index]]] <- assay.out } else { # iterate over chunks to get residuals for (i in seq_len(length.out = length(x = cells.grid))) { vp <- cells.grid[[i]] if (verbose){ message("Getting residuals for block ", i, "(of ", length(cells.grid), ") for ", dataset.names[[dataset.index]], " dataset") } counts.vp <- as.sparse(x = layer.data[, vp]) cell.attr.object <- cell.attr.layer[colnames(x = counts.vp),, drop=FALSE] vst_out <- vst_out.reference vst_out$cell_attr <- cell.attr.object vst_out$gene_attr <- vst_out$gene_attr[variable.features,] if (return.only.var.genes){ new_residual <- get_residuals( vst_out = vst_out, umi = counts.vp[variable.features,], residual_type = "pearson", min_variance = min_var, res_clip_range = res_clip_range, verbosity = FALSE ) } else { new_residual <- get_residuals( vst_out = vst_out, umi = counts.vp[all_features,], residual_type = "pearson", min_variance = min_var, res_clip_range = res_clip_range, verbosity = FALSE ) } vst_out$y <- new_residual corrected_counts[[i]] <- correct_counts( x = vst_out, umi = counts.vp[all_features,], verbosity = FALSE# as.numeric(x = verbose) * 2 ) residuals[[i]] <- new_residual cell_attrs[[i]] <- cell.attr.object } new.residuals <- Reduce(cbind, residuals) corrected_counts <- Reduce(cbind, corrected_counts) cell_attrs <- Reduce(rbind, 
cell_attrs) vst_out.reference$cell_attr <- cell_attrs[colnames(new.residuals),] SCTModel.list <- PrepVSTResults(vst.res = vst_out.reference, cell.names = all_cells) SCTModel.list <- list(model1 = SCTModel.list) # scale data here as do.center and do.scale are set to FALSE inside new.residuals <- ScaleData( new.residuals, features = NULL, #vars.to.regress = vars.to.regress, #latent.data = cell.attr[, vars.to.regress, drop = FALSE], model.use = 'linear', use.umi = FALSE, do.scale = do.scale, do.center = do.center, scale.max = Inf, block.size = 750, min.cells.to.block = 3000, verbose = verbose ) assay.out <- CreateSCTAssayObject(counts = corrected_counts, scale.data = new.residuals, SCTModel.list = SCTModel.list) assay.out$data <- log1p(x = corrected_counts) VariableFeatures(assay.out) <- variable.features # one assay per dataset if (verbose){ message("Finished calculating residuals for ", dataset.names[dataset.index]) } sct.assay.list[[dataset.names[dataset.index]]] <- assay.out variable.feature.list[[dataset.names[dataset.index]]] <- VariableFeatures(assay.out) } } # Return array by merging everythin if (length(x = sct.assay.list) == 1){ merged.assay <- sct.assay.list[[1]] } else { vf.list <- lapply(X = sct.assay.list, FUN = function(object.i) VariableFeatures(object = object.i)) variable.features.union <- Reduce(f = union, x = vf.list) var.features.sorted <- sort( x = table(unlist(x = vf.list, use.names = FALSE)), decreasing = TRUE ) # select top ranking features var.features <- variable.features.union # calculate residuals for union of features for (layer.name in names(x = sct.assay.list)){ vst_out <- SCTModel_to_vst(SCTModel = slot(object = sct.assay.list[[layer.name]], name = "SCTModel.list")[[1]]) all_cells <- Cells(x = object, layer = paste0(layer, ".", layer.name)) all_features <- Features(x = object, layer = paste0(layer, ".", layer.name)) variable.features.target <- intersect(x = rownames(x = vst_out$model_pars_fit), y = var.features) variable.features.target <- setdiff(x = variable.features.target, y = VariableFeatures(sct.assay.list[[layer.name]])) if (length(x = variable.features.target )<1){ next } layer.counts.tmp <- LayerData( object = object, layer = paste0(layer, ".", layer.name), cells = all_cells ) layer.counts.tmp <- as.sparse(x = layer.counts.tmp) vst_out$cell_attr <- vst_out$cell_attr[, c("log_umi"), drop=FALSE] vst_out$model_pars_fit <- vst_out$model_pars_fit[variable.features.target,,drop=FALSE] new_residual <- GetResidualsChunked(vst_out = vst_out, layer.counts = layer.counts.tmp, residual_type = "pearson", min_variance = min_var, res_clip_range = res_clip_range, verbose = FALSE) old_residual <- GetAssayData(object = sct.assay.list[[layer.name]], slot = 'scale.data') merged_residual <- rbind(old_residual, new_residual) sct.assay.list[[layer.name]] <- SetAssayData(object = sct.assay.list[[layer.name]], slot = 'scale.data', new.data = merged_residual) VariableFeatures(sct.assay.list[[layer.name]]) <- rownames(x = merged_residual) } merged.assay <- merge(x = sct.assay.list[[1]], y = sct.assay.list[2:length(sct.assay.list)]) VariableFeatures(object = merged.assay) <- VariableFeatures(object = merged.assay, use.var.features = FALSE, nfeatures = variable.features.n) } # set the names of SCTmodels to be layer names models <- slot(object = merged.assay, name="SCTModel.list") names(models) <- names(x = sct.assay.list) slot(object = merged.assay, name="SCTModel.list") <- models gc(verbose = FALSE) return(merged.assay) } #' Calculate pearson residuals of features not in the 
scale.data #' #' This function calls sctransform::get_residuals. #' #' @param object A seurat object #' @param features Name of features to add into the scale.data #' @param assay Name of the assay of the seurat object generated by SCTransform #' @param layer Name (prefix) of the layer to pull counts from #' @param umi.assay Name of the assay of the seurat object containing UMI matrix #' and the default is RNA #' @param clip.range Numeric of length two specifying the min and max values the #' Pearson residual will be clipped to #' @param reference.SCT.model reference.SCT.model If a reference SCT model should be used #' for calculating the residuals. When set to not NULL, ignores the `SCTModel` #' paramater. #' @param replace.value Recalculate residuals for all features, even if they are #' already present. Useful if you want to change the clip.range. #' @param na.rm For features where there is no feature model stored, return NA #' for residual value in scale.data when na.rm = FALSE. When na.rm is TRUE, only #' return residuals for features with a model stored for all cells. #' @param verbose Whether to print messages and progress bars #' #' @return Returns a Seurat object containing Pearson residuals of added #' features in its scale.data #' #' @importFrom sctransform get_residuals #' @importFrom matrixStats rowAnyNAs #' #' @export #' @concept preprocessing #' #' @seealso \code{\link[sctransform]{get_residuals}} FetchResiduals <- function( object, features, assay = NULL, umi.assay = "RNA", layer = "counts", clip.range = NULL, reference.SCT.model = NULL, replace.value = FALSE, na.rm = TRUE, verbose = TRUE) { assay <- assay %||% DefaultAssay(object = object) if (IsSCT(assay = object[[assay]])) { object[[assay]] <- as(object[[assay]], "SCTAssay") } if (!inherits(x = object[[assay]], what = "SCTAssay")) { stop(assay, " assay was not generated by SCTransform") } sct.models <- levels(x = object[[assay]]) if (length(sct.models)==1){ sct.models <- list(sct.models) } if (length(x = sct.models) == 0) { warning("SCT model not present in assay", call. = FALSE, immediate. = TRUE) return(object) } possible.features <- Reduce(f = union, x = lapply(X = sct.models, FUN = function(x) { rownames(x = SCTResults(object = object[[assay]], slot = "feature.attributes", model = x)) })) bad.features <- setdiff(x = features, y = possible.features) if (length(x = bad.features) > 0) { warning("The following requested features are not present in any models: ", paste(bad.features, collapse = ", "), call. = FALSE ) features <- intersect(x = features, y = possible.features) } features.orig <- features if (na.rm) { # only compute residuals when feature model info is present in all features <- names(x = which(x = table(unlist(x = lapply( X = sct.models, FUN = function(x) { rownames(x = SCTResults(object = object[[assay]], slot = "feature.attributes", model = x)) } ))) == length(x = sct.models))) if (length(x = features) == 0) { return(object) } } features <- intersect(x = features.orig, y = features) if (length(features) < 1){ warning("The following requested features are not present in all the models: ", paste(features.orig, collapse = ", "), call. = FALSE ) return(NULL) } #if (length(x = sct.models) > 1 & verbose) { # message("This SCTAssay contains multiple SCT models. 
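# Each remaining count layer is paired below (by position) with the SCT model
# that was learned on it; residuals are computed per layer and column-bound at
# the end. Hedged usage sketch; the object and feature names are placeholders,
# and an SCT assay created by SCTransform() is assumed:
#   pbmc <- SCTransform(pbmc)
#   res <- FetchResiduals(object = pbmc, features = c("MS4A1", "CD3D"),
#                         assay = "SCT", umi.assay = "RNA")
#   dim(res)   # requested features x cells, centered Pearson residuals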
Computing residuals for cells using") #} # Get all (count) layers layers <- Layers(object = object[[umi.assay]], search = layer) # iterate over layer running sct model for each of the object names new.residuals <- list() total_cells <- 0 all_cells <- c() if (!is.null(x = reference.SCT.model)) { if (inherits(x = reference.SCT.model, what = "SCTModel")) { reference.SCT.model <- SCTModel_to_vst(SCTModel = reference.SCT.model) } if (is.list(x = reference.SCT.model) & inherits(x = reference.SCT.model[[1]], what = "SCTModel")) { stop("reference.SCT.model must be one SCTModel rather than a list of SCTModel") } if (reference.SCT.model$model_str != "y ~ log_umi") { stop("reference.SCT.model must be derived using default SCT regression formula, `y ~ log_umi`") } } for (i in seq_along(along.with = layers)) { l <- layers[i] sct_model <- sct.models[[i]] # these cells belong to this layer layer_cells <- Cells(x = object[[umi.assay]], layer = l) all_cells <- c(all_cells, layer_cells) total_cells <- total_cells + length(layer_cells) # calculate residual using this model and these cells new.residuals[[i]] <- FetchResidualSCTModel( object = object, umi.assay = umi.assay, assay = assay, layer = l, layer.cells = layer_cells, SCTModel = sct_model, reference.SCT.model = reference.SCT.model, new_features = features, replace.value = replace.value, clip.range = clip.range, verbose = verbose ) } existing.data <- GetAssayData(object = object, slot = "scale.data", assay = assay) all.features <- union(x = rownames(x = existing.data), y = features) new.scale <- matrix( data = NA, nrow = length(x = all.features), ncol = total_cells, dimnames = list(all.features, all_cells) ) common_cells <- intersect(colnames(new.scale), colnames(existing.data)) if (nrow(x = existing.data) > 0) { new.scale[rownames(x = existing.data), common_cells] <- existing.data[, common_cells] } if (length(x = new.residuals) == 1 & is.list(x = new.residuals)) { new.residuals <- new.residuals[[1]] } else { new.residuals <- Reduce(cbind, new.residuals) #new.residuals <- matrix(data = unlist(new.residuals), nrow = nrow(new.scale) , ncol = ncol(new.scale)) #colnames(new.residuals) <- colnames(new.scale) #rownames(new.residuals) <- rownames(new.scale) } new.scale[rownames(x = new.residuals), colnames(x = new.residuals)] <- new.residuals if (na.rm) { new.scale <- new.scale[!rowAnyNAs(x = new.scale), ] } return(new.scale[features, ]) } #' Calculate pearson residuals of features not in the scale.data #' This function is the secondary function under FetchResiduals #' #' @param object A seurat object #' @param assay Name of the assay of the seurat object generated by #' SCTransform. Default is "SCT" #' @param umi.assay Name of the assay of the seurat object to fetch #' UMIs from. Default is "RNA" #' @param layer Name of the layer under `umi.assay` to fetch UMIs from. #' Default is "counts" #' @param chunk_size Number of cells to load in memory for calculating #' residuals #' @param layer.cells Vector of cells to calculate the residual for. #' Default is NULL which uses all cells in the layer #' @param SCTModel Which SCTmodel to use from the object for calculating #' the residual. Will be ignored if reference.SCT.model is set #' @param reference.SCT.model If a reference SCT model should be used #' for calculating the residuals. When set to not NULL, ignores the `SCTModel` #' paramater. 
#' @param new_features A vector of features to calculate the residuals for #' @param clip.range Numeric of length two specifying the min and max values #' the Pearson residual will be clipped to. Useful if you want to change the #' clip.range. #' @param replace.value Whether to replace the value of residuals if it #' already exists #' @param verbose Whether to print messages and progress bars #' #' @return Returns a matrix containing centered pearson residuals of #' added features #' #' @importFrom sctransform get_residuals #' @importFrom Matrix colSums # FetchResidualSCTModel <- function( object, assay = "SCT", umi.assay = "RNA", layer = "counts", chunk_size = 2000, layer.cells = NULL, SCTModel = NULL, reference.SCT.model = NULL, new_features = NULL, clip.range = NULL, replace.value = FALSE, verbose = FALSE ) { model.cells <- character() model.features <- Features(x = object, assay = assay) if (is.null(x = reference.SCT.model)){ clip.range <- clip.range %||% SCTResults(object = object[[assay]], slot = "clips", model = SCTModel)$sct model.features <- rownames(x = SCTResults(object = object[[assay]], slot = "feature.attributes", model = SCTModel)) model.cells <- Cells(x = slot(object = object[[assay]], name = "SCTModel.list")[[SCTModel]]) sct.method <- SCTResults(object = object[[assay]], slot = "arguments", model = SCTModel)$sct.method %||% "default" } layer.cells <- layer.cells %||% Cells(x = object[[umi.assay]], layer = layer) if (!is.null(reference.SCT.model)) { # use reference SCT model sct.method <- "reference" } existing.scale.data <- NULL if (is.null(x=reference.SCT.model)){ existing.scale.data <- suppressWarnings(GetAssayData(object = object, assay = assay, slot = "scale.data")) } scale.data.cells <- colnames(x = existing.scale.data) scale.data.cells.common <- intersect(scale.data.cells, layer.cells) scale.data.cells <- intersect(x = scale.data.cells, y = scale.data.cells.common) if (length(x = setdiff(x = layer.cells, y = scale.data.cells)) == 0) { # existing.scale.data <- suppressWarnings(GetAssayData(object = object, assay = assay, slot = "scale.data")) #full.scale.data <- matrix(data = NA, nrow = nrow(x = existing.scale.data), # ncol = length(x = layer.cells), dimnames = list(rownames(x = existing.scale.data), layer.cells)) #full.scale.data[rownames(x = existing.scale.data), colnames(x = existing.scale.data)] <- existing.scale.data #existing_features <- names(x = which(x = !apply( # X = full.scale.data, # MARGIN = 1, # FUN = anyNA #))) existing_features <- rownames(x = existing.scale.data) } else { existing_features <- character() } if (replace.value) { features_to_compute <- new_features } else { features_to_compute <- setdiff(x = new_features, y = existing_features) } if (length(features_to_compute)<1){ return (existing.scale.data[intersect(x = rownames(x = scale.data.cells), y = new_features),,drop=FALSE]) } if (is.null(x = reference.SCT.model) & length(x = setdiff(x = model.cells, y = scale.data.cells)) == 0) { existing_features <- names(x = which(x = ! apply( X = GetAssayData(object = object, assay = assay, slot = "scale.data")[, model.cells], MARGIN = 1, FUN = anyNA) )) } else { existing_features <- character() } if (sct.method == "reference.model") { if (verbose) { message("sct.model ", SCTModel, " is from reference, so no residuals will be recalculated") } features_to_compute <- character() } if (!umi.assay %in% Assays(object = object)) { warning("The umi assay (", umi.assay, ") is not present in the object. ", "Cannot compute additional residuals.", call. 
= FALSE, immediate. = TRUE ) return(NULL) } # these features do not have feature attriutes diff_features <- setdiff(x = features_to_compute, y = model.features) intersect_features <- intersect(x = features_to_compute, y = model.features) if (sct.method == "reference") { vst_out <- SCTModel_to_vst(SCTModel = reference.SCT.model) # override clip.range clip.range <- vst_out$arguments$sct.clip.range umi.field <- paste0("nCount_", assay) # get rid of the cell attributes vst_out$cell_attr <- NULL all.features <- intersect( x = rownames(x = vst_out$gene_attr), y = features_to_compute ) vst_out$gene_attr <- vst_out$gene_attr[all.features, , drop = FALSE] vst_out$model_pars_fit <- vst_out$model_pars_fit[all.features, , drop = FALSE] } else { vst_out <- SCTModel_to_vst(SCTModel = slot(object = object[[assay]], name = "SCTModel.list")[[SCTModel]]) clip.range <- vst_out$arguments$sct.clip.range } clip.max <- max(clip.range) clip.min <- min(clip.range) layer.cells <- layer.cells %||% Cells(x = object[[umi.assay]], layer = layer) if (length(x = diff_features) == 0) { counts <- LayerData( object = object[[umi.assay]], layer = layer, cells = layer.cells ) cells.vector <- 1:length(x = layer.cells) cells.grid <- split(x = cells.vector, f = ceiling(x = seq_along(along.with = cells.vector)/chunk_size)) new_residuals <- list() for (i in seq_len(length.out = length(x = cells.grid))) { vp <- cells.grid[[i]] block <- counts[,vp, drop=FALSE] umi.all <- as.sparse(x = block) # calculate min_variance for get_residuals # required when vst_out$arguments$min_variance == "umi_median" # only calculated once if (i==1){ nz_median <- median(umi.all@x) min_var_custom <- (nz_median / 5)^2 } umi <- umi.all[features_to_compute, , drop = FALSE] ## Add cell_attr for missing cells cell_attr <- data.frame( umi = colSums(umi.all), log_umi = log10(x = colSums(umi.all)) ) rownames(cell_attr) <- colnames(umi.all) if (sct.method %in% c("reference.model", "reference")) { vst_out$cell_attr <- cell_attr[colnames(umi.all), ,drop=FALSE] } else { cell_attr_existing <- vst_out$cell_attr cells_missing <- setdiff(rownames(cell_attr), rownames(cell_attr_existing)) if (length(cells_missing)>0){ cell_attr_missing <- cell_attr[cells_missing, ,drop=FALSE] missing_cols <- setdiff(x = colnames(x = cell_attr_existing), y = colnames(x = cell_attr_missing)) if (length(x = missing_cols) > 0) { cell_attr_missing[, missing_cols] <- NA } vst_out$cell_attr <- rbind(cell_attr_existing, cell_attr_missing) vst_out$cell_attr <- vst_out$cell_attr[colnames(umi), , drop=FALSE] } } if (verbose) { if (sct.method == "reference.model") { message("using reference sct model") } else { message("sct.model: ", SCTModel, " on ", ncol(x = umi), " cells: ", colnames(x = umi.all)[1], " .. 
", colnames(x = umi.all)[ncol(umi.all)]) } } if (vst_out$arguments$min_variance == "umi_median"){ min_var <- min_var_custom } else { min_var <- vst_out$arguments$min_variance } if (nrow(umi)>0){ vst_out.tmp <- vst_out vst_out.tmp$cell_attr <- vst_out.tmp$cell_attr[colnames(x = umi),] new_residual <- get_residuals( vst_out = vst_out.tmp, umi = umi, residual_type = "pearson", min_variance = min_var, res_clip_range = c(clip.min, clip.max), verbosity = as.numeric(x = verbose) * 2 ) } else { return(matrix( data = NA, nrow = length(x = features_to_compute), ncol = length(x = colnames(umi.all)), dimnames = list(features_to_compute, colnames(umi.all)) )) } new_residual <- as.matrix(x = new_residual) new_residuals[[i]] <- new_residual } new_residual <- do.call(what = cbind, args = new_residuals) # centered data if no reference model is provided if (is.null(x = reference.SCT.model)){ new_residual <- new_residual - rowMeans(x = new_residual) } else { # subtract residual mean from reference model if (verbose){ message("Using residual mean from reference for centering") } vst_out <- SCTModel_to_vst(SCTModel = reference.SCT.model) ref.residuals.mean <- vst_out$gene_attr[rownames(x = new_residual),"residual_mean"] new_residual <- sweep( x = new_residual, MARGIN = 1, STATS = ref.residuals.mean, FUN = "-" ) } # return (new_residuals) } else { # Some features do not exist warning( "In the SCTModel ", SCTModel, ", the following ", length(x = diff_features), " features do not exist in the counts slot: ", paste(diff_features, collapse = ", ") ) if (length(x = intersect_features) == 0) { # No features exist return(matrix( data = NA, nrow = length(x = features_to_compute), ncol = length(x = model.cells), dimnames = list(features_to_compute, model.cells) )) } } old.features <- setdiff(x = new_features, y = features_to_compute) if (length(x = old.features) > 0) { old_residuals <- GetAssayData(object = object[[assay]], slot = "scale.data")[old.features, model.cells, drop = FALSE] new_residual <- rbind(new_residual, old_residuals)[new_features, ] } return(new_residual) } #' @importFrom sctransform get_residuals GetResidualsChunked <- function(vst_out, layer.counts, residual_type, min_variance, res_clip_range, verbose, chunk_size=5000) { if (inherits(x = layer.counts, what = 'V3Matrix')) { residuals <- get_residuals( vst_out = vst_out, umi = layer.counts, residual_type = residual_type, min_variance = min_variance, res_clip_range = res_clip_range, verbosity = as.numeric(x = verbose) * 2 ) } else if (inherits(x = layer.counts, what = "IterableMatrix")) { cells.vector <- 1:ncol(x = layer.counts) residuals.list <- list() cells.grid <- split(x = cells.vector, f = ceiling(x = seq_along(along.with = cells.vector)/chunk_size)) for (i in seq_len(length.out = length(x = cells.grid))) { vp <- cells.grid[[i]] counts.vp <- as.sparse(x = layer.counts[, vp]) vst.out <- vst_out vst.out$cell_attr <- vst.out$cell_attr[colnames(x = counts.vp),,drop=FALSE] residuals.list[[i]] <- get_residuals( vst_out = vst.out, umi = counts.vp, residual_type = residual_type, min_variance = min_variance, res_clip_range = res_clip_range, verbosity = as.numeric(x = verbose) * 2 ) } residuals <- Reduce(f = cbind, x = residuals.list) } else { stop("Data type not supported") } return (residuals) } #' temporal function to get residuals from reference #' @param object A seurat object #' @param reference.SCT.model a reference SCT model that should be used #' for calculating the residuals #' @param features Names of features to compute #' @param nCount_UMI UMI 
counts. If not specified, defaults to #' column sums of object #' @param verbose Whether to print messages and progress bars #' @importFrom sctransform get_residuals #' @importFrom Matrix colSums #' FetchResiduals_reference <- function(object, reference.SCT.model = NULL, features = NULL, nCount_UMI = NULL, verbose = FALSE) { ## Add cell_attr for missing cells nCount_UMI <- nCount_UMI %||% colSums(object) cell_attr <- data.frame( umi = nCount_UMI, log_umi = log10(x = nCount_UMI) ) features_to_compute <- features features_to_compute <- intersect(features_to_compute, rownames(object)) vst_out <- SCTModel_to_vst(SCTModel = reference.SCT.model) # override clip.range clip.range <- vst_out$arguments$sct.clip.range # get rid of the cell attributes vst_out$cell_attr <- NULL all.features <- intersect( x = rownames(x = vst_out$gene_attr), y = features_to_compute ) vst_out$gene_attr <- vst_out$gene_attr[all.features, , drop = FALSE] vst_out$model_pars_fit <- vst_out$model_pars_fit[all.features, , drop = FALSE] clip.max <- max(clip.range) clip.min <- min(clip.range) umi <- object[features_to_compute, , drop = FALSE] rownames(cell_attr) <- colnames(object) vst_out$cell_attr <- cell_attr if (verbose) { message("using reference sct model") } if (vst_out$arguments$min_variance == "umi_median"){ nz_median <- 1 min_var_custom <- (nz_median / 5)^2 min_var <- min_var_custom } else { min_var <- vst_out$arguments$min_variance } new_residual <- get_residuals( vst_out = vst_out, umi = umi, residual_type = "pearson", min_variance = min_var, verbosity = as.numeric(x = verbose) * 2 ) ref.residuals.mean <- vst_out$gene_attr[rownames(x = new_residual),"residual_mean"] new_residual <- sweep( x = new_residual, MARGIN = 1, STATS = ref.residuals.mean, FUN = "-" ) new_residual <- MinMax(data = new_residual, min = clip.min, max = clip.max) return(new_residual) } #' Find variable features based on mean.var.plot #' #' @param data Data matrix #' @param nselect Number of features to select based on dispersion values #' @param verbose Whether to print messages and progress bars #' @param mean.cutoff Numeric of length two specifying the min and max values #' @param dispersion.cutoff Numeric of length two specifying the min and max values #' #' @keywords internal #' MVP <- function( data, verbose = TRUE, nselect = 2000L, mean.cutoff = c(0.1, 8), dispersion.cutoff = c(1, Inf), ... 
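# MVP() flags a feature as variable when its mean expression falls strictly
# inside `mean.cutoff` and its dispersion statistic (third column of the
# DISP() output) falls strictly inside `dispersion.cutoff`; ranks are then
# assigned in order of decreasing dispersion. Hedged usage sketch through the
# exported interface (the object name is a placeholder):
#   pbmc <- FindVariableFeatures(pbmc, selection.method = "mean.var.plot",
#                                mean.cutoff = c(0.1, 8),
#                                dispersion.cutoff = c(1, Inf))
#   head(VariableFeatures(pbmc))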
) { hvf.info <- DISP(data = data, nselect = nselect, verbose = verbose) hvf.info$variable <- FALSE hvf.info$rank <- NA hvf.info <- hvf.info[order(hvf.info$mvp.dispersion, decreasing = TRUE), , drop = FALSE] means.use <- (hvf.info[, 1] > mean.cutoff[1]) & (hvf.info[, 1] < mean.cutoff[2]) dispersions.use <- (hvf.info[, 3] > dispersion.cutoff[1]) & (hvf.info[, 3] < dispersion.cutoff[2]) hvf.info[which(x = means.use & dispersions.use), 'variable'] <- TRUE rank.rows <- rownames(x = hvf.info)[which(x = means.use & dispersions.use)] selected.indices <- which(rownames(x = hvf.info) %in% rank.rows) hvf.info$rank[selected.indices] <- seq_along(selected.indices) hvf.info <- hvf.info[order(as.numeric(row.names(hvf.info))), ] # hvf.info[hvf.info$variable,'rank'] <- rank(x = hvf.info[hvf.info$variable,'rank']) # hvf.info[!hvf.info$variable,'rank'] <- NA return(hvf.info) } Seurat/R/convenience.R0000644000176200001440000002627714525500037014354 0ustar liggesusers#' @include generics.R #' @include visualization.R #' NULL #%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% # Functions #%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% #' @param fov Name to store FOV as #' @param assay Name to store expression matrix as #' @inheritDotParams ReadAkoya #' #' @return \code{LoadAkoya}: A \code{\link[SeuratObject]{Seurat}} object #' #' @importFrom SeuratObject Cells CreateFOV CreateSeuratObject #' #' @export #' #' @rdname ReadAkoya #' LoadAkoya <- function( filename, type = c('inform', 'processor', 'qupath'), fov, assay = 'Akoya', ... ) { # read in matrix and centroids data <- ReadAkoya(filename = filename, type = type) # convert centroids into coords object coords <- suppressWarnings(expr = CreateFOV( coords = data$centroids, type = 'centroids', key = 'fov', assay = assay )) colnames(x = data$metadata) <- suppressWarnings( expr = make.names(names = colnames(x = data$metadata)) ) # build Seurat object from matrix obj <- CreateSeuratObject( counts = data$matrix, assay = assay, meta.data = data$metadata ) # make sure coords only contain cells in seurat object coords <- subset(x = coords, cells = Cells(x = obj)) suppressWarnings(expr = obj[[fov]] <- coords) # add image to seurat object # Add additional assays for (i in setdiff(x = names(x = data), y = c('matrix', 'centroids', 'metadata'))) { suppressWarnings(expr = obj[[i]] <- CreateAssayObject(counts = data[[i]])) } return(obj) } #' @inheritParams ReadAkoya #' @param data.dir Path to a directory containing Vitessce cells #' and clusters JSONs #' #' @return \code{LoadHuBMAPCODEX}: A \code{\link[SeuratObject]{Seurat}} object #' #' @importFrom SeuratObject Cells CreateFOV CreateSeuratObject #' #' @export #' #' @rdname ReadVitessce #' LoadHuBMAPCODEX <- function(data.dir, fov, assay = 'CODEX') { data <- ReadVitessce( counts = file.path(data.dir, "reg1_stitched_expressions.clusters.json"), coords = file.path(data.dir, "reg1_stitched_expressions.cells.json"), type = "segmentations" ) # Create spatial and Seurat objects coords <- CreateFOV( coords = data$segmentations, molecules = data$molecules, assay = assay ) obj <- CreateSeuratObject(counts = data$counts, assay = assay) # make sure spatial coords only contain cells in seurat object coords <- subset(x = coords, cells = Cells(x = obj)) obj[[fov]] <- coords return(obj) } #' @inheritParams ReadAkoya #' @param data.dir Path to folder containing Nanostring SMI outputs #' #' @return \code{LoadNanostring}: A \code{\link[SeuratObject]{Seurat}} object #' #' @importFrom 
SeuratObject Cells CreateCentroids CreateFOV #' CreateSegmentation CreateSeuratObject #' #' @export #' #' @rdname ReadNanostring #' LoadNanostring <- function(data.dir, fov, assay = 'Nanostring') { data <- ReadNanostring( data.dir = data.dir, type = c("centroids", "segmentations") ) segs <- CreateSegmentation(data$segmentations) cents <- CreateCentroids(data$centroids) segmentations.data <- list( "centroids" = cents, "segmentation" = segs ) coords <- CreateFOV( coords = segmentations.data, type = c("segmentation", "centroids"), molecules = data$pixels, assay = assay ) obj <- CreateSeuratObject(counts = data$matrix, assay = assay) # subset both object and coords based on the cells shared by both cells <- intersect( Cells(x = coords, boundary = "segmentation"), Cells(x = coords, boundary = "centroids") ) cells <- intersect(Cells(obj), cells) coords <- subset(x = coords, cells = cells) obj[[fov]] <- coords return(obj) } #' @return \code{LoadVizgen}: A \code{\link[SeuratObject]{Seurat}} object #' #' @importFrom SeuratObject Cells CreateCentroids CreateFOV #' CreateSegmentation CreateSeuratObject #' #' @export #' #' @rdname ReadVizgen #' LoadVizgen <- function(data.dir, fov, assay = 'Vizgen', z = 3L) { data <- ReadVizgen( data.dir = data.dir, filter = "^Blank-", type = c("centroids", "segmentations"), z = z ) segs <- CreateSegmentation(data$segmentations) cents <- CreateCentroids(data$centroids) segmentations.data <- list( "centroids" = cents, "segmentation" = segs ) coords <- CreateFOV( coords = segmentations.data, type = c("segmentation", "centroids"), molecules = data$microns, assay = assay ) obj <- CreateSeuratObject(counts = data$transcripts, assay = assay) # only consider the cells we have counts and a segmentation for # Cells which don't have a segmentation are probably found in other z slices. coords <- subset( x = coords, cells = intersect( x = Cells(x = coords[["segmentation"]]), y = Cells(x = obj) ) ) # add coords to seurat object obj[[fov]] <- coords return(obj) } #' @return \code{LoadXenium}: A \code{\link[SeuratObject]{Seurat}} object #' #' @param data.dir Path to folder containing Nanostring SMI outputs #' @param fov FOV name #' @param assay Assay name #' #' @importFrom SeuratObject Cells CreateCentroids CreateFOV #' CreateSegmentation CreateSeuratObject #' #' @export #' #' @rdname ReadXenium #' LoadXenium <- function(data.dir, fov = 'fov', assay = 'Xenium') { data <- ReadXenium( data.dir = data.dir, type = c("centroids", "segmentations"), ) segmentations.data <- list( "centroids" = CreateCentroids(data$centroids), "segmentation" = CreateSegmentation(data$segmentations) ) coords <- CreateFOV( coords = segmentations.data, type = c("segmentation", "centroids"), molecules = data$microns, assay = assay ) xenium.obj <- CreateSeuratObject(counts = data$matrix[["Gene Expression"]], assay = assay) if("Blank Codeword" %in% names(data$matrix)) xenium.obj[["BlankCodeword"]] <- CreateAssayObject(counts = data$matrix[["Blank Codeword"]]) else xenium.obj[["BlankCodeword"]] <- CreateAssayObject(counts = data$matrix[["Unassigned Codeword"]]) xenium.obj[["ControlCodeword"]] <- CreateAssayObject(counts = data$matrix[["Negative Control Codeword"]]) xenium.obj[["ControlProbe"]] <- CreateAssayObject(counts = data$matrix[["Negative Control Probe"]]) xenium.obj[[fov]] <- coords return(xenium.obj) } #' @param ... Extra parameters passed to \code{DimHeatmap} #' #' @rdname DimHeatmap #' @concept convenience #' @export #' PCHeatmap <- function(object, ...) 
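# PCHeatmap is a thin convenience wrapper: it forces reduction = "pca" and
# forwards all other arguments to DimHeatmap(). Hedged usage sketch with the
# bundled pbmc_small dataset:
#   data("pbmc_small")
#   PCHeatmap(pbmc_small, dims = 1, cells = 60, balanced = TRUE)
#   # equivalent to:
#   # DimHeatmap(pbmc_small, dims = 1, cells = 60, balanced = TRUE,
#   #            reduction = "pca")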
{ args <- list('object' = object) args <- c(args, list(...)) args$reduction <- "pca" return(do.call(what = 'DimHeatmap', args = args)) } #' @param ... Extra parameters passed to \code{DimPlot} #' #' @rdname DimPlot #' @concept convenience #' @export #' PCAPlot <- function(object, ...) { return(SpecificDimPlot(object = object, ...)) } #' @rdname SpatialPlot #' @concept convenience #' @concept spatial #' @export #' SpatialDimPlot <- function( object, group.by = NULL, images = NULL, cols = NULL, crop = TRUE, cells.highlight = NULL, cols.highlight = c('#DE2D26', 'grey50'), facet.highlight = FALSE, label = FALSE, label.size = 7, label.color = 'white', repel = FALSE, ncol = NULL, combine = TRUE, pt.size.factor = 1.6, alpha = c(1, 1), image.alpha = 1, stroke = 0.25, label.box = TRUE, interactive = FALSE, information = NULL ) { return(SpatialPlot( object = object, group.by = group.by, images = images, cols = cols, crop = crop, cells.highlight = cells.highlight, cols.highlight = cols.highlight, facet.highlight = facet.highlight, label = label, label.size = label.size, label.color = label.color, repel = repel, ncol = ncol, combine = combine, pt.size.factor = pt.size.factor, alpha = alpha, image.alpha = image.alpha, stroke = stroke, label.box = label.box, interactive = interactive, information = information )) } #' @rdname SpatialPlot #' @concept convenience #' @concept spatial #' @export #' SpatialFeaturePlot <- function( object, features, images = NULL, crop = TRUE, slot = 'data', keep.scale = "feature", min.cutoff = NA, max.cutoff = NA, ncol = NULL, combine = TRUE, pt.size.factor = 1.6, alpha = c(1, 1), image.alpha = 1, stroke = 0.25, interactive = FALSE, information = NULL ) { return(SpatialPlot( object = object, features = features, images = images, crop = crop, slot = slot, keep.scale = keep.scale, min.cutoff = min.cutoff, max.cutoff = max.cutoff, ncol = ncol, combine = combine, pt.size.factor = pt.size.factor, alpha = alpha, image.alpha = image.alpha, stroke = stroke, interactive = interactive, information = information )) } #' @rdname DimPlot #' @concept convenience #' @export #' TSNEPlot <- function(object, ...) { return(SpecificDimPlot(object = object, ...)) } #' @rdname DimPlot #' @concept convenience #' @export #' UMAPPlot <- function(object, ...) { return(SpecificDimPlot(object = object, ...)) } #%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% # Methods for Seurat-defined generics #%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% #%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% # Methods for R-defined generics #%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% #%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% # Internal #%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% # @rdname DimPlot # SpecificDimPlot <- function(object, ...) 
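# SpecificDimPlot recovers the name of the wrapper that called it from the
# call stack (PCAPlot, TSNEPlot, UMAPPlot), strips "Plot" and lower-cases it,
# then keeps only those matching reductions whose name also contains the
# default assay name. If exactly one match survives it is used; otherwise the
# lower-cased name itself is passed to DimPlot() as the reduction.
# Rough illustration of the name munging only:
#   tolower(gsub(pattern = "Plot", replacement = "", x = "PCAPlot"))   # "pca"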
{ funs <- sys.calls() name <- as.character(x = funs[[length(x = funs) - 1]])[1] name <- tolower(x = gsub(pattern = 'Plot', replacement = '', x = name)) args <- list('object' = object) args <- c(args, list(...)) reduc <- grep( pattern = name, x = names(x = object), value = TRUE, ignore.case = TRUE ) reduc <- grep(pattern = DefaultAssay(object = object), x = reduc, value = TRUE) args$reduction <- ifelse(test = length(x = reduc) == 1, yes = reduc, no = name) tryCatch( expr = return(do.call(what = 'DimPlot', args = args)), error = function(e) { stop(e) } ) } #' Read output from Parse Biosciences #' #' @param data.dir Directory containing the data files #' @param ... Extra parameters passed to \code{\link{ReadMtx}} #' @concept convenience #' @export #' ReadParseBio <- function(data.dir, ...) { file.dir <- list.files(path = data.dir, pattern = ".mtx") mtx <- file.path(data.dir, file.dir) cells <- file.path(data.dir, "cell_metadata.csv") features <- file.path(data.dir, "all_genes.csv") return(ReadMtx( mtx = mtx, cells = cells, features = features, cell.column = 1, feature.column = 2, cell.sep = ",", feature.sep = ",", skip.cell = 1, skip.feature = 1, mtx.transpose = TRUE )) } #' Read output from STARsolo #' #' @param data.dir Directory containing the data files #' @param ... Extra parameters passed to \code{\link{ReadMtx}} #' #' @rdname ReadSTARsolo #' @concept convenience #' @export #' ReadSTARsolo <- function(data.dir, ... ) { mtx <- file.path(data.dir, "matrix.mtx") cells <- file.path(data.dir, "barcodes.tsv") features <- file.path(data.dir, "features.tsv") return(ReadMtx(mtx = mtx, cells = cells, features = features, ...)) } Seurat/R/reexports.R0000644000176200001440000002040014525500037014071 0ustar liggesusers #%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% # Classes #%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% #' The Assay Class #' #' The \code{Assay} object is the basic unit of Seurat; for more details, please #' see the documentation in \code{\link[SeuratObject:Assay]{SeuratObject}} #' #' @importClassesFrom SeuratObject Assay #' #' @exportClass Assay #' #' @docType class #' @name Assay-class #' @rdname Assay-class #' #' @seealso \code{\link[SeuratObject:Assay]{SeuratObject::Assay-class}} #' NULL #' The DimReduc Class #' #' The \code{DimReduc} object stores a dimensionality reduction taken out in #' Seurat; for more details, please see the documentation in #' \code{\link[SeuratObject:DimReduc]{SeuratObject}} #' #' @importClassesFrom SeuratObject DimReduc #' #' @exportClass DimReduc #' #' @docType class #' @name DimReduc-class #' @rdname DimReduc-class #' #' @seealso \code{\link[SeuratObject:DimReduc]{SeuratObject::DimReduc-class}} #' NULL #' The Graph Class #' #' For more details, please see the documentation in #' \code{\link[SeuratObject:Graph]{SeuratObject}} #' #' @importClassesFrom SeuratObject Graph #' #' @exportClass Graph #' #' @docType class #' @name Graph-class #' @rdname Graph-class #' #' @seealso \code{\link[SeuratObject:Graph]{SeuratObject::Graph-class}} #' NULL #' The JackStrawData Class #' #' For more details, please see the documentation in #' \code{\link[SeuratObject:JackStrawData]{SeuratObject}} #' #' @importClassesFrom SeuratObject JackStrawData #' #' @exportClass JackStrawData #' #' @docType class #' @name JackStrawData-class #' @rdname JackStrawData-class #' #' @seealso \code{\link[SeuratObject:JackStrawData]{SeuratObject::JackStrawData-class}} #' NULL #' The Neighbor Class #' #' For more details, 
please see the documentation in #' \code{\link[SeuratObject:Neighbor]{SeuratObject}} #' #' @importClassesFrom SeuratObject Neighbor #' #' @exportClass Neighbor #' #' @docType class #' @name Neighbor-class #' @rdname Neighbor-class #' #' @seealso \code{\link[SeuratObject:Neighbor]{SeuratObject::Neighbor-class}} #' NULL #' The Seurat Class #' #' The Seurat object is a representation of single-cell expression data for R; #' for more details, please see the documentation in #' \code{\link[SeuratObject:Seurat]{SeuratObject}} #' #' @importClassesFrom SeuratObject Seurat #' #' @exportClass Seurat #' #' @docType class #' @name Seurat-class #' @rdname Seurat-class #' #' @seealso \code{\link[SeuratObject:Seurat]{SeuratObject::Seurat-class}} #' NULL #' The SeuratCommand Class #' #' For more details, please see the documentation in #' \code{\link[SeuratObject:SeuratCommand]{SeuratObject}} #' #' @importClassesFrom SeuratObject SeuratCommand #' #' @exportClass SeuratCommand #' #' @docType class #' @name SeuratCommand-class #' @rdname SeuratCommand-class #' #' @seealso \code{\link[SeuratObject:SeuratCommand]{SeuratObject::SeuratCommand-class}} #' NULL #' The SpatialImage Class #' #' For more details, please see the documentation in #' \code{\link[SeuratObject:SpatialImage]{SeuratObject}} #' #' @importClassesFrom SeuratObject SpatialImage #' #' @exportClass SpatialImage #' #' @docType class #' @name SpatialImage-class #' @rdname SpatialImage-class #' #' @seealso \code{\link[SeuratObject:SpatialImage]{SeuratObject::SpatialImage-class}} #' NULL #%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% # Functions and Generics #%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% #' @importFrom generics components #' @rdname reexports #' @export #' generics::components #' @importFrom SeuratObject %||% #' @rdname reexports #' @export #' SeuratObject::`%||%` #' @importFrom SeuratObject %iff% #' @rdname reexports #' @export #' SeuratObject::`%iff%` #' @importFrom SeuratObject AddMetaData #' @export #' SeuratObject::AddMetaData #' @importFrom SeuratObject as.Graph #' @export #' SeuratObject::as.Graph #' @importFrom SeuratObject as.Neighbor #' @export #' SeuratObject::as.Neighbor #' @importFrom SeuratObject as.Seurat #' @export #' SeuratObject::as.Seurat #' @importFrom SeuratObject as.sparse #' @export #' SeuratObject::as.sparse #' @importFrom SeuratObject Assays #' @export #' SeuratObject::Assays #' @importFrom SeuratObject Cells #' @export #' SeuratObject::Cells #' @importFrom SeuratObject CellsByIdentities #' @export #' SeuratObject::CellsByIdentities #' @importFrom SeuratObject Command #' @export #' SeuratObject::Command #' @importFrom SeuratObject CreateAssayObject #' @export #' SeuratObject::CreateAssayObject #' @importFrom SeuratObject CreateDimReducObject #' @export #' SeuratObject::CreateDimReducObject #' @importFrom SeuratObject CreateSeuratObject #' @export #' SeuratObject::CreateSeuratObject #' @importFrom SeuratObject DefaultAssay #' @export #' SeuratObject::DefaultAssay #' @importFrom SeuratObject DefaultAssay<- #' @export #' SeuratObject::`DefaultAssay<-` #' @importFrom SeuratObject Distances #' @export #' SeuratObject::Distances #' @importFrom SeuratObject Embeddings #' @export #' SeuratObject::Embeddings #' @importFrom SeuratObject FetchData #' @export #' SeuratObject::FetchData #' @importFrom SeuratObject GetAssayData #' @export #' SeuratObject::GetAssayData #' @importFrom SeuratObject GetImage #' @export #' SeuratObject::GetImage #' @importFrom 
SeuratObject GetTissueCoordinates #' @export #' SeuratObject::GetTissueCoordinates #' @importFrom SeuratObject HVFInfo #' @export #' SeuratObject::HVFInfo #' @importFrom SeuratObject Idents #' @export #' SeuratObject::Idents #' @importFrom SeuratObject Idents<- #' @export #' SeuratObject::`Idents<-` #' @importFrom SeuratObject Images #' @export #' SeuratObject::Images #' @importFrom SeuratObject Index #' @export #' SeuratObject::Index #' @importFrom SeuratObject Index<- #' @export #' SeuratObject::`Index<-` #' @importFrom SeuratObject Indices #' @export #' SeuratObject::Indices #' @importFrom SeuratObject IsGlobal #' @export #' SeuratObject::IsGlobal #' @importFrom SeuratObject JS #' @export #' SeuratObject::JS #' @importFrom SeuratObject JS<- #' @export #' SeuratObject::`JS<-` #' @importFrom SeuratObject Key #' @export #' SeuratObject::Key #' @importFrom SeuratObject Key<- #' @export #' SeuratObject::`Key<-` #' @importFrom SeuratObject Loadings #' @export #' SeuratObject::Loadings #' @importFrom SeuratObject Loadings<- #' @export #' SeuratObject::`Loadings<-` #' @importFrom SeuratObject LogSeuratCommand #' @export #' SeuratObject::LogSeuratCommand #' @importFrom SeuratObject Misc #' @export #' SeuratObject::Misc #' @importFrom SeuratObject Misc<- #' @export #' SeuratObject::`Misc<-` #' @importFrom SeuratObject Neighbors #' @export #' SeuratObject::Neighbors #' @importFrom SeuratObject Project #' @export #' SeuratObject::Project #' @importFrom SeuratObject Project<- #' @export #' SeuratObject::`Project<-` #' @importFrom SeuratObject Radius #' @export #' SeuratObject::Radius #' @importFrom SeuratObject Reductions #' @export #' SeuratObject::Reductions #' @importFrom SeuratObject RenameCells #' @export #' SeuratObject::RenameCells #' @importFrom SeuratObject RenameIdents #' @export #' SeuratObject::RenameIdents #' @importFrom SeuratObject ReorderIdent #' @export #' SeuratObject::ReorderIdent #' @importFrom SeuratObject RowMergeSparseMatrices #' @export #' SeuratObject::RowMergeSparseMatrices #' @importFrom SeuratObject SetAssayData #' @export #' SeuratObject::SetAssayData #' @importFrom SeuratObject SetIdent #' @export #' SeuratObject::SetIdent #' @importFrom SeuratObject SpatiallyVariableFeatures #' @export #' SeuratObject::SpatiallyVariableFeatures #' @importFrom SeuratObject StashIdent #' @export #' SeuratObject::StashIdent #' @importFrom SeuratObject Stdev #' @export #' SeuratObject::Stdev #' @importFrom SeuratObject SVFInfo #' @export #' SeuratObject::SVFInfo #' @importFrom SeuratObject Tool #' @export #' SeuratObject::Tool #' @importFrom SeuratObject Tool<- #' @export #' SeuratObject::`Tool<-` #' @importFrom SeuratObject UpdateSeuratObject #' @export #' SeuratObject::UpdateSeuratObject #' @importFrom SeuratObject VariableFeatures #' @export #' SeuratObject::VariableFeatures #' @importFrom SeuratObject VariableFeatures<- #' @export #' SeuratObject::`VariableFeatures<-` #' @importFrom SeuratObject WhichCells #' @export #' SeuratObject::WhichCells Seurat/R/tree.R0000644000176200001440000002720014525500037013002 0ustar liggesusers#' @include generics.R #' NULL cluster.ape <- paste( "Cluster tree functionality requires 'ape'", "please install with 'install.packages('ape')'" ) #%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% # Functions #%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% #' Phylogenetic Analysis of Identity Classes #' #' Constructs a phylogenetic tree relating the 'average' cell from each #' identity class. 
Tree is estimated based on a distance matrix constructed in #' either gene expression space or PCA space. #' #' Note that the tree is calculated for an 'average' cell, so gene expression #' or PC scores are averaged across all cells in an identity class before the #' tree is constructed. #' #' @param object Seurat object #' @param assay Assay to use for the analysis. #' @param features Genes to use for the analysis. Default is the set of #' variable genes (\code{VariableFeatures(object = object)}) #' @param dims If set, tree is calculated in dimension reduction space; #' overrides \code{features} #' @param reduction Name of dimension reduction to use. Only used if \code{dims} #' is not NULL. #' @param graph If graph is passed, build tree based on graph connectivity between #' clusters; overrides \code{dims} and \code{features} #' @param reorder Re-order identity classes (factor ordering), according to #' position on the tree. This groups similar classes together which can be #' helpful, for example, when drawing violin plots. #' @param reorder.numeric Re-order identity classes according to position on #' the tree, assigning a numeric value ('1' is the leftmost node) #' @param verbose Show progress updates #' @inheritParams AverageExpression #' #' @return A Seurat object where the cluster tree can be accessed with \code{\link{Tool}} #' #' @importFrom pbapply pblapply #' @importFrom stats dist hclust na.omit #' @importFrom utils txtProgressBar setTxtProgressBar #' #' @export #' @concept tree #' #' @examples #' \dontrun{ #' if (requireNamespace("ape", quietly = TRUE)) { #' data("pbmc_small") #' pbmc_small #' pbmc_small <- BuildClusterTree(object = pbmc_small) #' Tool(object = pbmc_small, slot = 'BuildClusterTree') #' } #' } #' BuildClusterTree <- function( object, assay = NULL, features = NULL, dims = NULL, reduction = "pca", graph = NULL, slot = 'data', reorder = FALSE, reorder.numeric = FALSE, verbose = TRUE ) { if (!PackageCheck('ape', error = FALSE)) { stop(cluster.ape, call. 
= FALSE) } assay <- assay %||% DefaultAssay(object = object) if (!is.null(x = graph)) { idents <- levels(x = object) nclusters <- length(x = idents) data.dist <- matrix( data = numeric(length = 1L), nrow = nclusters, ncol = nclusters, dimnames = list(idents, idents) ) graph <- object[[graph]] cxi <- CellsByIdentities(object = object) cpairs <- na.omit(object = unique(x = t(x = apply( X = expand.grid(1:nclusters, 1:nclusters)[, c(2, 1)], MARGIN = 1, FUN = function(x) { if (length(x = x) == length(x = unique(x = x))) { return(sort(x = x)) } return(c(NA, NA)) } )))) if (verbose) { pb <- txtProgressBar(style = 3, file = stderr()) } for (i in 1:nrow(x = cpairs)) { i1 <- cpairs[i, ][1] i2 <- cpairs[i, ][2] graph.sub <- graph[cxi[[idents[i1]]], cxi[[idents[i2]]]] d <- mean(x = graph.sub) if (is.na(x = d)) { d <- 0 } data.dist[i1, i2] <- d if (verbose) { setTxtProgressBar(pb = pb, value = i / nrow(x = cpairs)) } } if (verbose) { close(con = pb) } diag(x = data.dist) <- 1 data.dist <- dist(x = data.dist) } else if (!is.null(x = dims)) { my.lapply <- ifelse(test = verbose, yes = pblapply, no = lapply) embeddings <- Embeddings(object = object, reduction = reduction)[, dims] data.dims <- my.lapply( X = levels(x = object), FUN = function(x) { cells <- WhichCells(object = object, idents = x) if (length(x = cells) == 1) { cells <- c(cells, cells) } temp <- colMeans(x = embeddings[cells, ]) } ) data.dims <- do.call(what = 'cbind', args = data.dims) colnames(x = data.dims) <- levels(x = object) data.dist <- dist(x = t(x = data.dims)) } else { features <- features %||% VariableFeatures(object = object) features <- intersect(x = features, y = rownames(x = object)) data.avg <- AverageExpression( object = object, assays = assay, features = features, slot = slot, verbose = verbose )[[1]] data.dist <- dist(x = t(x = data.avg[features, ])) } data.tree <- ape::as.phylo(x = hclust(d = data.dist)) Tool(object = object) <- data.tree if (reorder) { if (verbose) { message("Reordering identity classes and rebuilding tree") } old.ident.order <- levels(x = object) data.tree <- Tool(object = object, slot = 'BuildClusterTree') all.desc <- GetDescendants(tree = data.tree, node = (data.tree$Nnode + 2)) all.desc <- old.ident.order[all.desc[all.desc <= (data.tree$Nnode + 1)]] Idents(object = object) <- factor(x = Idents(object = object), levels = all.desc, ordered = TRUE) if (reorder.numeric) { new.levels <- sort(x = unique(x = as.integer(x = Idents(object = object)))) Idents(object = object) <- factor(x = as.integer(x = Idents(object = object)), levels = new.levels) object[['tree.ident']] <- as.integer(x = Idents(object = object)) } object <- BuildClusterTree( object = object, assay = assay, features = features, dims = dims, reduction = reduction, graph = graph, slot = slot, reorder = FALSE, verbose = verbose ) } return(object) } #%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% # Methods for Seurat-defined generics #%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% #%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% # Methods for R-defined generics #%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% #%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% # Internal #%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% # Depth first traversal path of a given tree # # @param tree Tree object (from ape package) # @param node Internal node in the tree # @param 
path Path through the tree (for recursion) # @param include.children Include children in the output path # @param only.children Only include children in the output path # @return Returns a vector representing the depth first traversal path # DFT <- function( tree, node, path = NULL, include.children = FALSE, only.children = FALSE ) { if (only.children) { include.children = TRUE } children <- which(x = tree$edge[, 1] == node) child1 <- tree$edge[children[1], 2] child2 <- tree$edge[children[2], 2] if (child1 %in% tree$edge[, 1]) { if (!only.children) { path <- c(path, child1) } path <- DFT( tree = tree, node = child1, path = path, include.children = include.children, only.children = only.children ) } else { if (include.children) { path <- c(path, child1) } } if (child2 %in% tree$edge[, 1]) { if (!only.children) { path <- c(path, child2) } path <- DFT( tree = tree, node = child2, path = path, include.children = include.children, only.children = only.children ) } else { if (include.children) { path <- c(path, child2) } } return(path) } # Function to return all internal (non-terminal) nodes in a given tree # # @param tree Tree object (from ape package) # # @return Returns a vector of all internal nodes for the given tree # GetAllInternalNodes <- function(tree) { return(c(tree$edge[1, 1], DFT(tree = tree, node = tree$edge[1, 1]))) } # Function to get all the descendants on a tree of a given node # # @param tree Tree object (from ape package) # @param node Internal node in the tree # # @return Returns all descendants of the given node # GetDescendants <- function(tree, node, curr = NULL) { if (is.null(x = curr)) { curr <- vector() } daughters <- tree$edge[which(x = tree$edge[, 1] == node), 2] curr <- c(curr, daughters) w <- which(x = daughters >= length(x = tree$tip)) if (length(x = w) > 0) { for (i in 1:length(x = w)) { curr <- GetDescendants(tree = tree, node = daughters[w[i]], curr = curr) } } return(curr) } # Function to get all the descendants on a tree left of a given node # # @param tree Tree object (from ape package) # @param node Internal node in the tree # # @return Returns all descendants left of the given node # GetLeftDescendants <- function(tree, node) { daughters <- tree$edge[which(tree$edge[, 1] == node), 2] if (daughters[1] <= (tree$Nnode + 1)) { return(daughters[1]) } daughter.use <- GetDescendants(tree, daughters[1]) daughter.use <- daughter.use[daughter.use <= (tree$Nnode + 1)] return(daughter.use) } # Function to get all the descendants on a tree right of a given node # # @param tree Tree object (from ape package) # @param node Internal node in the tree # # @return Returns all descendants right of the given node # GetRightDescendants <- function(tree, node) { daughters <- tree$edge[which(x = tree$edge[, 1] == node), 2] if (daughters[2] <= (tree$Nnode + 1)) { return(daughters[2]) } daughter.use <- GetDescendants(tree = tree, node = daughters[2]) daughter.use <- daughter.use[daughter.use <= (tree$Nnode + 1)] return(daughter.use) } # Merge childen of a node # # Merge the childen of a node into a single identity class # # @param object Seurat object # @param node.use Merge children of this node # @param rebuild.tree Rebuild cluster tree after the merge? # @param ... 
Extra parameters to BuildClusterTree, used only if rebuild.tree = TRUE # # @seealso \code{BuildClusterTree} # # # @examples # data("pbmc_small") # PlotClusterTree(object = pbmc_small) # pbmc_small <- MergeNode(object = pbmc_small, node.use = 7, rebuild.tree = TRUE) # PlotClusterTree(object = pbmc_small) # MergeNode <- function(object, node.use, rebuild.tree = FALSE, ...) { CheckDots(..., fxns = 'BuldClusterTree') object.tree <- object@cluster.tree[[1]] node.children <- DFT( tree = object.tree, node = node.use, include.children = TRUE ) node.children <- intersect(x = node.children, y = levels(x = object@ident)) children.cells <- WhichCells(object = object, ident = node.children) if (length(x = children.cells > 0)) { object <- SetIdent( object = object, cells.use = children.cells, ident.use = min(node.children) ) } if (rebuild.tree) { object <- BuildClusterTree(object = object, ...) } return(object) } # Function to check whether a given node in a tree has a child (leaf node) # # @param tree Tree object (from ape package) # @param node Internal node in the tree # # @return Returns a Boolean of whether the given node is connected to a terminal leaf node NodeHasChild <- function(tree, node) { children <- tree$edge[which(x = tree$edge[, 1] == node), ][, 2] return(any(children %in% tree$edge[, 2] && !children %in% tree$edge[, 1])) } # Function to check whether a given node in a tree has only children(leaf nodes) # # @param tree Tree object (from ape package) # @param node Internal node in the tree # # @return Returns a Boolean of whether the given node is connected to only terminal leaf nodes NodeHasOnlyChildren <- function(tree, node) { children <- tree$edge[which(x = tree$edge[, 1] == node), ][, 2] return(!any(children %in% tree$edge[, 1])) } Seurat/R/integration5.R0000644000176200001440000006407314525500037014464 0ustar liggesusers#' @include zzz.R #' @include generics.R #' NULL #%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% # Generics #%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% #%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% # Functions #%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% #' Harmony Integration #' #' @param object An \code{\link[SeuratObject]{Assay5}} object # @param assay Name of \code{object} in the containing \code{Seurat} object #' @param orig A \link[SeuratObject:DimReduc]{dimensional reduction} to correct #' @param features Ignored #' @param scale.layer Ignored #' @param new.reduction Name of new integrated dimensional reduction #' @param layers Ignored #' @param key Key for Harmony dimensional reduction #' @param npcs If doing PCA on input matrix, number of PCs to compute #' @param theta Diversity clustering penalty parameter #' @param lambda Ridge regression penalty parameter #' @param sigma Width of soft kmeans clusters #' @param nclust Number of clusters in model #' @param tau Protection against overclustering small datasets with large ones #' @param block.size What proportion of cells to update during clustering #' @param max.iter.harmony Maximum number of rounds to run Harmony #' @param max.iter.cluster Maximum number of rounds to run clustering at each round of Harmony #' @param epsilon.cluster Convergence tolerance for clustering round of Harmony #' @param epsilon.harmony Convergence tolerance for Harmony #' @param verbose Whether to print progress messages. TRUE to print, FALSE to suppress #' @param ... Ignored #' #' @return ... 
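#' (in practice, the value returned here is a named list holding a single
#' \code{DimReduc} of Harmony-corrected embeddings; when called through
#' \code{IntegrateLayers}, that reduction is added to the object under
#' \code{new.reduction})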
#' #' @note This function requires the #' \href{https://cran.r-project.org/package=harmony}{\pkg{harmony}} package #' to be installed #' # @templateVar pkg harmony # @template note-reqdpkg #' #' @examples #' \dontrun{ #' # Preprocessing #' obj <- SeuratData::LoadData("pbmcsca") #' obj[["RNA"]] <- split(obj[["RNA"]], f = obj$Method) #' obj <- NormalizeData(obj) #' obj <- FindVariableFeatures(obj) #' obj <- ScaleData(obj) #' obj <- RunPCA(obj) #' #' # After preprocessing, we integrate layers with added parameters specific to Harmony: #' obj <- IntegrateLayers(object = obj, method = HarmonyIntegration, orig.reduction = "pca", #' new.reduction = 'harmony', verbose = FALSE) #' #' # Modifying Parameters #' # We can also add arguments specific to Harmony such as theta, to give more diverse clusters #' obj <- IntegrateLayers(object = obj, method = HarmonyIntegration, orig.reduction = "pca", #' new.reduction = 'harmony', verbose = FALSE, theta = 3) #' # Integrating SCTransformed data #' obj <- SCTransform(object = obj) #' obj <- IntegrateLayers(object = obj, method = HarmonyIntegration, #' orig.reduction = "pca", new.reduction = 'harmony', #' assay = "SCT", verbose = FALSE) #' } #' #' #' @export #' #' @concept integration #' #' @seealso \code{\link[harmony:HarmonyMatrix]{harmony::HarmonyMatrix}()} #' HarmonyIntegration <- function( object, orig, features = NULL, scale.layer = 'scale.data', new.reduction = 'harmony', layers = NULL, npcs = 50L, key = 'harmony_', theta = NULL, lambda = NULL, sigma = 0.1, nclust = NULL, tau = 0, block.size = 0.05, max.iter.harmony = 10L, max.iter.cluster = 20L, epsilon.cluster = 1e-05, epsilon.harmony = 1e-04, verbose = TRUE, ... ) { check_installed( pkg = "harmony", reason = "for running integration with Harmony" ) if (!inherits(x = object, what = c('StdAssay', 'SCTAssay'))) { abort(message = "'object' must be a v5 or SCT assay") } else if (!inherits(x = orig, what = 'DimReduc')) { abort(message = "'orig' must be a dimensional reduction") } # # Run joint PCA # features <- features %||% Features(x = object, layer = scale.layer) # pca <- RunPCA( # object = object, # assay = assay, # features = features, # layer = scale.layer, # npcs = npcs, # verbose = verbose # ) #create grouping variables groups <- CreateIntegrationGroups(object, layers = layers, scale.layer = scale.layer) # Run Harmony harmony.embed <- harmony::HarmonyMatrix( data_mat = Embeddings(object = orig), meta_data = groups, vars_use = 'group', do_pca = FALSE, npcs = 0L, theta = theta, lambda = lambda, sigma = sigma, nclust = nclust, tau = tau, block.size = block.size, max.iter.harmony = max.iter.harmony, max.iter.cluster = max.iter.cluster, epsilon.cluster = epsilon.cluster, epsilon.harmony = epsilon.harmony, return_object = FALSE, verbose = verbose ) rownames(x = harmony.embed) <- Cells(x = orig) # TODO add feature loadings from PCA dr <- suppressWarnings(expr = CreateDimReducObject( embeddings = harmony.embed, key = key, # assay = assay assay = DefaultAssay(object = orig) )) output.list <- list(dr) names(output.list) <- c(new.reduction) return(output.list) } attr(x = HarmonyIntegration, which = 'Seurat.method') <- 'integration' #' Seurat-CCA Integration #' #' @inheritParams RPCAIntegration #' @export #' #' @examples #' \dontrun{ #' # Preprocessing #' obj <- SeuratData::LoadData("pbmcsca") #' obj[["RNA"]] <- split(obj[["RNA"]], f = obj$Method) #' obj <- NormalizeData(obj) #' obj <- FindVariableFeatures(obj) #' obj <- ScaleData(obj) #' obj <- RunPCA(obj) #' #' # After preprocessing, we integrate layers. 
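#' # (IntegrateLayers stores the corrected embedding under `new.reduction`,
#' # here "integrated.cca"; downstream steps such as FindNeighbors() or
#' # RunUMAP() can then be pointed at that reduction via their `reduction`
#' # argument.)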
#' obj <- IntegrateLayers(object = obj, method = CCAIntegration, #' orig.reduction = "pca", new.reduction = "integrated.cca", #' verbose = FALSE) #' #' # Modifying parameters #' # We can also specify parameters such as `k.anchor` to increase the strength of integration #' obj <- IntegrateLayers(object = obj, method = CCAIntegration, #' orig.reduction = "pca", new.reduction = "integrated.cca", #' k.anchor = 20, verbose = FALSE) #' #' # Integrating SCTransformed data #' obj <- SCTransform(object = obj) #' obj <- IntegrateLayers(object = obj, method = CCAIntegration, #' orig.reduction = "pca", new.reduction = "integrated.cca", #' assay = "SCT", verbose = FALSE) #' } #' CCAIntegration <- function( object = NULL, assay = NULL, layers = NULL, orig = NULL, new.reduction = 'integrated.dr', reference = NULL, features = NULL, normalization.method = c("LogNormalize", "SCT"), dims = 1:30, k.filter = NA, scale.layer = 'scale.data', dims.to.integrate = NULL, k.weight = 100, weight.reduction = NULL, sd.weight = 1, sample.tree = NULL, preserve.order = FALSE, verbose = TRUE, ... ) { op <- options(Seurat.object.assay.version = "v3", Seurat.object.assay.calcn = FALSE) on.exit(expr = options(op), add = TRUE) normalization.method <- match.arg(arg = normalization.method) features <- features %||% SelectIntegrationFeatures5(object = object) assay <- assay %||% 'RNA' layers <- layers %||% Layers(object, search = 'data') if (normalization.method == 'SCT') { #create grouping variables groups <- CreateIntegrationGroups(object, layers = layers, scale.layer = scale.layer) object.sct <- CreateSeuratObject(counts = object, assay = 'SCT') object.sct$split <- groups[,1] object.list <- SplitObject(object = object.sct,split.by = 'split') object.list <- PrepSCTIntegration(object.list, anchor.features = features) } else { object.list <- list() for (i in seq_along(along.with = layers)) { if (inherits(x = object[layers[i]], what = "IterableMatrix")) { warning("Converting BPCells matrix to dgCMatrix for integration ", "as on-disk CCA Integration is not currently supported", call. = FALSE, immediate. = TRUE) counts <- as(object = object[layers[i]][features, ], Class = "dgCMatrix") } else { counts <- object[layers[i]][features, ] } object.list[[i]] <- CreateSeuratObject(counts = counts) if (inherits(x = object[scale.layer], what = "IterableMatrix")) { scale.data.layer <- as.matrix(object[scale.layer][features, Cells(object.list[[i]])]) object.list[[i]][["RNA"]]$scale.data <- scale.data.layer } else { object.list[[i]][["RNA"]]$scale.data <- object[scale.layer][features, Cells(object.list[[i]])] } object.list[[i]][['RNA']]$counts <- NULL } } anchor <- FindIntegrationAnchors(object.list = object.list, anchor.features = features, scale = FALSE, reduction = 'cca', normalization.method = normalization.method, dims = dims, k.filter = k.filter, reference = reference, verbose = verbose, ... 
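# (Anchors are identified in CCA space across the per-layer objects built
#  above; any extra arguments supplied through `...` are forwarded to
#  FindIntegrationAnchors(). The resulting anchor set is then handed to
#  IntegrateEmbeddings() below, which corrects the embeddings in `orig` and
#  returns them as the new reduction.)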
) suppressWarnings({ anchor@object.list <- lapply(anchor@object.list, function(x) { x <- DietSeurat(x, features = features[1:2]) return(x) }) }, classes = "dimWarning") object_merged <- IntegrateEmbeddings(anchorset = anchor, reductions = orig, new.reduction.name = new.reduction, dims.to.integrate = dims.to.integrate, k.weight = k.weight, weight.reduction = weight.reduction, sd.weight = sd.weight, sample.tree = sample.tree, preserve.order = preserve.order, verbose = verbose ) output.list <- list(object_merged[[new.reduction]]) names(output.list) <- c(new.reduction) return(output.list) } attr(x = CCAIntegration, which = 'Seurat.method') <- 'integration' #' Seurat-RPCA Integration #' #' @param object A \code{Seurat} object #' @param assay Name of \code{Assay} in the \code{Seurat} object #' @param layers Names of layers in \code{assay} #' @param orig A \link[SeuratObject:DimReduc]{dimensional reduction} to correct #' @param new.reduction Name of new integrated dimensional reduction #' @param reference A reference \code{Seurat} object #' @param features A vector of features to use for integration #' @param normalization.method Name of normalization method used: LogNormalize #' or SCT #' @param dims Dimensions of dimensional reduction to use for integration #' @param k.filter Number of anchors to filter #' @param scale.layer Name of scaled layer in \code{Assay} #' @param verbose Print progress #' @param ... Additional arguments passed to \code{FindIntegrationAnchors} #' #' @examples #' \dontrun{ #' # Preprocessing #' obj <- SeuratData::LoadData("pbmcsca") #' obj[["RNA"]] <- split(obj[["RNA"]], f = obj$Method) #' obj <- NormalizeData(obj) #' obj <- FindVariableFeatures(obj) #' obj <- ScaleData(obj) #' obj <- RunPCA(obj) #' #' # After preprocessing, we run integration #' obj <- IntegrateLayers(object = obj, method = RPCAIntegration, #' orig.reduction = "pca", new.reduction = 'integrated.rpca', #' verbose = FALSE) #' #' # Reference-based Integration #' # Here, we use the first layer as a reference for integration #' # Thus, we only identify anchors between the reference and the rest of the datasets, #' # saving computational resources #' obj <- IntegrateLayers(object = obj, method = RPCAIntegration, #' orig.reduction = "pca", new.reduction = 'integrated.rpca', #' reference = 1, verbose = FALSE) #' #' # Modifying parameters #' # We can also specify parameters such as `k.anchor` to increase the strength of #' # integration #' obj <- IntegrateLayers(object = obj, method = RPCAIntegration, #' orig.reduction = "pca", new.reduction = 'integrated.rpca', #' k.anchor = 20, verbose = FALSE) #' #' # Integrating SCTransformed data #' obj <- SCTransform(object = obj) #' obj <- IntegrateLayers(object = obj, method = RPCAIntegration, #' orig.reduction = "pca", new.reduction = 'integrated.rpca', #' assay = "SCT", verbose = FALSE) #' } #' #' @inheritParams FindIntegrationAnchors #' @inheritParams IntegrateEmbeddings #' @param ... Arguments passed on to \code{FindIntegrationAnchors} #' @export #' RPCAIntegration <- function( object = NULL, assay = NULL, layers = NULL, orig = NULL, new.reduction = 'integrated.dr', reference = NULL, features = NULL, normalization.method = c("LogNormalize", "SCT"), dims = 1:30, k.filter = NA, scale.layer = 'scale.data', dims.to.integrate = NULL, k.weight = 100, weight.reduction = NULL, sd.weight = 1, sample.tree = NULL, preserve.order = FALSE, verbose = TRUE, ...
) { op <- options(Seurat.object.assay.version = "v3", Seurat.object.assay.calcn = FALSE) on.exit(expr = options(op), add = TRUE) normalization.method <- match.arg(arg = normalization.method) features <- features %||% SelectIntegrationFeatures5(object = object) assay <- assay %||% 'RNA' layers <- layers %||% Layers(object = object, search = 'data') # check that there are enough cells present ncells <- sapply(X = layers, FUN = function(x) {ncell <- dim(object[x])[2] return(ncell) }) if (min(ncells) < max(dims)) { abort(message = "At least one layer has fewer cells than dimensions specified, please lower 'dims' accordingly.") } if (normalization.method == 'SCT') { #create grouping variables groups <- CreateIntegrationGroups(object, layers = layers, scale.layer = scale.layer) object.sct <- CreateSeuratObject(counts = object, assay = 'SCT') object.sct$split <- groups[,1] object.list <- SplitObject(object = object.sct, split.by = 'split') object.list <- PrepSCTIntegration(object.list = object.list, anchor.features = features) object.list <- lapply(X = object.list, FUN = function(x) { x <- RunPCA(object = x, features = features, verbose = FALSE, npcs = max(dims)) return(x) } ) } else { object.list <- list() for (i in seq_along(along.with = layers)) { object.list[[i]] <- suppressMessages(suppressWarnings(CreateSeuratObject(counts = object[layers[i]][features,]))) VariableFeatures(object = object.list[[i]]) <- features object.list[[i]] <- suppressWarnings(ScaleData(object = object.list[[i]], verbose = FALSE)) object.list[[i]] <- RunPCA(object = object.list[[i]], verbose = FALSE, npcs=max(dims)) suppressWarnings(object.list[[i]][['RNA']]$counts <- NULL) } } anchor <- FindIntegrationAnchors(object.list = object.list, anchor.features = features, scale = FALSE, reduction = 'rpca', normalization.method = normalization.method, dims = dims, k.filter = k.filter, reference = reference, verbose = verbose, ... ) slot(object = anchor, name = "object.list") <- lapply( X = slot( object = anchor, name = "object.list"), FUN = function(x) { suppressWarnings(expr = x <- DietSeurat(x, features = features[1:2])) return(x) }) object_merged <- IntegrateEmbeddings(anchorset = anchor, reductions = orig, new.reduction.name = new.reduction, dims.to.integrate = dims.to.integrate, k.weight = k.weight, weight.reduction = weight.reduction, sd.weight = sd.weight, sample.tree = sample.tree, preserve.order = preserve.order, verbose = verbose ) output.list <- list(object_merged[[new.reduction]]) names(output.list) <- c(new.reduction) return(output.list) } attr(x = RPCAIntegration, which = 'Seurat.method') <- 'integration' #' Seurat-Joint PCA Integration #' #' @inheritParams RPCAIntegration #' @inheritParams FindIntegrationAnchors #' @inheritParams IntegrateEmbeddings #' @param ... Arguments passed on to \code{FindIntegrationAnchors} #' @export #' JointPCAIntegration <- function( object = NULL, assay = NULL, layers = NULL, orig = NULL, new.reduction = 'integrated.dr', reference = NULL, features = NULL, normalization.method = c("LogNormalize", "SCT"), dims = 1:30, k.anchor = 20, scale.layer = 'scale.data', dims.to.integrate = NULL, k.weight = 100, weight.reduction = NULL, sd.weight = 1, sample.tree = NULL, preserve.order = FALSE, verbose = TRUE, ...
) { op <- options(Seurat.object.assay.version = "v3", Seurat.object.assay.calcn = FALSE) on.exit(expr = options(op), add = TRUE) normalization.method <- match.arg(arg = normalization.method) features <- features %||% SelectIntegrationFeatures5(object = object) features.diet <- features[1:2] assay <- assay %||% DefaultAssay(object) layers <- layers %||% Layers(object, search = 'data') if (normalization.method == 'SCT') { #create grouping variables groups <- CreateIntegrationGroups(object, layers = layers, scale.layer = scale.layer) object.sct <- CreateSeuratObject(counts = object, assay = 'SCT') object.sct <- DietSeurat(object = object.sct, features = features.diet) object.sct[['joint.pca']] <- CreateDimReducObject( embeddings = Embeddings(object = orig), assay = 'SCT', loadings = Loadings(orig), key = 'J_' ) object.sct$split <- groups[,1] object.list <- SplitObject(object = object.sct,split.by = 'split') object.list <- PrepSCTIntegration(object.list, anchor.features = features.diet) object.list <- lapply(object.list, function(x) { x[['SCT']]@SCTModel.list <- list() return(x) }) } else { object.list <- list() for (i in seq_along(along.with = layers)) { object.list[[i]] <- CreateSeuratObject(counts = object[layers[i]][features.diet, ] ) object.list[[i]][['RNA']]$counts <- NULL object.list[[i]][['joint.pca']] <- CreateDimReducObject( embeddings = Embeddings(object = orig)[Cells(object.list[[i]]),], assay = 'RNA', loadings = Loadings(orig), key = 'J_' ) } } anchor <- FindIntegrationAnchors(object.list = object.list, anchor.features = features.diet, scale = FALSE, reduction = 'jpca', normalization.method = normalization.method, dims = dims, k.anchor = k.anchor, k.filter = NA, reference = reference, verbose = verbose, ... ) object_merged <- IntegrateEmbeddings(anchorset = anchor, reductions = orig, new.reduction.name = new.reduction, dims.to.integrate = dims.to.integrate, k.weight = k.weight, weight.reduction = weight.reduction, sd.weight = sd.weight, sample.tree = sample.tree, preserve.order = preserve.order, verbose = verbose ) output.list <- list(object_merged[[new.reduction]]) names(output.list) <- c(new.reduction) return(output.list) } attr(x = JointPCAIntegration, which = 'Seurat.method') <- 'integration' #' Integrate Layers #' #' @param object A \code{\link[SeuratObject]{Seurat}} object #' @param method Integration method function #' @param orig.reduction Name of dimensional reduction for correction #' @param assay Name of assay for integration #' @param features A vector of features to use for integration #' @param layers Names of normalized layers in \code{assay} #' @param scale.layer Name(s) of scaled layer(s) in \code{assay} #' @param ... Arguments passed on to \code{method} #' #' @return \code{object} with integration data added to it #' #' @section Integration Method Functions: #' The following integration method functions are available: #' \Sexpr[stage=render,results=rd]{Seurat:::.rd_methods("integration")} #' #' @export #' #' @concept integration #' #' @seealso \link[Seurat:writing-integration]{Writing integration method functions} #' IntegrateLayers <- function( object, method, orig.reduction = 'pca', assay = NULL, features = NULL, layers = NULL, scale.layer = 'scale.data', ... 
) { # Get the integration method if (is_quosure(x = method)) { method <- eval( expr = quo_get_expr(quo = method), envir = quo_get_env(quo = method) ) } if (is.character(x = method)) { method <- get(x = method) } if (!is.function(x = method)) { abort(message = "'method' must be a function for integrating layers") } # Check our assay assay <- assay %||% DefaultAssay(object = object) if (inherits(x = object[[assay]], what = 'SCTAssay')) { layers <- 'data' scale.layer <- 'scale.data' features <- features %||% SelectSCTIntegrationFeatures( object = object, assay = assay ) } else if (inherits(x = object[[assay]], what = 'StdAssay')) { layers <- Layers(object = object, assay = assay, search = layers %||% 'data') scale.layer <- Layers(object = object, search = scale.layer) features <- features %||% VariableFeatures( object = object, assay = assay, nfeatures = 2000L ) } else { abort(message = "'assay' must be a v5 or SCT assay") } if (!is.null(scale.layer)) { features <- intersect( x = features, y = Features(x = object, assay = assay, layer = scale.layer) ) } if (!length(x = features)) { abort(message = "None of the features provided are found in this assay") } if (!is.null(orig.reduction)) { # Check our dimensional reduction orig.reduction <- orig.reduction %||% DefaultDimReduc(object = object, assay = assay) if (!orig.reduction %in% Reductions(object = object)) { abort(message = paste(sQuote(x = orig.reduction), 'is not a dimensional reduction')) } obj.orig <- object[[orig.reduction]] if (is.null(x = DefaultAssay(object = obj.orig))) { DefaultAssay(object = obj.orig) <- assay } } # Run the integration method value <- method( object = object[[assay]], assay = assay, orig = obj.orig, layers = layers, scale.layer = scale.layer, features = features, ... ) for (i in names(x = value)) { object[[i]] <- value[[i]] } return(object) } #%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% # Methods for Seurat-defined generics #%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% #%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% # Methods for Seurat-defined generics #%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% #%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% # Internal #%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% # Creates data.frame with cell group assignments for integration # uses SCT models if SCTAssay and layers otherwise CreateIntegrationGroups <- function(object, layers, scale.layer) { groups <- if (inherits(x = object, what = 'SCTAssay')) { df <- SeuratObject::EmptyDF(n = ncol(x = object)) row.names(x = df) <- colnames(x = object) for (model in levels(x = object)) { cc <- Cells(x = object, layer = model) df[cc, "group"] <- model } df } else if (length(x = layers) > 1L) { cmap <- slot(object = object, name = 'cells')[, layers] as.data.frame(x = labels( object = cmap, values = Cells(x = object, layer = scale.layer) )) } names(x = groups) <- 'group' return(groups) } #' Writing Integration Method Functions #' #' Integration method functions can be written by anyone to implement any #' integration method in Seurat. These methods should expect to take a #' \link[SeuratObject:Assay5]{v5 assay} as input and return a named list of #' objects that can be added back to a \code{Seurat} object (eg. 
a #' \link[SeuratObject:DimReduc]{dimensional reduction} or cell-level meta data) #' #' @section Provided Parameters: #' Every integration method function should expect the following arguments: #' \itemize{ #' \item \dQuote{\code{object}}: an \code{\link[SeuratObject]{Assay5}} object # \item \dQuote{\code{assay}}: name of \code{object} in the original # \code{\link[SeuratObject]{Seurat}} object #' \item \dQuote{\code{orig}}: \link[SeuratObject:DimReduc]{dimensional #' reduction} to correct #' \item \dQuote{\code{layers}}: names of normalized layers in \code{object} #' \item \dQuote{\code{scale.layer}}: name(s) of scaled layer(s) in #' \code{object} #' \item \dQuote{\code{features}}: a vector of features for integration #' \item \dQuote{\code{groups}}: a one-column data frame with the groups for #' each cell in \code{object}; the column name will be \dQuote{group} #' } #' #' @section Method Discovery: #' The documentation for \code{\link{IntegrateLayers}()} will automatically #' link to integration method functions provided by packages in the #' \code{\link[base]{search}()} space. To make an integration method function #' discoverable by the documentation, simply add an attribute named #' \dQuote{\code{Seurat.method}} to the function with a value of #' \dQuote{\code{integration}} #' \preformatted{ #' attr(MyIntegrationFunction, which = "Seurat.method") <- "integration" #' } #' #' @keywords internal #' #' @concept integration #' #' @name writing-integration #' @rdname writing-integration #' #' @seealso \code{\link{IntegrateLayers}()} #' NULL Seurat/R/integration.R0000644000176200001440000106057114525500056014400 0ustar liggesusers#' @include generics.R #' NULL #%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% # Functions #%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% #' Find integration anchors #' #' Find a set of anchors between a list of \code{\link{Seurat}} objects. #' These anchors can later be used to integrate the objects using the #' \code{\link{IntegrateData}} function. #' #' The main steps of this procedure are outlined below. For a more detailed #' description of the methodology, please see Stuart, Butler, et al Cell 2019: #' \doi{10.1016/j.cell.2019.05.031}; \doi{10.1101/460147} #' #' First, determine anchor.features if not explicitly specified using #' \code{\link{SelectIntegrationFeatures}}. Then for all pairwise combinations #' of reference and query datasets: #' #' \itemize{ #' \item{Perform dimensional reduction on the dataset pair as specified via #' the \code{reduction} parameter. If \code{l2.norm} is set to \code{TRUE}, #' perform L2 normalization of the embedding vectors.} #' \item{Identify anchors - pairs of cells from each dataset #' that are contained within each other's neighborhoods (also known as mutual #' nearest neighbors).} #' \item{Filter low confidence anchors to ensure anchors in the low dimension #' space are in broad agreement with the high dimensional measurements. This #' is done by looking at the neighbors of each query cell in the reference #' dataset using \code{max.features} to define this space. If the reference #' cell isn't found within the first \code{k.filter} neighbors, remove the #' anchor.} #' \item{Assign each remaining anchor a score. For each anchor cell, determine #' the nearest \code{k.score} anchors within its own dataset and within its #' pair's dataset. 
Based on these neighborhoods, construct an overall neighbor #' graph and then compute the shared neighbor overlap between anchor and query #' cells (analogous to an SNN graph). We use the 0.01 and 0.90 quantiles on #' these scores to dampen outlier effects and rescale to range between 0-1.} #' } #' #' @param object.list A list of \code{\link{Seurat}} objects between which to #' find anchors for downstream integration. #' @param assay A vector of assay names specifying which assay to use when #' constructing anchors. If NULL, the current default assay for each object is #' used. #' @param reference A vector specifying the object/s to be used as a reference #' during integration. If NULL (default), all pairwise anchors are found (no #' reference/s). If not NULL, the corresponding objects in \code{object.list} #' will be used as references. When using a set of specified references, anchors #' are first found between each query and each reference. The references are #' then integrated through pairwise integration. Each query is then mapped to #' the integrated reference. #' @param anchor.features Can be either: #' \itemize{ #' \item{A numeric value. This will call \code{\link{SelectIntegrationFeatures}} #' to select the provided number of features to be used in anchor finding} #' \item{A vector of features to be used as input to the anchor finding process} #' } #' @param scale Whether or not to scale the features provided. Only set to FALSE #' if you have previously scaled the features you want to use for each object in #' the object.list #' @param normalization.method Name of normalization method used: LogNormalize #' or SCT #' @param sct.clip.range Numeric of length two specifying the min and max values #' the Pearson residual will be clipped to #' @param reduction Dimensional reduction to perform when finding anchors. Can #' be one of: #' \itemize{ #' \item{cca: Canonical correlation analysis} #' \item{rpca: Reciprocal PCA} #' \item{jpca: Joint PCA} #' \item{rlsi: Reciprocal LSI} #' } #' @param l2.norm Perform L2 normalization on the CCA cell embeddings after #' dimensional reduction #' @param dims Which dimensions to use from the CCA to specify the neighbor #' search space #' @param k.anchor How many neighbors (k) to use when picking anchors #' @param k.filter How many neighbors (k) to use when filtering anchors #' @param k.score How many neighbors (k) to use when scoring anchors #' @param max.features The maximum number of features to use when specifying the #' neighborhood search space in the anchor filtering #' @param nn.method Method for nearest neighbor finding. Options include: rann, #' annoy #' @param n.trees More trees gives higher precision when using annoy approximate #' nearest neighbor search #' @param eps Error bound on the neighbor finding algorithm (from RANN/Annoy) #' @param verbose Print progress bars and output #' #' @return Returns an \code{\link{AnchorSet}} object that can be used as input to #' \code{\link{IntegrateData}}. #' #' @references Stuart T, Butler A, et al. Comprehensive Integration of #' Single-Cell Data. Cell. 
2019;177:1888-1902 \doi{10.1016/j.cell.2019.05.031} #' #' @importFrom pbapply pblapply #' @importFrom future.apply future_lapply #' @importFrom future nbrOfWorkers #' #' @export #' @concept integration #' #' @examples #' \dontrun{ #' # to install the SeuratData package see https://github.com/satijalab/seurat-data #' library(SeuratData) #' data("panc8") #' #' # panc8 is a merged Seurat object containing 8 separate pancreas datasets #' # split the object by dataset #' pancreas.list <- SplitObject(panc8, split.by = "tech") #' #' # perform standard preprocessing on each object #' for (i in 1:length(pancreas.list)) { #' pancreas.list[[i]] <- NormalizeData(pancreas.list[[i]], verbose = FALSE) #' pancreas.list[[i]] <- FindVariableFeatures( #' pancreas.list[[i]], selection.method = "vst", #' nfeatures = 2000, verbose = FALSE #' ) #' } #' #' # find anchors #' anchors <- FindIntegrationAnchors(object.list = pancreas.list) #' #' # integrate data #' integrated <- IntegrateData(anchorset = anchors) #' } #' FindIntegrationAnchors <- function( object.list = NULL, assay = NULL, reference = NULL, anchor.features = 2000, scale = TRUE, normalization.method = c("LogNormalize", "SCT"), sct.clip.range = NULL, reduction = c("cca", "rpca", "jpca", "rlsi"), l2.norm = TRUE, dims = 1:30, k.anchor = 5, k.filter = 200, k.score = 30, max.features = 200, nn.method = "annoy", n.trees = 50, eps = 0, verbose = TRUE ) { normalization.method <- match.arg(arg = normalization.method) reduction <- match.arg(arg = reduction) if (reduction == "rpca") { reduction <- "pca" } if (reduction == "rlsi") { reduction <- "lsi" if (normalization.method == "SCT") { warning("Requested normalization method 'SCT' is not applicable for LSI") normalization.method <- "LogNormalize" } scale <- FALSE k.filter <- NA } my.lapply <- ifelse( test = verbose && nbrOfWorkers() == 1, yes = pblapply, no = future_lapply ) object.ncells <- sapply(X = object.list, FUN = function(x) dim(x = x)[2]) if (any(object.ncells <= max(dims))) { bad.obs <- which(x = object.ncells <= max(dims)) stop("Max dimension too large: objects ", paste(bad.obs, collapse = ", "), " contain fewer than ", max(dims), " cells. \n Please specify a", " maximum dimension that is less than the number of cells in any ", "object (", min(object.ncells), ").") } if (!is.null(x = assay)) { if (length(x = assay) != length(x = object.list)) { stop("If specifying the assay, please specify one assay per object in the object.list") } object.list <- sapply( X = 1:length(x = object.list), FUN = function(x) { DefaultAssay(object = object.list[[x]]) <- assay[x] return(object.list[[x]]) } ) } else { assay <- sapply(X = object.list, FUN = DefaultAssay) } # check tool object.list <- lapply( X = object.list, FUN = function (obj) { slot(object = obj, name = "tools")$Integration <- NULL return(obj) }) object.list <- CheckDuplicateCellNames(object.list = object.list) slot <- "data" if (reduction == "lsi") { all.rownames <- lapply(X = object.list, FUN = rownames) anchor.features <- Reduce(f = intersect, x = all.rownames) } if (normalization.method == "SCT") { slot <- "scale.data" scale <- FALSE if (is.numeric(x = anchor.features)) { stop("Please specify the anchor.features to be used.
The expected ", "workflow for integratinge assays produced by SCTransform is ", "SelectIntegrationFeatures -> PrepSCTIntegration -> ", "FindIntegrationAnchors.") } sct.check <- sapply( X = 1:length(x = object.list), FUN = function(x) { sct.cmd <- grep( pattern = 'PrepSCTIntegration', x = Command(object = object.list[[x]]), value = TRUE ) # check assay has gone through PrepSCTIntegration if (!any(grepl(pattern = "PrepSCTIntegration", x = Command(object = object.list[[x]]))) || Command(object = object.list[[x]], command = sct.cmd, value = "assay") != assay[x]) { stop("Object ", x, " assay - ", assay[x], " has not been processed ", "by PrepSCTIntegration. Please run PrepSCTIntegration prior to ", "FindIntegrationAnchors if using assays generated by SCTransform.", call. = FALSE) } # check that the correct features are being used if (all(Command(object = object.list[[x]], command = sct.cmd, value = "anchor.features") != anchor.features)) { stop("Object ", x, " assay - ", assay[x], " was processed using a ", "different feature set than in PrepSCTIntegration. Please rerun ", "PrepSCTIntegration with the same anchor.features for all objects in ", "the object.list.", call. = FALSE) } } ) } if (is.numeric(x = anchor.features) && normalization.method != "SCT") { if (verbose) { message("Computing ", anchor.features, " integration features") } anchor.features <- SelectIntegrationFeatures( object.list = object.list, nfeatures = anchor.features, assay = assay ) } if (scale) { if (verbose) { message("Scaling features for provided objects") } object.list <- my.lapply( X = object.list, FUN = function(object) { ScaleData(object = object, features = anchor.features, verbose = FALSE) } ) } nn.reduction <- reduction # if using pca or lsi, only need to compute the internal neighborhood structure once # for each dataset internal.neighbors <- list() if (nn.reduction %in% c("pca", "lsi","jpca")) { if (nn.reduction == 'jpca') { nn.reduction <- 'joint.pca' reduction <- 'joint.pca' } k.filter <- NA if (verbose) { message("Computing within dataset neighborhoods") } k.neighbor <- max(k.anchor, k.score) internal.neighbors <- my.lapply( X = 1:length(x = object.list), FUN = function(x) { NNHelper( data = Embeddings(object = object.list[[x]][[nn.reduction]])[, dims], k = k.neighbor + 1, method = nn.method, n.trees = n.trees, eps = eps ) } ) } # determine pairwise combinations combinations <- expand.grid(1:length(x = object.list), 1:length(x = object.list)) combinations <- combinations[combinations$Var1 < combinations$Var2, , drop = FALSE] # determine the proper offsets for indexing anchors objects.ncell <- sapply(X = object.list, FUN = ncol) offsets <- as.vector(x = cumsum(x = c(0, objects.ncell)))[1:length(x = object.list)] if (is.null(x = reference)) { # case for all pairwise, leave the combinations matrix the same if (verbose) { message("Finding all pairwise anchors") } } else { reference <- unique(x = sort(x = reference)) if (max(reference) > length(x = object.list)) { stop('Error: requested reference object ', max(reference), " but only ", length(x = object.list), " objects provided") } # modify the combinations matrix to retain only R-R and R-Q comparisons if (verbose) { message("Finding anchors between all query and reference datasets") ok.rows <- (combinations$Var1 %in% reference) | (combinations$Var2 %in% reference) combinations <- combinations[ok.rows, ] } } # determine all anchors anchoring.fxn <- function(row) { i <- combinations[row, 1] j <- combinations[row, 2] object.1 <- DietSeurat( object = 
object.list[[i]], assays = assay[i], features = anchor.features, counts = FALSE, scale.data = TRUE, dimreducs = reduction ) object.2 <- DietSeurat( object = object.list[[j]], assays = assay[j], features = anchor.features, counts = FALSE, scale.data = TRUE, dimreducs = reduction ) # suppress key duplication warning suppressWarnings(object.1[["ToIntegrate"]] <- object.1[[assay[i]]]) DefaultAssay(object = object.1) <- "ToIntegrate" if (reduction %in% Reductions(object = object.1)) { slot(object = object.1[[reduction]], name = "assay.used") <- "ToIntegrate" } object.1 <- DietSeurat(object = object.1, assays = "ToIntegrate", scale.data = TRUE, dimreducs = reduction) suppressWarnings(object.2[["ToIntegrate"]] <- object.2[[assay[j]]]) DefaultAssay(object = object.2) <- "ToIntegrate" if (reduction %in% Reductions(object = object.2)) { slot(object = object.2[[reduction]], name = "assay.used") <- "ToIntegrate" } object.2 <- DietSeurat(object = object.2, assays = "ToIntegrate", scale.data = TRUE, dimreducs = reduction) object.pair <- switch( EXPR = reduction, 'cca' = { object.pair <- RunCCA( object1 = object.1, object2 = object.2, assay1 = "ToIntegrate", assay2 = "ToIntegrate", features = anchor.features, num.cc = max(dims), renormalize = FALSE, rescale = FALSE, verbose = verbose ) if (l2.norm){ object.pair <- L2Dim(object = object.pair, reduction = reduction) reduction <- paste0(reduction, ".l2") nn.reduction <- reduction } reduction.2 <- character() object.pair }, 'pca' = { object.pair <- ReciprocalProject( object.1 = object.1, object.2 = object.2, reduction = 'pca', projected.name = 'projectedpca', features = anchor.features, do.scale = FALSE, do.center = FALSE, slot = 'scale.data', l2.norm = l2.norm, verbose = verbose ) reduction <- "projectedpca.ref" reduction.2 <- "projectedpca.query" if (l2.norm) { reduction <- paste0(reduction, ".l2") reduction.2 <- paste0(reduction.2, ".l2") } object.pair }, 'lsi' = { object.pair <- ReciprocalProject( object.1 = object.1, object.2 = object.2, reduction = 'lsi', projected.name = 'projectedlsi', features = anchor.features, do.center = TRUE, do.scale = FALSE, slot = 'data', l2.norm = l2.norm, verbose = verbose ) reduction <- "projectedlsi.ref" reduction.2 <- "projectedlsi.query" if (l2.norm) { reduction <- paste0(reduction, ".l2") reduction.2 <- paste0(reduction.2, ".l2") } object.pair }, 'joint.pca' = { object.pair <- merge(x = object.1, y = object.2) reduction.2 <- "joint.pca" object.pair[['joint.pca']] <- CreateDimReducObject( embeddings = rbind(Embeddings(object.1[['joint.pca']]), Embeddings(object.2[['joint.pca']])), loadings = Loadings(object.1[['joint.pca']]), key = 'Joint_', assay = 'ToIntegrate') if (l2.norm) { object.pair <- L2Dim(object = object.pair, reduction = 'joint.pca', new.dr = 'joint.pca.l2', new.key = 'Jl2_' ) reduction <- paste0(reduction, ".l2") reduction.2 <- paste0(reduction.2, ".l2") } object.pair }, stop("Invalid reduction parameter. 
Please choose either cca, rpca, or rlsi") ) internal.neighbors <- internal.neighbors[c(i, j)] anchors <- FindAnchors( object.pair = object.pair, assay = c("ToIntegrate", "ToIntegrate"), slot = slot, cells1 = colnames(x = object.1), cells2 = colnames(x = object.2), internal.neighbors = internal.neighbors, reduction = reduction, reduction.2 = reduction.2, nn.reduction = nn.reduction, dims = dims, k.anchor = k.anchor, k.filter = k.filter, k.score = k.score, max.features = max.features, nn.method = nn.method, n.trees = n.trees, eps = eps, verbose = verbose ) anchors[, 1] <- anchors[, 1] + offsets[i] anchors[, 2] <- anchors[, 2] + offsets[j] return(anchors) } if (nbrOfWorkers() == 1) { all.anchors <- pblapply( X = 1:nrow(x = combinations), FUN = anchoring.fxn ) } else { all.anchors <- future_lapply( X = 1:nrow(x = combinations), FUN = anchoring.fxn, future.seed = TRUE ) } all.anchors <- do.call(what = 'rbind', args = all.anchors) all.anchors <- rbind(all.anchors, all.anchors[, c(2, 1, 3)]) all.anchors <- AddDatasetID(anchor.df = all.anchors, offsets = offsets, obj.lengths = objects.ncell) command <- LogSeuratCommand(object = object.list[[1]], return.command = TRUE) anchor.set <- new(Class = "IntegrationAnchorSet", object.list = object.list, reference.objects = reference %||% seq_along(object.list), anchors = all.anchors, offsets = offsets, anchor.features = anchor.features, command = command ) return(anchor.set) } # Merge dataset and perform reciprocal SVD projection, adding new dimreducs # for each projection and the merged original SVDs. # # @param object.1 First Seurat object to merge # @param object.2 Second Seurat object to merge # @param reduction Name of DimReduc to use. Must be an SVD-based DimReduc (eg, PCA or LSI) # so that the loadings can be used to project new embeddings. Must be present # in both input objects, with a substantial overlap in the features use to construct # the SVDs. # @param dims dimensions used for rpca # @param projected.name Name to store projected SVDs under (eg, "projectedpca") # @param features Features to use. Will subset the SVD loadings to use these features # before performing projection. Typically uses the anchor.features for integration. # @param do.center Center projected values (subtract mean) # @param do.scale Scale projected values (divide by SD) # @param slot Name of slot to pull data from. 
Should be scale.data for PCA and data for LSI # @param verbose Display messages # @return Returns a merged Seurat object with two projected SVDs (object.1 -> object.2, object.2 -> object.1) # and a merged SVD (needed for within-dataset neighbors) ReciprocalProject <- function( object.1, object.2, reduction, dims, projected.name, features, do.scale, do.center, slot, l2.norm, verbose = TRUE ) { common.features <- intersect( x = rownames(x = Loadings(object = object.1[[reduction]])), y = rownames(x = Loadings(object = object.2[[reduction]])) ) common.features <- intersect( x = common.features, y = features ) object.pair <- merge(x = object.1, y = object.2, merge.data = TRUE) data.1 <- GetAssayData( object = object.1, slot = slot ) data.2 <- GetAssayData( object = object.2, slot = slot ) proj.1 <- ProjectSVD( reduction = object.2[[reduction]], data = data.1, mode = reduction, features = common.features, do.scale = do.scale, do.center = do.center, use.original.stats = FALSE, verbose = verbose ) proj.2 <- ProjectSVD( reduction = object.1[[reduction]], data = data.2, mode = reduction, features = common.features, do.scale = do.scale, do.center = do.center, use.original.stats = FALSE, verbose = verbose ) # object.1 is ref, and object.2 is query reduction.dr.name.1 <- paste0(projected.name, ".ref") reduction.dr.name.2 <- paste0(projected.name, ".query") object.pair[[reduction.dr.name.1]] <- CreateDimReducObject( embeddings = rbind(Embeddings(object = object.1[[reduction]]), proj.2)[,dims], loadings = Loadings(object = object.1[[reduction]])[,dims], assay = DefaultAssay(object = object.1), key = paste0(projected.name, "ref_") ) object.pair[[reduction.dr.name.2]] <- CreateDimReducObject( embeddings = rbind(proj.1, Embeddings(object = object.2[[reduction]]))[,dims], loadings = Loadings(object = object.2[[reduction]])[,dims], assay = DefaultAssay(object = object.2), key = paste0(projected.name, "query_") ) object.pair[[reduction]] <- CreateDimReducObject( embeddings = rbind( Embeddings(object = object.1[[reduction]]), Embeddings(object = object.2[[reduction]]))[,dims], loadings = Loadings(object = object.1[[reduction]])[,dims], assay = DefaultAssay(object = object.1), key = paste0(projected.name, "_") ) if (l2.norm) { slot(object = object.pair[[reduction.dr.name.1]], name = "cell.embeddings") <- Sweep( x = Embeddings(object = object.pair[[reduction.dr.name.1]]), MARGIN = 2, STATS = apply(X = Embeddings(object = object.pair[[reduction.dr.name.1]]), MARGIN = 2, FUN = sd), FUN = "/" ) slot(object = object.pair[[reduction.dr.name.2]], name = "cell.embeddings") <- Sweep( x = Embeddings(object = object.pair[[reduction.dr.name.2]]), MARGIN = 2, STATS = apply(X = Embeddings(object = object.pair[[reduction.dr.name.2]]), MARGIN = 2, FUN = sd), FUN = "/" ) object.pair <- L2Dim(object = object.pair, reduction = reduction.dr.name.1) object.pair <- L2Dim(object = object.pair, reduction = reduction.dr.name.2) } return(object.pair) } #' Find transfer anchors #' #' Find a set of anchors between a reference and query object. These #' anchors can later be used to transfer data from the reference to #' query object using the \code{\link{TransferData}} object. #' #' The main steps of this procedure are outlined below. For a more detailed #' description of the methodology, please see Stuart, Butler, et al Cell 2019. #' \doi{10.1016/j.cell.2019.05.031}; \doi{10.1101/460147} #' #' \itemize{ #' #' \item{Perform dimensional reduction. 
Exactly what is done here depends on #' the values set for the \code{reduction} and \code{project.query} #' parameters. If \code{reduction = "pcaproject"}, a PCA is performed on #' either the reference (if \code{project.query = FALSE}) or the query (if #' \code{project.query = TRUE}), using the \code{features} specified. The data #' from the other dataset is then projected onto this learned PCA structure. #' If \code{reduction = "cca"}, then CCA is performed on the reference and #' query for this dimensional reduction step. If #' \code{reduction = "lsiproject"}, the stored LSI dimension reduction in the #' reference object is used to project the query dataset onto the reference. #' If \code{l2.norm} is set to \code{TRUE}, perform L2 normalization of the #' embedding vectors.} #' \item{Identify anchors between the reference and query - pairs of cells #' from each dataset that are contained within each other's neighborhoods #' (also known as mutual nearest neighbors).} #' \item{Filter low confidence anchors to ensure anchors in the low dimension #' space are in broad agreement with the high dimensional measurements. This #' is done by looking at the neighbors of each query cell in the reference #' dataset using \code{max.features} to define this space. If the reference #' cell isn't found within the first \code{k.filter} neighbors, remove the #' anchor.} #' \item{Assign each remaining anchor a score. For each anchor cell, determine #' the nearest \code{k.score} anchors within its own dataset and within its #' pair's dataset. Based on these neighborhoods, construct an overall neighbor #' graph and then compute the shared neighbor overlap between anchor and query #' cells (analogous to an SNN graph). We use the 0.01 and 0.90 quantiles on #' these scores to dampen outlier effects and rescale to range between 0-1.} #' } #' #' @param reference \code{\link{Seurat}} object to use as the reference #' @param query \code{\link{Seurat}} object to use as the query #' @param reference.assay Name of the Assay to use from reference #' @param reference.neighbors Name of the Neighbor to use from the reference. #' Optionally enables reuse of precomputed neighbors. #' @param query.assay Name of the Assay to use from query #' @param reduction Dimensional reduction to perform when finding anchors. #' Options are: #' \itemize{ #' \item{pcaproject: Project the PCA from the reference onto the query. We #' recommend using PCA when reference and query datasets are from scRNA-seq} #' \item{lsiproject: Project the LSI from the reference onto the query. We #' recommend using LSI when reference and query datasets are from scATAC-seq. #' This requires that LSI has been computed for the reference dataset, and the #' same features (eg, peaks or genome bins) are present in both the reference #' and query. See \code{\link[Signac]{RunTFIDF}} and #' \code{\link[Signac]{RunSVD}}} #' \item{rpca: Project the PCA from the reference onto the query, and the PCA #' from the query onto the reference (reciprocal PCA projection).} #' \item{cca: Run a CCA on the reference and query } #' } #' @param reference.reduction Name of dimensional reduction to use from the #' reference if running the pcaproject workflow. Optionally enables reuse of #' precomputed reference dimensional reduction. If NULL (default), use a PCA #' computed on the reference object. #' @param project.query Project the PCA from the query dataset onto the #' reference. 
Use only in rare cases where the query dataset has a much larger #' cell number, but the reference dataset has a unique assay for transfer. In #' this case, the default features will be set to the variable features of the #' query object that are also present in the reference. #' @param features Features to use for dimensional reduction. If not specified, #' set as variable features of the reference object which are also present in #' the query. #' @param scale Scale query data. #' @param normalization.method Name of normalization method used: LogNormalize #' or SCT. #' @param recompute.residuals If using SCT as a normalization method, compute #' query Pearson residuals using the reference SCT model parameters. #' @param npcs Number of PCs to compute on reference if reference.reduction is #' not provided. #' @param l2.norm Perform L2 normalization on the cell embeddings after #' dimensional reduction #' @param dims Which dimensions to use from the reduction to specify the #' neighbor search space #' @param k.anchor How many neighbors (k) to use when finding anchors #' @param k.filter How many neighbors (k) to use when filtering anchors. Set to #' NA to turn off filtering. #' @param k.score How many neighbors (k) to use when scoring anchors #' @param max.features The maximum number of features to use when specifying the #' neighborhood search space in the anchor filtering #' @param nn.method Method for nearest neighbor finding. Options include: rann, #' annoy #' @param n.trees More trees gives higher precision when using annoy approximate #' nearest neighbor search #' @param eps Error bound on the neighbor finding algorithm (from #' \code{\link{RANN}} or \code{\link{RcppAnnoy}}) #' @param approx.pca Use truncated singular value decomposition to approximate #' PCA #' @param mapping.score.k Compute and store nearest k query neighbors in the #' AnchorSet object that is returned. You can optionally set this if you plan #' on computing the mapping score and want to enable reuse of some downstream #' neighbor calculations to make the mapping score function more efficient. #' @param verbose Print progress bars and output #' #' @return Returns an \code{AnchorSet} object that can be used as input to #' \code{\link{TransferData}}, \code{\link{IntegrateEmbeddings}} and #' \code{\link{MapQuery}}. The dimension reduction used for finding anchors is #' stored in the \code{AnchorSet} object and can be used for computing anchor #' weights in downstream functions. Note that only the requested dimensions are #' stored in the dimension reduction object in the \code{AnchorSet}. This means #' that if \code{dims=2:20} is used, for example, the dimension of the stored #' reduction is \code{1:19}. #' #' @references Stuart T, Butler A, et al. Comprehensive Integration of #' Single-Cell Data. Cell.
2019;177:1888-1902 \doi{10.1016/j.cell.2019.05.031}; #' #' @export #' @importFrom methods slot slot<- #' @importFrom SeuratObject JoinLayers RenameAssays #' @concept integration #' @examples #' \dontrun{ #' # to install the SeuratData package see https://github.com/satijalab/seurat-data #' library(SeuratData) #' data("pbmc3k") #' #' # for demonstration, split the object into reference and query #' pbmc.reference <- pbmc3k[, 1:1350] #' pbmc.query <- pbmc3k[, 1351:2700] #' #' # perform standard preprocessing on each object #' pbmc.reference <- NormalizeData(pbmc.reference) #' pbmc.reference <- FindVariableFeatures(pbmc.reference) #' pbmc.reference <- ScaleData(pbmc.reference) #' #' pbmc.query <- NormalizeData(pbmc.query) #' pbmc.query <- FindVariableFeatures(pbmc.query) #' pbmc.query <- ScaleData(pbmc.query) #' #' # find anchors #' anchors <- FindTransferAnchors(reference = pbmc.reference, query = pbmc.query) #' #' # transfer labels #' predictions <- TransferData( #' anchorset = anchors, #' refdata = pbmc.reference$seurat_annotations #' ) #' pbmc.query <- AddMetaData(object = pbmc.query, metadata = predictions) #' } #' FindTransferAnchors <- function( reference, query, normalization.method = "LogNormalize", recompute.residuals = TRUE, reference.assay = NULL, reference.neighbors = NULL, query.assay = NULL, reduction = "pcaproject", reference.reduction = NULL, project.query = FALSE, features = NULL, scale = TRUE, npcs = 30, l2.norm = TRUE, dims = 1:30, k.anchor = 5, k.filter = NA, k.score = 30, max.features = 200, nn.method = "annoy", n.trees = 50, eps = 0, approx.pca = TRUE, mapping.score.k = NULL, verbose = TRUE ) { op <- options(Seurat.object.assay.calcn = FALSE) on.exit(expr = options(op), add = TRUE) # input validation ValidateParams_FindTransferAnchors( reference = reference, query = query, normalization.method = normalization.method, recompute.residuals = recompute.residuals, reference.assay = reference.assay, reference.neighbors = reference.neighbors, query.assay = query.assay, reduction = reduction, reference.reduction = reference.reduction, project.query = project.query, features = features, scale = scale, npcs = npcs, l2.norm = l2.norm, dims = dims, k.anchor = k.anchor, k.filter = k.filter, k.score = k.score, max.features = max.features, nn.method = nn.method, n.trees = n.trees, eps = eps, approx.pca = approx.pca, mapping.score.k = mapping.score.k, verbose = verbose ) projected <- ifelse(test = reduction == "pcaproject", yes = TRUE, no = FALSE) reduction.2 <- character() feature.mean <- NULL reference.reduction.init <- reference.reduction if (inherits(x = reference[[reference.assay]], what = 'Assay5')) { if (length(Layers(reference, search = "data")) > 1) { reference[[reference.assay]] <- JoinLayers( reference[[reference.assay]], layers = "data", new = "data") } } if (normalization.method == "SCT") { if (is.null(x = reference.reduction)) { reference <- suppressWarnings(expr = GetResidual( object = reference, assay = reference.assay, features = features, verbose = FALSE )) features <- intersect( x = features, y = rownames(reference[[reference.assay]]$scale.data) ) VariableFeatures(reference) <- features } if (IsSCT(assay = query[[query.assay]])) { query <- suppressWarnings(expr = GetResidual( object = query, assay = query.assay, features = features, verbose = FALSE )) } } # Rename query assay w same name as reference assay if (query.assay != reference.assay) { suppressWarnings(expr = query <- RenameAssays( object = query, assay.name = query.assay, new.assay.name = reference.assay, 
verbose = FALSE )) DefaultAssay(query) <- reference.assay } # only keep necessary info from objects suppressWarnings( query <- DietSeurat( object = query, assays = reference.assay, dimreducs = reference.reduction, features = features, scale.data = TRUE ) ) # check assay in the reference.reduction if (!is.null(reference.reduction) && slot(object = reference[[reference.reduction]], name = "assay.used") != reference.assay) { warning("reference assay is different from the assay.used in ", reference.reduction) slot(object = reference[[reference.reduction]], name = "assay.used") <- reference.assay } suppressWarnings( reference <- DietSeurat( object = reference, assays = reference.assay, dimreducs = reference.reduction, features = features, scale.data = TRUE ) ) # append query and reference to cell names - mainly to avoid name conflicts query <- RenameCells( object = query, new.names = paste0(Cells(x = query), "_", "query") ) reference <- RenameCells( object = reference, new.names = paste0(Cells(x = reference), "_", "reference") ) # Perform PCA projection if (reduction == 'pcaproject') { if (project.query) { if (is.null(x = reference.reduction)) { reference.reduction <- "pca" if (verbose) { message( "Performing PCA on the provided query using ", length(x = features), " features as input.") } if (normalization.method == "LogNormalize") { query <- ScaleData( object = query, features = features, do.scale = scale, verbose = FALSE ) } query <- RunPCA( object = query, npcs = npcs, reduction.name = reference.reduction, verbose = FALSE, features = features, approx = approx.pca ) } projected.pca <- ProjectCellEmbeddings( reference = query, reduction = reference.reduction, query = reference, scale = scale, dims = dims, feature.mean = feature.mean, verbose = verbose, normalization.method = normalization.method ) orig.embeddings <- Embeddings(object = query[[reference.reduction]])[, dims] orig.loadings <- Loadings(object = query[[reference.reduction]]) } else { if (is.null(x = reference.reduction)) { reference.reduction <- "pca" if (verbose) { message("Performing PCA on the provided reference using ", length(x = features), " features as input.") } if (normalization.method == "LogNormalize") { reference <- ScaleData(object = reference, features = features, do.scale = scale, verbose = FALSE) } reference <- RunPCA( object = reference, npcs = npcs, verbose = FALSE, features = features, approx = approx.pca ) } if (paste0("nCount_", query.assay) %in% colnames(query[[]])) { query_nCount_UMI <- query[[]][, paste0("nCount_", query.assay)] names(x = query_nCount_UMI) <- colnames(x = query) } else { query_nCount_UMI <- NULL } projected.pca <- ProjectCellEmbeddings( reference = reference, reduction = reference.reduction, normalization.method = normalization.method, query = query, scale = scale, dims = dims, nCount_UMI = query_nCount_UMI, feature.mean = feature.mean, verbose = verbose ) orig.embeddings <- Embeddings(object = reference[[reference.reduction]])[, dims] orig.loadings <- Loadings(object = reference[[reference.reduction]]) } combined.pca <- CreateDimReducObject( embeddings = as.matrix(x = rbind(orig.embeddings, projected.pca)), key = "ProjectPC_", assay = reference.assay ) # combined.ob <- suppressWarnings(expr = merge( # x = DietSeurat(object = reference, counts = FALSE), # y = DietSeurat(object = query, counts = FALSE), # )) ref.diet <- DietSeurat(object = reference, counts = FALSE) query.diet <- DietSeurat(object = query, counts = FALSE) counts.list <- list(reference = LayerData(ref.diet[[reference.assay]],
layer = "data")) query.data.list <- list() for (i in Layers(object = query.diet[[reference.assay]], search = "data")) { data.layer.name <- gsub(pattern = "data.", replacement = "", x = i) counts.list[[data.layer.name]] <- LayerData(object = query[[reference.assay]], layer = i) } combined.ob <- CreateSeuratObject(counts = counts.list, assay = reference.assay) for (i in Layers(object = combined.ob[[reference.assay]], search = "counts")){ data.layer.name <- gsub(pattern = "counts.", replacement = "data.", x = i) # replace counts. to data. layer.data <- LayerData(object = combined.ob, layer = i) LayerData(object = combined.ob, layer = data.layer.name) <- layer.data # set layer data } colnames(x = orig.loadings) <- paste0("ProjectPC_", 1:ncol(x = orig.loadings)) combined.ob[["pcaproject"]] <- combined.pca Loadings(object = combined.ob[["pcaproject"]], projected = FALSE) <- orig.loadings[, dims] Loadings(object = combined.ob[["pcaproject"]]) <- orig.loadings[, dims] } # Use reciprocal PCA projection in anchor finding if (reduction == "rpca") { # Run PCA on reference and query if (is.null(x = reference.reduction)) { reference.reduction <- "pca" if (verbose) { message("Performing PCA on the provided reference using ", length(x = features), " features as input.") } if (normalization.method == "LogNormalize") { reference <- ScaleData( object = reference, features = features, do.scale = scale, verbose = verbose ) } reference <- RunPCA( object = reference, npcs = npcs, verbose = FALSE, features = features, approx = approx.pca ) } if (verbose) { message("Performing PCA on the provided query using ", length(x = features), " features as input.") } if (normalization.method == "LogNormalize") { query <- ScaleData( object = query, features = features, do.scale = scale, verbose = verbose ) } query <- RunPCA( object = query, npcs = ncol(x = reference[[reference.reduction]]), reduction.name = reference.reduction, verbose = FALSE, features = features, approx = approx.pca ) combined.ob <- ReciprocalProject( object.1 = reference, object.2 = query, reduction = reference.reduction, dims = dims, projected.name = reduction, features = features, do.scale = FALSE, do.center = FALSE, slot = 'scale.data', l2.norm = l2.norm, verbose = verbose ) # pcaproject is used as the weight.matrix in MapQuery projected.pca <- ProjectCellEmbeddings( reference = reference, reduction = reference.reduction, query = query, scale = scale, normalization.method = normalization.method, dims = dims, feature.mean = feature.mean, verbose = verbose ) orig.embeddings <- Embeddings(object = reference[[reference.reduction]])[, dims] orig.loadings <- Loadings(object = reference[[reference.reduction]]) combined.pca <- CreateDimReducObject( embeddings = as.matrix(x = rbind(orig.embeddings, projected.pca)), key = "ProjectPC_", assay = reference.assay ) combined.ob[["pcaproject"]] <- combined.pca colnames(x = orig.loadings) <- paste0("ProjectPC_", 1:ncol(x = orig.loadings)) Loadings(object = combined.ob[["pcaproject"]]) <- orig.loadings[, dims] if (l2.norm) { # L2 norm is done on each projected PCA in ReciprocalProject, so turn it off here # avoids later error as we now have two reductions (rpca.ref and rpca.query) l2.norm <- FALSE reduction <- "rpca.ref.l2" reduction.2 <- "rpca.query.l2" } else { reduction <- "rpca.ref" reduction.2 <- "rpca.query" } if (project.query) { reduction <- gsub(".ref", ".query", reduction) reduction.2 <- gsub(".query", ".ref", reduction.2) } } # Run CCA as dimension reduction to be used in anchor finding if (reduction == 
'cca') { if (normalization.method == "LogNormalize") { reference <- ScaleData(object = reference, features = features, do.scale = scale, verbose = FALSE) query <- ScaleData(object = query, features = features, do.scale = scale, verbose = FALSE) } combined.ob <- RunCCA( object1 = reference, object2 = query, features = features, num.cc = max(dims), renormalize = FALSE, rescale = FALSE, verbose = verbose ) slot(object = combined.ob[["cca"]], name = "cell.embeddings") <- Embeddings(combined.ob[["cca"]])[, dims] slot(object = combined.ob[["cca"]], name = "feature.loadings") <- Loadings(combined.ob[["cca"]])[, dims] slot(object = combined.ob[["cca"]], name = "feature.loadings.projected") <- Loadings(object = combined.ob[["cca"]], projected = TRUE)[, dims] } if (reduction == "lsiproject") { if (project.query) { projected.lsi <- ProjectSVD( reduction = query[[reference.reduction]], data = GetAssayData(object = reference, assay = reference.assay, slot = "data"), mode = "lsi", do.center = FALSE, do.scale = FALSE, use.original.stats = FALSE, verbose = verbose ) orig.embeddings <- Embeddings(object = query[[reference.reduction]]) orig.loadings <- Loadings(object = query[[reference.reduction]]) } else { projected.lsi <- ProjectSVD( reduction = reference[[reference.reduction]], data = GetAssayData(object = query, assay = reference.assay, slot = "data"), mode = "lsi", do.center = FALSE, do.scale = FALSE, use.original.stats = FALSE, verbose = verbose ) orig.embeddings <- Embeddings(object = reference[[reference.reduction]]) orig.loadings <- Loadings(object = reference[[reference.reduction]]) } combined.lsi <- CreateDimReducObject( embeddings = as.matrix(x = rbind(orig.embeddings, projected.lsi))[,dims], key = "ProjectLSI_", assay = reference.assay ) combined.ob <- merge( x = DietSeurat(object = reference), y = DietSeurat(object = query) ) combined.ob[["lsiproject"]] <- combined.lsi colnames(x = orig.loadings) <- paste0("ProjectLSI_", 1:ncol(x = orig.loadings)) Loadings(object = combined.ob[["lsiproject"]]) <- orig.loadings[,dims] } if (l2.norm) { combined.ob <- L2Dim(object = combined.ob, reduction = reduction) reduction <- paste0(reduction, ".l2") } precomputed.neighbors <- list(ref.neighbors = NULL, query.neighbors = NULL) nn.idx1 <- NULL nn.idx2 <- NULL # if computing the mapping score later, compute large enough query # neighborhood here to reuse if (!is.null(x = mapping.score.k)) { if (verbose) { message("Finding query neighbors") } k.nn <- max(k.score, k.anchor) query.neighbors <- NNHelper( data = Embeddings(object = combined.ob[[reduction]])[colnames(x = query), ], k = max(mapping.score.k, k.nn + 1), method = nn.method, n.trees = n.trees, cache.index = TRUE ) query.neighbors.sub <- query.neighbors slot(object = query.neighbors.sub, name = "nn.idx") <- slot( object = query.neighbors.sub, name = "nn.idx")[, 1:(k.nn + 1)] slot(object = query.neighbors.sub, name = "nn.dist") <- slot( object = query.neighbors.sub, name = "nn.dist")[, 1:(k.nn + 1)] precomputed.neighbors[["query.neighbors"]] <- query.neighbors.sub nn.idx2 <- Index(object = query.neighbors.sub) } if (!is.null(x = reference.neighbors)) { precomputed.neighbors[["ref.neighbors"]] <- reference[[reference.neighbors]] } else { precomputed.neighbors[["ref.neighbors"]] <- NNHelper( data = Embeddings(combined.ob[[reduction]])[ colnames(x = reference), 1:length(x = dims) ], k = max(k.score, k.anchor) + 1, method = nn.method, cache.index = TRUE ) } nn.idx1 <- Index(object = precomputed.neighbors[["ref.neighbors"]]) anchors <- FindAnchors( object.pair 
= combined.ob, assay = c(reference.assay, reference.assay), slot = "data", cells1 = colnames(x = reference), cells2 = colnames(x = query), reduction = reduction, reduction.2 = reduction.2, internal.neighbors = precomputed.neighbors, dims = 1:length(x = dims), k.anchor = k.anchor, k.filter = k.filter, k.score = k.score, max.features = max.features, nn.method = nn.method, n.trees = n.trees, nn.idx1 = nn.idx1, nn.idx2 = nn.idx2, eps = eps, projected = projected, verbose = verbose ) reductions <- slot(object = combined.ob, name = "reductions") for (i in unique(x = c(reference.assay))) { dummy.assay <- paste0(i, "DUMMY") suppressWarnings( expr = combined.ob[[dummy.assay]] <- CreateDummyAssay(assay = combined.ob[[i]]) ) DefaultAssay(combined.ob) <- dummy.assay combined.ob[[i]] <- NULL suppressWarnings( expr = combined.ob[[i]] <- combined.ob[[dummy.assay]] ) DefaultAssay(combined.ob) <- i combined.ob[[dummy.assay]] <- NULL } slot(object = combined.ob, name = "reductions") <- reductions command <- LogSeuratCommand(object = combined.ob, return.command = TRUE) slot(command, name = 'params')$reference.reduction <- reference.reduction.init anchor.set <- new( Class = "TransferAnchorSet", object.list = list(combined.ob), reference.cells = colnames(x = reference), query.cells = colnames(x = query), anchors = anchors, anchor.features = features, command = command ) if (!is.null(x = precomputed.neighbors[["query.neighbors"]])) { slot(object = anchor.set, name = "neighbors") <- list( query.neighbors = query.neighbors) } return(anchor.set) } #' Get the predicted identity #' #' Utility function to easily pull out the name of the class with the maximum #' prediction. This is useful if you've set \code{prediction.assay = TRUE} in #' \code{\link{TransferData}} and want to have a vector with the predicted class. #' #' @param object Seurat object #' @param assay Name of the assay holding the predictions #' @param slot Slot of the assay in which the prediction scores are stored #' @param score.filter Return "Unassigned" for any cell with a score less than #' this value #' #' @return Returns a vector of predicted class names #' #' @examples #' \dontrun{ #' prediction.assay <- TransferData(anchorset = anchors, refdata = reference$class) #' query[["predictions"]] <- prediction.assay #' query$predicted.id <- GetTransferPredictions(query) #' } #' @export #' @concept integration #' GetTransferPredictions <- function(object, assay = "predictions", slot = "data", score.filter = 0.75) { dat <- GetAssayData(object[[assay]], slot = slot) predictions <- apply( X = dat, MARGIN = 2, FUN = function(x){ if (x['max'] < score.filter) { "Unassigned" } else { x <- x[-which(x = names(x = x) == "max")] names(x = which.max(x = x)) } } ) return(predictions) } #' Integrate data #' #' Perform dataset integration using a pre-computed \code{\link{AnchorSet}}. #' #' The main steps of this procedure are outlined below. For a more detailed #' description of the methodology, please see Stuart, Butler, et al Cell 2019. #' \doi{10.1016/j.cell.2019.05.031}; \doi{10.1101/460147} #' #' For pairwise integration: #' #' \itemize{ #' \item{Construct a weights matrix that defines the association between each #' query cell and each anchor. These weights are computed as 1 - the distance #' between the query cell and the anchor divided by the distance of the query #' cell to the \code{k.weight}th anchor multiplied by the anchor score #' computed in \code{\link{FindIntegrationAnchors}}. 
#' We then apply a Gaussian
#' kernel with a bandwidth defined by \code{sd.weight} and normalize across
#' all \code{k.weight} anchors.}
#' \item{Compute the anchor integration matrix as the difference between the
#' two expression matrices for every pair of anchor cells}
#' \item{Compute the transformation matrix as the product of the integration
#' matrix and the weights matrix.}
#' \item{Subtract the transformation matrix from the original expression
#' matrix.}
#' }
#'
#' For multiple dataset integration, we perform iterative pairwise integration.
#' To determine the order of integration (if not specified via
#' \code{sample.tree}), we
#' \itemize{
#'   \item{Define a distance between datasets as the total number of cells in
#'   the smaller dataset divided by the total number of anchors between the two
#'   datasets.}
#'   \item{Compute all pairwise distances between datasets}
#'   \item{Cluster this distance matrix to determine a guide tree}
#' }
#'
#'
#' @param anchorset An \code{\link{AnchorSet}} object generated by
#' \code{\link{FindIntegrationAnchors}}
#' @param new.assay.name Name for the new assay containing the integrated data
#' @param normalization.method Name of normalization method used: LogNormalize
#' or SCT
#' @param features Vector of features to use when computing the PCA to determine
#' the weights. Only set if you want a different set from those used in the
#' anchor finding process
#' @param features.to.integrate Vector of features to integrate. By default,
#' will use the features used in anchor finding.
#' @param dims Number of dimensions to use in the anchor weighting procedure
#' @param k.weight Number of neighbors to consider when weighting anchors
#' @param weight.reduction Dimension reduction to use when calculating anchor
#' weights. This can be one of:
#' \itemize{
#'   \item{A string, specifying the name of a dimension reduction present in
#'   all objects to be integrated}
#'   \item{A vector of strings, specifying the name of a dimension reduction to
#'   use for each object to be integrated}
#'   \item{A vector of \code{\link{DimReduc}} objects, specifying the object to
#'   use for each object in the integration}
#'   \item{NULL, in which case a new PCA will be calculated and used to
#'   calculate anchor weights}
#' }
#' Note that, if specified, the requested dimension reduction will only be used
#' for calculating anchor weights in the first merge between reference and
#' query, as the merged object will subsequently contain more cells than were in
#' the query, and weights will need to be calculated for all cells in the object.
#' @param sd.weight Controls the bandwidth of the Gaussian kernel for weighting
#' @param sample.tree Specify the order of integration. Order of integration
#' should be encoded in a matrix, where each row represents one of the pairwise
#' integration steps. Negative numbers specify a dataset, positive numbers
#' specify the integration results from a given row (the format of the merge
#' matrix included in the \code{\link{hclust}} function output). For example:
#' `matrix(c(-2, 1, -3, -1), ncol = 2)` gives:
#'
#' ```
#'      [,1] [,2]
#' [1,]   -2   -3
#' [2,]    1   -1
#' ```
#'
#' Which would cause datasets 2 and 3 to be integrated first, then the resulting
#' object integrated with dataset 1.
#'
#' If NULL, the sample tree will be computed automatically.
#' @param preserve.order Do not reorder objects based on size for each pairwise
#' integration.
#' @param eps Error bound on the neighbor finding algorithm (from #' \code{\link{RANN}}) #' @param verbose Print progress bars and output #' #' @return Returns a \code{\link{Seurat}} object with a new integrated #' \code{\link{Assay}}. If \code{normalization.method = "LogNormalize"}, the #' integrated data is returned to the \code{data} slot and can be treated as #' log-normalized, corrected data. If \code{normalization.method = "SCT"}, the #' integrated data is returned to the \code{scale.data} slot and can be treated #' as centered, corrected Pearson residuals. #' #' @references Stuart T, Butler A, et al. Comprehensive Integration of #' Single-Cell Data. Cell. 2019;177:1888-1902 \doi{10.1016/j.cell.2019.05.031} #' #' @export #' @concept integration #' @md #' @examples #' \dontrun{ #' # to install the SeuratData package see https://github.com/satijalab/seurat-data #' library(SeuratData) #' data("panc8") #' #' # panc8 is a merged Seurat object containing 8 separate pancreas datasets #' # split the object by dataset #' pancreas.list <- SplitObject(panc8, split.by = "tech") #' #' # perform standard preprocessing on each object #' for (i in 1:length(pancreas.list)) { #' pancreas.list[[i]] <- NormalizeData(pancreas.list[[i]], verbose = FALSE) #' pancreas.list[[i]] <- FindVariableFeatures( #' pancreas.list[[i]], selection.method = "vst", #' nfeatures = 2000, verbose = FALSE #' ) #' } #' #' # find anchors #' anchors <- FindIntegrationAnchors(object.list = pancreas.list) #' #' # integrate data #' integrated <- IntegrateData(anchorset = anchors) #' } #' IntegrateData <- function( anchorset, new.assay.name = "integrated", normalization.method = c("LogNormalize", "SCT"), features = NULL, features.to.integrate = NULL, dims = 1:30, k.weight = 100, weight.reduction = NULL, sd.weight = 1, sample.tree = NULL, preserve.order = FALSE, eps = 0, verbose = TRUE ) { normalization.method <- match.arg(arg = normalization.method) reference.datasets <- slot(object = anchorset, name = 'reference.objects') object.list <- slot(object = anchorset, name = 'object.list') anchors <- slot(object = anchorset, name = 'anchors') ref <- object.list[reference.datasets] features <- features %||% slot(object = anchorset, name = "anchor.features") unintegrated <- suppressWarnings(expr = merge( x = object.list[[1]], y = object.list[2:length(x = object.list)] )) if (!is.null(x = features.to.integrate)) { features.to.integrate <- intersect( x = features.to.integrate, y = Reduce( f = intersect, x = lapply( X = object.list, FUN = rownames ) ) ) } if (normalization.method == "SCT") { model.list <- list() for (i in 1:length(x = object.list)) { assay <- DefaultAssay(object = object.list[[i]]) if (length(x = setdiff(x = features.to.integrate, y = features)) != 0) { object.list[[i]] <- GetResidual( object = object.list[[i]], features = setdiff(x = features.to.integrate, y = features), verbose = verbose ) } print(i) model.list[[i]] <- slot(object = object.list[[i]][[assay]], name = "SCTModel.list") object.list[[i]][[assay]] <- suppressWarnings(expr = CreateSCTAssayObject( data = GetAssayData( object = object.list[[i]], assay = assay, slot = "scale.data") ) ) } model.list <- unlist(x = model.list) slot(object = anchorset, name = "object.list") <- object.list } # perform pairwise integration of reference objects reference.integrated <- PairwiseIntegrateReference( anchorset = anchorset, new.assay.name = new.assay.name, normalization.method = normalization.method, features = features, features.to.integrate = features.to.integrate, dims = dims, 
k.weight = k.weight, weight.reduction = weight.reduction, sd.weight = sd.weight, sample.tree = sample.tree, preserve.order = preserve.order, eps = eps, verbose = verbose ) # set SCT model if (normalization.method == "SCT") { if (is.null(x = Tool(object = reference.integrated, slot = "Integration"))) { reference.sample <- slot(object = anchorset, name = "reference.objects") } else { reference.sample <- SampleIntegrationOrder( tree = slot( object = reference.integrated, name = "tools" )$Integration@sample.tree )[1] } reference.cells <- Cells(x = object.list[[reference.sample]]) reference.model <- NULL if (length(x = model.list) > 0) { reference.model <- sapply(X = model.list, FUN = function(model) { reference.check <- FALSE model.cells <- Cells(x = model) if (length(x = model.cells) > 0 & length(x = setdiff(x = model.cells, y = reference.cells)) == 0) { reference.check <- TRUE } return(reference.check) } ) reference.model <- model.list[[which(reference.model)]] } } if (length(x = reference.datasets) == length(x = object.list)) { if (normalization.method == "SCT") { reference.integrated[[new.assay.name]] <- CreateSCTAssayObject( data = GetAssayData(object = reference.integrated, assay = new.assay.name, slot = "data"), scale.data = ScaleData( object = GetAssayData(object = reference.integrated, assay = new.assay.name, slot = "scale.data"), do.scale = FALSE, do.center = TRUE, verbose = FALSE), SCTModel.list = reference.model ) levels(x = reference.integrated[[new.assay.name]]) <- "refmodel" reference.integrated[[assay]] <- unintegrated[[assay]] } DefaultAssay(object = reference.integrated) <- new.assay.name VariableFeatures(object = reference.integrated) <- features reference.integrated[["FindIntegrationAnchors"]] <- slot(object = anchorset, name = "command") reference.integrated <- suppressWarnings(LogSeuratCommand(object = reference.integrated)) return(reference.integrated) } else { active.assay <- DefaultAssay(object = ref[[1]]) reference.integrated[[active.assay]] <- NULL reference.integrated[[active.assay]] <- CreateAssayObject( data = GetAssayData( object = reference.integrated[[new.assay.name]], slot = 'data' ), check.matrix = FALSE ) DefaultAssay(object = reference.integrated) <- active.assay reference.integrated[[new.assay.name]] <- NULL VariableFeatures(object = reference.integrated) <- features # Extract the query objects (if any) and map to reference integrated.data <- MapQueryData( anchorset = anchorset, reference = reference.integrated, new.assay.name = new.assay.name, normalization.method = normalization.method, features = features, features.to.integrate = features.to.integrate, dims = dims, k.weight = k.weight, weight.reduction = weight.reduction, sd.weight = sd.weight, preserve.order = preserve.order, eps = eps, verbose = verbose ) # Construct final assay object integrated.assay <- CreateAssayObject( data = integrated.data, check.matrix = FALSE ) if (normalization.method == "SCT") { integrated.assay <- CreateSCTAssayObject( data = integrated.data, scale.data = ScaleData( object = integrated.data, do.scale = FALSE, do.center = TRUE, verbose = FALSE), SCTModel.list = reference.model ) levels(x = integrated.assay) <- "refmodel" } unintegrated[[new.assay.name]] <- integrated.assay unintegrated <- SetIntegrationData( object = unintegrated, integration.name = "Integration", slot = "anchors", new.data = anchors ) if (!is.null(x = Tool(object = reference.integrated, slot = "Integration"))) { sample.tree <- GetIntegrationData( object = reference.integrated, integration.name = 
"Integration", slot = "sample.tree" ) } unintegrated <- SetIntegrationData( object = unintegrated, integration.name = "Integration", slot = "sample.tree", new.data = sample.tree ) DefaultAssay(object = unintegrated) <- new.assay.name VariableFeatures(object = unintegrated) <- features unintegrated[["FindIntegrationAnchors"]] <- slot(object = anchorset, name = "command") unintegrated <- suppressWarnings(LogSeuratCommand(object = unintegrated)) return(unintegrated) } } #' @inheritParams IntegrateData #' #' @rdname IntegrateEmbeddings #' @concept integration #' @export #' @method IntegrateEmbeddings IntegrationAnchorSet #' IntegrateEmbeddings.IntegrationAnchorSet <- function( anchorset, new.reduction.name = "integrated_dr", reductions = NULL, dims.to.integrate = NULL, k.weight = 100, weight.reduction = NULL, sd.weight = 1, sample.tree = NULL, preserve.order = FALSE, verbose = TRUE, ... ) { CheckDots(...) reference.datasets <- slot(object = anchorset, name = 'reference.objects') object.list <- slot(object = anchorset, name = 'object.list') anchors <- slot(object = anchorset, name = 'anchors') reductions <- reductions %||% slot( object = anchorset, name = 'weight.reduction' ) ValidateParams_IntegrateEmbeddings_IntegrationAnchors( anchorset = anchorset, object.list = object.list, reductions = reductions, dims.to.integrate = dims.to.integrate, k.weight = k.weight, weight.reduction = weight.reduction, sample.tree = sample.tree ) unintegrated <- merge( x = object.list[[1]], y = object.list[2:length(x = object.list)] ) # make DimReducs into Assays temporarily intdr.assay <- DefaultAssay(object = reductions) int.assay <- DefaultAssay(object = object.list[[1]]) dims.names <- paste0("drtointegrate-", dims.to.integrate) # cell.names.map <- Cells(x = unintegrated) cell.names.map <- colnames(x = unintegrated) names(x = cell.names.map) <- make.unique(names = unname(obj = do.call( what = c, args = lapply(X = object.list, FUN = colnames))) ) for (i in 1:length(x = object.list)) { embeddings <- t(x = Embeddings(object = reductions)[cell.names.map[Cells(x = object.list[[i]])], dims.to.integrate]) rownames(x = embeddings) <- dims.names fake.assay <- suppressWarnings( expr = CreateAssayObject( data = embeddings, check.matrix = FALSE ) ) object.list[[i]][['drtointegrate']] <- fake.assay DefaultAssay(object = object.list[[i]]) <- "drtointegrate" } slot(object = anchorset, name = "object.list") <- object.list new.reduction.name.safe <- gsub(pattern = "_", replacement = "", x = new.reduction.name) new.reduction.name.safe <- gsub(pattern = "[.]", replacement = "", x = new.reduction.name.safe) reference.integrated <- PairwiseIntegrateReference( anchorset = anchorset, new.assay.name = new.reduction.name.safe, normalization.method = "LogNormalize", features = dims.names, features.to.integrate = dims.names, dims = NULL, k.weight = k.weight, weight.reduction = weight.reduction, sd.weight = sd.weight, sample.tree = sample.tree, preserve.order = preserve.order, verbose = verbose ) if (length(x = reference.datasets) == length(x = object.list)) { reference.dr <- CreateDimReducObject( embeddings = as.matrix(x = t(GetAssayData( object = reference.integrated[[new.reduction.name.safe]] ))), assay = intdr.assay, loadings = Loadings(object = reductions), key = paste0(new.reduction.name.safe, "_") ) DefaultAssay(object = reference.integrated) <- int.assay reference.integrated[["drtointegrate"]] <- NULL reference.integrated[[new.reduction.name.safe]] <- NULL reference.integrated[[new.reduction.name]] <- reference.dr 
return(reference.integrated) } active.assay <- DefaultAssay(object = object.list[reference.datasets][[1]]) reference.integrated[[active.assay]] <- NULL reference.integrated[[active.assay]] <- CreateAssayObject( data = GetAssayData( object = reference.integrated[[new.reduction.name.safe]], slot = 'data' ) ) DefaultAssay(object = reference.integrated) <- active.assay reference.integrated[[new.reduction.name.safe]] <- NULL VariableFeatures(object = reference.integrated) <- dims.names # Extract the query objects (if any) and map to reference integrated.data <- MapQueryData( anchorset = anchorset, reference = reference.integrated, new.assay.name = new.reduction.name.safe, normalization.method = "LogNormalize", features = dims.names, features.to.integrate = dims.names, dims = NULL, k.weight = k.weight, weight.reduction = weight.reduction, sd.weight = sd.weight, preserve.order = preserve.order, verbose = verbose ) suppressWarnings(expr = unintegrated[[new.reduction.name]] <- CreateDimReducObject( embeddings = as.matrix(x = t(x = integrated.data)), assay = intdr.assay, loadings = Loadings(object = reductions), key = paste0(new.reduction.name.safe, "_") )) unintegrated <- SetIntegrationData( object = unintegrated, integration.name = "Integration", slot = "anchors", new.data = anchors ) if (!is.null(x = Tool(object = reference.integrated, slot = "Integration"))) { sample.tree <- GetIntegrationData( object = reference.integrated, integration.name = "Integration", slot = "sample.tree" ) } unintegrated <- SetIntegrationData( object = unintegrated, integration.name = "Integration", slot = "sample.tree", new.data = sample.tree ) unintegrated[["FindIntegrationAnchors"]] <- slot(object = anchorset, name = "command") suppressWarnings(unintegrated <- LogSeuratCommand(object = unintegrated)) return(unintegrated) } #' @param reference Reference object used in anchorset construction #' @param query Query object used in anchorset construction #' @param query.assay Name of the Assay to use from query #' @param reuse.weights.matrix Can be used in conjunction with the store.weights #' parameter in TransferData to reuse a precomputed weights matrix. #' #' @rdname IntegrateEmbeddings #' @concept integration #' @export #' @method IntegrateEmbeddings TransferAnchorSet #' IntegrateEmbeddings.TransferAnchorSet <- function( anchorset, reference, query, query.assay = NULL, new.reduction.name = "integrated_dr", reductions = 'pcaproject', dims.to.integrate = NULL, k.weight = 100, weight.reduction = NULL, reuse.weights.matrix = TRUE, sd.weight = 1, preserve.order = FALSE, verbose = TRUE, ... ) { CheckDots(...) 
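  # Usage sketch (illustrative comments only, not executed): a typical
  # reference-mapping workflow that reaches this method. The object names
  # (`ref`, `query`) are placeholders, not defaults.
  #   anchors <- FindTransferAnchors(reference = ref, query = query, dims = 1:30)
  #   query <- TransferData(anchorset = anchors, reference = ref, query = query,
  #     refdata = ref$celltype, store.weights = TRUE)
  #   query <- IntegrateEmbeddings(anchorset = anchors, reference = ref,
  #     query = query, new.reduction.name = "ref.pca")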
combined.object <- slot(object = anchorset, name = 'object.list')[[1]] anchors <- slot(object = anchorset, name = 'anchors') weights.matrix <- NULL query.assay <- query.assay %||% DefaultAssay(query) ValidateParams_IntegrateEmbeddings_TransferAnchors( anchorset = anchorset, combined.object = combined.object, reference = reference, query = query, query.assay = query.assay, reductions = reductions, dims.to.integrate = dims.to.integrate, k.weight = k.weight, weight.reduction = weight.reduction, reuse.weights.matrix = reuse.weights.matrix ) object.list <- list(reference, query) # make DimReducs into Assays temporarily intdr.assay <- DefaultAssay(object = object.list[[1]][[reductions[[1]]]]) int.assay <- DefaultAssay(object = object.list[[1]]) dims.names <- paste0("drtointegrate-", dims.to.integrate) for (i in 1:length(x = object.list)) { embeddings <- t(x = Embeddings( object = object.list[[i]], reduction = reductions[[i]] )[ , dims.to.integrate]) rownames(x = embeddings) <- dims.names fake.assay <- suppressWarnings( expr = CreateAssayObject( data = embeddings, check.matrix = FALSE ) ) object.list[[i]][['drtointegrate']] <- fake.assay DefaultAssay(object = object.list[[i]]) <- "drtointegrate" object.list[[i]] <- DietSeurat(object = object.list[[i]], assays = "drtointegrate") } slot(object = anchorset, name = "object.list") <- object.list new.reduction.name.safe <- gsub(pattern = "_", replacement = "", x = new.reduction.name) new.reduction.name.safe <- gsub(pattern = "[.]", replacement = "", x = new.reduction.name) slot(object = anchorset, name = "reference.objects") <- 1 anchors <- as.data.frame(x = anchors) anchors$dataset1 <- 1 anchors$dataset2 <- 2 slot(object = anchorset, name = "anchors") <- anchors integrated.embeddings <- MapQueryData( anchorset = anchorset, reference = object.list[[1]], new.assay.name = new.reduction.name.safe, normalization.method = "LogNormalize", features = dims.names, features.to.integrate = dims.names, dims = NULL, k.weight = k.weight, weight.reduction = weight.reduction, weights.matrix = weights.matrix, no.offset = TRUE, sd.weight = sd.weight, preserve.order = preserve.order, verbose = verbose ) integrated.embeddings <- as.matrix(x = integrated.embeddings) query[[new.reduction.name]] <- CreateDimReducObject( embeddings = t(x = integrated.embeddings[, Cells(x = query[[query.assay]])]), assay = DefaultAssay(object = query[[reductions[1]]]), key = paste0(new.reduction.name.safe, "_") ) query <- RenameCells( object = query, new.names = gsub(pattern = "_query$", replacement = "", x = Cells(x = query)) ) query[[reductions[[1]]]] <- NULL return(query) } #' Integrate embeddings from the integrated sketched.assay #' #' The main steps of this procedure are outlined below. For a more detailed #' description of the methodology, please see Hao, et al Biorxiv 2022: #' \doi{10.1101/2022.02.24.481684} #' #' First learn a atom dictionary representation to reconstruct each cell. #' Then, using this dictionary representation, #' reconstruct the embeddings of each cell from the integrated atoms. 
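#'
#' A minimal usage sketch (illustrative only; it assumes \code{obj} already
#' contains a sketched assay named 'sketch' and batch-corrected embeddings in a
#' reduction named 'integrated_dr', matching the parameter defaults below):
#' \preformatted{
#' obj <- ProjectIntegration(
#'   object = obj,
#'   sketched.assay = "sketch",
#'   assay = "RNA",
#'   reduction = "integrated_dr"
#' )
#' }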
#'
#' @param object A Seurat object with all cells for one dataset
#' @param sketched.assay Assay name for sketched-cell expression (default is 'sketch')
#' @param assay Assay name for original expression (default is 'RNA')
#' @param features Features used for atomic sketch integration
#' @param reduction Dimensional reduction name for batch-corrected embeddings
#' in the sketched object (default is 'integrated_dr')
#' @param method Method used to construct the sketch-cell representation
#' for all cells (default is 'sketch'). Can be one of:
#' \itemize{
#'  \item \dQuote{\code{sketch}}: Use random sketched data slot
#'  \item \dQuote{\code{data}}: Use data slot
#' }
#' @param ratio Sketch ratio of data slot when \code{method} is set
#' to \dQuote{\code{sketch}}; defaults to 0.8
#' @param reduction.name Name to save new reduction as; defaults to
#' \code{paste0(reduction, '.full')}
#' @param reduction.key Key for new dimensional reduction; defaults to creating
#' one from \code{reduction.name}
#' @param layers Names of layers for correction.
#' @param sketched.layers Names of sketched layers, defaults to all
#' layers of \dQuote{\code{object[[assay]]}}
#' @param seed A positive integer. The seed for the random number generator, defaults to 123.
#' @param verbose Print progress and messages
#'
#' @return Returns a Seurat object with an integrated dimensional reduction
#'
#' @importFrom MASS ginv
#' @importFrom Matrix t
#'
#' @export
#'
ProjectIntegration <- function(
  object,
  sketched.assay = 'sketch', # DefaultAssay(object)
  assay = 'RNA',
  reduction = 'integrated_dr', # harmony; rerun UMAP on this
  features = NULL, # VF from object[[atom.assay]]
  layers = 'data',
  reduction.name = NULL,
  reduction.key = NULL,
  method = c('sketch', 'data'),
  ratio = 0.8,
  sketched.layers = NULL,
  seed = 123,
  verbose = TRUE
) {
  layers <- Layers(object = object[[assay]], search = layers)
  # Check input and output dimensional reductions
  sketched.layers <- sketched.layers %||% layers
  reduction <- match.arg(arg = reduction, choices = Reductions(object = object))
  reduction.name <- reduction.name %||% paste0(reduction, '.full')
  reduction.key <- reduction.key %||% Key(object = reduction.name, quiet = TRUE)
  if (reduction.name %in% Reductions(object = object)) {
    warning(
      "'", reduction.name, "' already exists, overwriting",
      call. = FALSE,
      immediate. = TRUE
    )
  }
  # Check the method being used
  method <- method[1L]
  method <- match.arg(arg = method)
  # Check our layers
  sketched.assay <- match.arg(arg = sketched.assay, choices = Assays(object = object))
  assay <- match.arg(arg = assay, choices = Assays(object = object))
  layer.full <- layers
  layers <- layers %||% intersect(
    x = DefaultLayer(object[[sketched.assay]]),
    y = Layers(object[[assay]])
  )
  if (is.null(x = layer.full)) {
    sketched.assay.missing <- setdiff(x = layers, DefaultLayer(object = object[[sketched.assay]]))
    if (length(x = sketched.assay.missing) == length(x = layers)) {
      stop("None of the requested layers are present in the sketched.assay")
    } else if (length(x = sketched.assay.missing)) {
      warning(
        length(x = sketched.assay.missing),
        " layers missing from the sketched.assay",
        call. = FALSE,
        immediate.
= TRUE ) layers <- intersect(x = layers, y = DefaultLayer(object = object[[sketched.assay]])) } } # check layers layers.missing <- setdiff(layers, Layers(object = object[[assay]])) if (length(x = layers.missing)) { stop('layer ', layers.missing[1L], ' are not present in ', assay, " assay") } # check features features <- features %||% VariableFeatures(object = object[[sketched.assay]]) # TODO: see if we can handle missing features with `union` features.atom <- Reduce( f = intersect, x = lapply( X = sketched.layers, FUN = function(lyr) { return(Features(x = object[[sketched.assay]], layer = lyr)) } ) ) features <- intersect(x = features, y = features.atom) if (length(x = features) == 0) { stop('Features are not found. Please check VariableFeatures(object[[sketched.assay]]) ', 'or set features in ProjectIntegration') } ncells <- c( 0, sapply( X = layers, FUN = function(lyr) { return(length(x = Cells(x = object[[assay]], layer = lyr))) } ) ) if (length(sketched.layers) == 1) { sketched.layers <- rep(sketched.layers, length(layers)) } sketch.matrix <- switch( EXPR = method, data = { R = as.sparse( x = diag( x = length( x = features) ) ) R }, sketch = { R <- FeatureSketch(features = features, ratio = ratio, seed = seed ) R } ) emb.list <- list() cells.list <- list() for (i in seq_along(along.with = layers)) { if (length(unique(sketched.layers)) == length(layers)) { cells.sketch <- Cells(x = object[[sketched.assay]], layer = sketched.layers[i]) } else if (length(unique(sketched.layers)) == 1) { cells.sketch <- intersect(Cells(x = object[[sketched.assay]][[sketched.layers[[1]]]]), Cells(object[[assay]][[layers[i] ]] )) } if (isTRUE(x = verbose)) { message( length(x = cells.sketch), ' atomic cells identified in the sketched.assay' ) message("Correcting embeddings") } emb <- UnSketchEmbeddings( atom.data = LayerData( object = object[[sketched.assay]], layer = layers[i], features = features ), atom.cells = cells.sketch, orig.data = LayerData( object = object[[assay]], layer = layers[i], features = features ), embeddings = Embeddings(object = object[[reduction]]), sketch.matrix = sketch.matrix) emb.list[[i]] <- emb cells.list[[i]] <- colnames(x = emb) } emb.all <- t(x = matrix( data = unlist(emb.list), nrow = ncol(x = object[[reduction]]), ncol = length(unlist(cells.list)))) rownames(emb.all) <- unlist(cells.list) emb.all <- emb.all[colnames(object[[assay]]), ] object[[reduction.name]] <- CreateDimReducObject( embeddings = emb.all, loadings = Loadings(object = object[[reduction]]), key = reduction.key, assay = assay ) CheckGC() return(object) } #' Calculate the local structure preservation metric #' #' Calculates a metric that describes how well the local structure of each group #' prior to integration is preserved after integration. This procedure works as #' follows: For each group, compute a PCA, compute the top num.neighbors in pca #' space, compute the top num.neighbors in corrected pca space, compute the #' size of the intersection of those two sets of neighbors. #' Return the average over all groups. 
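#'
#' A minimal usage sketch (illustrative only; it assumes an integrated object
#' \code{obj} with a batch variable named "tech" and a corrected "pca"
#' reduction computed on the integrated assay):
#' \preformatted{
#' ls.scores <- LocalStruct(
#'   object = obj,
#'   grouping.var = "tech",
#'   reduction = "pca",
#'   reduced.dims = 1:10,
#'   orig.dims = 1:10
#' )
#' }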
#' #' @param object Seurat object #' @param grouping.var Grouping variable #' @param idents Optionally specify a set of idents to compute metric for #' @param neighbors Number of neighbors to compute in pca/corrected pca space #' @param reduction Dimensional reduction to use for corrected space #' @param reduced.dims Number of reduced dimensions to use #' @param orig.dims Number of PCs to use in original space #' @param verbose Display progress bar #' #' @return Returns the average preservation metric #' #' @importFrom RANN nn2 #' @importFrom utils txtProgressBar setTxtProgressBar #' #' @export #' @concept integration #' LocalStruct <- function( object, grouping.var, idents = NULL, neighbors = 100, reduction = "pca", reduced.dims = 1:10, orig.dims = 1:10, verbose = TRUE ) { if (is.null(x = idents)) { cells.use <- colnames(x = object) } else { cells.use <- WhichCells(object = object, idents = idents) } Idents(object = object) <- grouping.var local.struct <- list() ob.list <- SplitObject(object = object, split.by = grouping.var) if (verbose) { pb <- txtProgressBar( min = 1, max = length(x = ob.list), style = 3, file = stderr() ) } embeddings <- Embeddings(object = object[[reduction]])[, reduced.dims] for (i in 1:length(x = ob.list)) { ob <- ob.list[[i]] ob <- FindVariableFeatures( object = ob, verbose = FALSE, selection.method = "dispersion", nfeatures = 2000 ) ob <- ScaleData( object = ob, features = VariableFeatures(object = ob), verbose = FALSE ) ob <- RunPCA( object = ob, features = VariableFeatures(object = ob), verbose = FALSE, npcs = max(orig.dims) ) ob.cells <- intersect(x = cells.use, y = colnames(x = ob)) if (length(x = ob.cells) == 0) next nn.corrected <- nn2( data = embeddings[colnames(x = ob), ], query = embeddings[ob.cells, ], k = neighbors )$nn.idx nn.orig <- nn2( data = Embeddings(object = ob[["pca"]])[, orig.dims], query = Embeddings(object = ob[["pca"]])[ob.cells, orig.dims], k = neighbors )$nn.idx local.struct[[i]] <- sapply(X = 1:nrow(x = nn.orig), FUN = function(x) { length(x = intersect(x = nn.orig[x, ], y = nn.corrected[x, ])) / neighbors }) if (verbose) { setTxtProgressBar(pb = pb, value = i) } } names(x = local.struct) <- names(x = ob.list) return(local.struct) } #' Map query cells to a reference #' #' This is a convenience wrapper function around the following three functions #' that are often run together when mapping query data to a reference: #' \code{\link{TransferData}}, \code{\link{IntegrateEmbeddings}}, #' \code{\link{ProjectUMAP}}. Note that by default, the \code{weight.reduction} #' parameter for all functions will be set to the dimension reduction method #' used in the \code{\link{FindTransferAnchors}} function call used to construct #' the anchor object, and the \code{dims} parameter will be the same dimensions #' used to find anchors. #' #' @inheritParams IntegrateEmbeddings #' @inheritParams TransferData #' @inheritParams ProjectUMAP #' @param store.weights Determine if the weight and anchor matrices are stored. 
#' @param transferdata.args A named list of additional arguments to #' \code{\link{TransferData}} #' @param integrateembeddings.args A named list of additional arguments to #' \code{\link{IntegrateEmbeddings}} #' @param projectumap.args A named list of additional arguments to #' \code{\link{ProjectUMAP}} #' #' @return Returns a modified query Seurat object containing:#' #' \itemize{ #' \item{New Assays corresponding to the features transferred and/or their #' corresponding prediction scores from \code{\link{TransferData}}} #' \item{An integrated reduction from \code{\link{IntegrateEmbeddings}}} #' \item{A projected UMAP reduction of the query cells projected into the #' reference UMAP using \code{\link{ProjectUMAP}}} #' } #' #' @importFrom rlang exec #' #' @export #' @concept integration #' MapQuery <- function( anchorset, query, reference, refdata = NULL, new.reduction.name = NULL, reference.reduction = NULL, reference.dims = NULL, query.dims = NULL, store.weights = FALSE, reduction.model = NULL, transferdata.args = list(), integrateembeddings.args = list(), projectumap.args = list(), verbose = TRUE ) { transfer.reduction <- slot(object = anchorset, name = "command")$reduction if (DefaultAssay(anchorset@object.list[[1]]) %in% Assays(reference)) { DefaultAssay(reference) <- DefaultAssay(anchorset@object.list[[1]]) } else { stop('The assay used to create the anchorset does not match any', 'of the assays in the reference object.') } # determine anchor type if (grepl(pattern = "pca", x = transfer.reduction)) { anchor.reduction <- "pcaproject" # check if the anchorset can be used for mapping if (is.null(x = slot(object = anchorset, name = "command")$reference.reduction)) { stop('The reference.reduction parameter was not set when running ', 'FindTransferAnchors, so the resulting AnchorSet object cannot be used ', 'in the MapQuery function.') } } else if (grepl(pattern = "cca", x = transfer.reduction)) { anchor.reduction <- "cca" ref.cca.embedding <- Embeddings( slot(object = anchorset, name = "object.list")[[1]][["cca"]] )[slot(object = anchorset, name = "reference.cells"), ] rownames(x = ref.cca.embedding) <- gsub( pattern = "_reference", replacement = "", x = rownames(x = ref.cca.embedding) ) query.cca.embedding <- Embeddings( slot(object = anchorset, name = "object.list")[[1]][["cca"]] )[slot(object = anchorset, name = "query.cells"), ] rownames(x = query.cca.embedding) <- gsub( pattern = "_query", replacement = "", x = rownames(x = query.cca.embedding) ) reference[["cca"]] <- CreateDimReducObject( embeddings = ref.cca.embedding, key = "CCA_", assay = DefaultAssay(reference) ) query[["cca"]] <- CreateDimReducObject( embeddings = query.cca.embedding, key = "CCA_", assay = DefaultAssay(query) ) reference.reduction <- new.reduction.name <- "cca" reference.dims <- query.dims <- 1:ncol(x = ref.cca.embedding) } else if (grepl(pattern = "lsi", x = transfer.reduction)) { anchor.reduction <- "lsiproject" } else if (grepl(pattern = "direct", x = transfer.reduction)) { anchor.reduction <- paste0( slot(object = anchorset, name = "command")$bridge.assay.name, ".reduc" ) ref.reduction.emb <- Embeddings( object = slot( object = anchorset, name = "object.list" )[[1]][[anchor.reduction]])[ slot(object = anchorset, name = "reference.cells"),] rownames(ref.reduction.emb) <- gsub( pattern = "_reference", replacement = "", x = rownames(ref.reduction.emb) ) reference[[anchor.reduction]] <- CreateDimReducObject( embeddings = ref.reduction.emb, key = "L_", assay = DefaultAssay(reference) ) } else { stop("unkown 
type of anchors") } reference.reduction <- reference.reduction %||% slot(object = anchorset, name = "command")$reference.reduction %||% anchor.reduction new.reduction.name <- new.reduction.name %||% paste0("ref.", reference.reduction) # checking TransferData parameters td.badargs <- names(x = transferdata.args)[!names(x = transferdata.args) %in% names(x = formals(fun = TransferData))] if (length(x = td.badargs) > 0) { warning("The following arguments in transferdata.args are not valid: ", paste(td.badargs, collapse = ", "), immediate. = TRUE, call. = FALSE) } transferdata.args <- transferdata.args[names(x = transferdata.args) %in% names(x = formals(fun = TransferData))] transferdata.args$weight.reduction <- transferdata.args$weight.reduction %||% anchor.reduction # checking IntegrateEmbeddings parameters ie.badargs <- names(x = integrateembeddings.args)[!names(x = integrateembeddings.args) %in% names(x = formals(fun = IntegrateEmbeddings.TransferAnchorSet))] if (length(x = ie.badargs) > 0) { warning("The following arguments in integrateembeddings.args are not valid: ", paste(ie.badargs, collapse = ", "), immediate. = TRUE, call. = FALSE) } integrateembeddings.args <- integrateembeddings.args[names(x = integrateembeddings.args) %in% names(x = formals(fun = IntegrateEmbeddings.TransferAnchorSet))] integrateembeddings.args$reductions <- integrateembeddings.args$reductions %||% anchor.reduction integrateembeddings.args$weight.reduction <- integrateembeddings.args$weight.reduction %||% anchor.reduction slot(object = query, name = "tools")$TransferData <- NULL reuse.weights.matrix <- FALSE td.allarguments <- c(list(anchorset = anchorset, reference = reference, query = query, refdata = refdata, store.weights = TRUE, only.weights = is.null(x = refdata), verbose = verbose), transferdata.args) query <- exec("TransferData",!!!td.allarguments) if (inherits(x = transferdata.args$weight.reduction , "character") && transferdata.args$weight.reduction == integrateembeddings.args$weight.reduction) { reuse.weights.matrix <- TRUE } if (anchor.reduction != "cca") { ie.allarguments <- c(list( anchorset = anchorset, reference = reference, query = query, new.reduction.name = new.reduction.name, reuse.weights.matrix = reuse.weights.matrix, verbose = verbose ), integrateembeddings.args ) query <- exec("IntegrateEmbeddings",!!!ie.allarguments) Misc( object = query[[new.reduction.name]], slot = 'ref.dims' ) <- slot(object = anchorset, name = "command")$dims } slot(object = query, name = "tools")$MapQuery <- NULL if (store.weights) { slot(object = query, name = "tools")$MapQuery <- slot( object = query, name = "tools" )$TransferData slot(object = query, name = "tools")$MapQuery$anchor <- slot( object = anchorset, name = "anchors" ) } slot(object = query, name = "tools")$TransferData <- NULL if (!is.null(x = reduction.model)) { reference.dims <- reference.dims %||% slot(object = anchorset, name = "command")$dims query.dims <- query.dims %||% 1:ncol(x = query[[new.reduction.name]]) if (length(x = query.dims) != length(x = reference.dims)) { message("Query and reference dimensions are not equal, proceeding with reference dimensions.") query.dims <- reference.dims } ref_nn.num <- Misc(object = reference[[reduction.model]], slot = "model")$n_neighbors pu.allarguments <- c(list( query = query, query.reduction = new.reduction.name, query.dims = query.dims, reference = reference, reference.dims = reference.dims, reference.reduction = reference.reduction, reduction.model = reduction.model, k.param = ref_nn.num ), 
projectumap.args ) query <- exec("ProjectUMAP",!!!pu.allarguments) } return(query) } #' @param anchors AnchorSet object or just anchor matrix from the #' Anchorset object returned from FindTransferAnchors #' @param combined.object Combined object (ref + query) from the #' Anchorset object returned #' @param query.neighbors Neighbors object computed on query cells #' @param ref.embeddings Reference embeddings matrix #' @param query.embeddings Query embeddings matrix #' @param kanchors Number of anchors to use in projection steps when computing #' weights #' @param ndim Number of dimensions to use when working with low dimensional #' projections of the data #' @param ksmooth Number of cells to average over when computing transition #' probabilities #' @param ksnn Number of cells to average over when determining the kernel #' bandwidth from the SNN graph #' @param snn.prune Amount of pruning to apply to edges in SNN graph #' @param subtract.first.nn Option to the scoring function when computing #' distances to subtract the distance to the first nearest neighbor #' @param nn.method Nearest neighbor method to use (annoy or RANN) #' @param n.trees More trees gives higher precision when using annoy approximate #' nearest neighbor search #' @param query.weights Query weights matrix for reuse #' @param verbose Display messages/progress #' @param ... Reserved for internal use #' #' @return Returns a vector of cell scores #' #' @importClassesFrom SeuratObject Neighbor #' #' @rdname MappingScore #' @concept integration #' @export #' MappingScore.default <- function( anchors, combined.object, query.neighbors, ref.embeddings, query.embeddings, kanchors = 50, ndim = 50, ksmooth = 100, ksnn = 20, snn.prune = 0, subtract.first.nn = TRUE, nn.method = "annoy", n.trees = 50, query.weights = NULL, verbose = TRUE, ... ) { CheckDots(...) 
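  # Outline of the steps below: (1) make sure a query neighbor graph with at
  # least `ksmooth` neighbors is available, (2) use the anchors and their
  # weights to project the reference PCA structure onto the query and then
  # project those values back into the original query PCA space, and
  # (3) score each query cell by comparing its original neighborhood to the
  # neighborhood of its re-projected position (via an SNN-based kernel).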
# Input checks start.time <- Sys.time() if (is.null(x = query.neighbors) || ncol(x = query.neighbors) < ksmooth) { message("Recomputing query neighborhoods.\nSetting mapping.score.k in ", "FindTransferAnchors to the ksmooth \nvalue here (", ksmooth, "), can bypass this calculation in future runs.") query.neighbors <- FindNeighbors( object = query.embeddings, k.param = ksmooth, nn.method = nn.method, n.trees = n.trees, cache.index = TRUE, return.neighbor = TRUE, verbose = FALSE ) } ref.cells <- rownames(x = ref.embeddings) query.cells <- rownames(query.embeddings) # Project reference values onto query if (verbose) { message("Projecting reference PCA onto query") } ## Need to set up an IntegrationData object to use FindWeights here int.mat <- matrix(data = NA, nrow = nrow(x = anchors), ncol = 0) rownames(x = int.mat) <- query.cells[anchors[, "cell2"]] slot(object = combined.object, name = 'tools')[["IT1"]] <- new( Class = "IntegrationData", anchors = anchors, neighbors = list(cells1 = ref.cells, cells2 = query.cells), integration.matrix = int.mat ) ## Finding weights of anchors in query pca space ref.pca.orig <- ref.embeddings[, 1:ndim] query.pca.orig <- query.embeddings[, 1:ndim] dr.weights <- suppressWarnings(expr = CreateDimReducObject( embeddings = rbind(query.pca.orig, ref.pca.orig) )) if (!is.null(x = query.weights)) { weights.matrix <- query.weights } else { combined.object <- FindWeights( object = combined.object, integration.name = "IT1", reduction = dr.weights, dims = 1:ncol(x = dr.weights), k = kanchors, sd.weight = 1, eps = 0, nn.method = nn.method, n.trees = n.trees, verbose = verbose ) weights.matrix <- GetIntegrationData( object = combined.object, integration.name = "IT1", slot = "weights" ) } ## Perform projection of ref pca values using weights matrix ref.pca <- ref.embeddings[ref.cells[anchors[, 1]], 1:ndim] rownames(x = ref.pca) <- paste0(rownames(x = ref.pca), "_reference") query.cells.projected <- Matrix::crossprod( x = as.sparse(x = ref.pca), y = weights.matrix ) colnames(x = query.cells.projected) <- query.cells rownames(x = query.cells.projected) <- colnames(x = ref.pca) # Re-project the query cells back onto query if (verbose) { message("Projecting back the query cells into original PCA space") } ## Compute new weights dr.weights <- suppressWarnings(CreateDimReducObject( embeddings = rbind( t(x = as.matrix(x = query.cells.projected)), ref.pca.orig[ref.cells, ] ), )) combined.object <- FindWeights( object = combined.object, integration.name = "IT1", reduction = dr.weights, dims = 1:ndim, k = kanchors, sd.weight = 1, eps = 0, nn.method = nn.method, n.trees = n.trees, reverse = TRUE, verbose = verbose ) weights.matrix <- GetIntegrationData( object = combined.object, integration.name = "IT1", slot = "weights" ) ## Project back onto query orig.pca <- query.embeddings[query.cells[anchors[, 2]], ] query.cells.back.corrected <- Matrix::t( x = Matrix::crossprod( x = as.sparse(x = orig.pca), y = weights.matrix)[1:ndim, ] ) query.cells.back.corrected <- as.matrix(x = query.cells.back.corrected) rownames(x = query.cells.back.corrected) <- query.cells query.cells.pca <- query.embeddings[query.cells, 1:ndim] if (verbose) { message("Computing scores:") message(" Finding neighbors of original query cells") } ## Compute original neighborhood of query cells if (is.null(x = query.neighbors)) { query.neighbors <- NNHelper( data = query.cells.pca, query = query.cells.pca, k = max(ksmooth, ksnn), method = nn.method, n.trees = n.trees, cache.index = TRUE ) } if (verbose) { message(" 
Finding neighbors of transformed query cells") } ## Compute new neighborhood of query cells after projections if (nn.method == "annoy") { if (is.null(x = Index(object = query.neighbors))) { corrected.neighbors <- NNHelper( data = query.cells.pca, query = query.cells.back.corrected, k = max(ksmooth, ksnn), method = nn.method, n.treees = n.trees, cache.index = TRUE ) } else { corrected.neighbors <- AnnoySearch( index = Index(object = query.neighbors), query = query.cells.back.corrected, k = max(ksmooth, ksnn) ) corrected.neighbors <- new( Class = 'Neighbor', nn.idx = corrected.neighbors$nn.idx, nn.dist = corrected.neighbors$nn.dists ) } } if (verbose) { message(" Computing query SNN") } snn <- ComputeSNN( nn_ranked = Indices(query.neighbors)[, 1:ksnn], prune = snn.prune ) query.cells.pca <- t(x = query.cells.pca) if (verbose) { message(" Determining bandwidth and computing transition probabilities") } scores <- ScoreHelper( snn = snn, query_pca = query.cells.pca, query_dists = Distances(object = query.neighbors), corrected_nns = Indices(object = corrected.neighbors), k_snn = ksnn, subtract_first_nn = subtract.first.nn, display_progress = verbose ) scores[scores > 1] <- 1 names(x = scores) <- query.cells end.time <- Sys.time() if (verbose) { message("Total elapsed time: ", end.time - start.time) } return(scores) } #' @rdname MappingScore #' @export #' @concept integration #' @method MappingScore AnchorSet #' MappingScore.AnchorSet <- function( anchors, kanchors = 50, ndim = 50, ksmooth = 100, ksnn = 20, snn.prune = 0, subtract.first.nn = TRUE, nn.method = "annoy", n.trees = 50, query.weights = NULL, verbose = TRUE, ... ) { CheckDots(...) combined.object <- slot(object = anchors, name = "object.list")[[1]] combined.object <- RenameCells( object = combined.object, new.names = unname(obj = make.unique(sapply( X = Cells(x = combined.object), FUN = RemoveLastField ))) ) query.cells <- make.unique(sapply( X = slot(object = anchors, name = "query.cells"), FUN = RemoveLastField )) ref.cells <- make.unique(sapply( X = slot(object = anchors, name = "reference.cells"), FUN = RemoveLastField )) query.embeddings <- Embeddings(object = subset( x = combined.object[["pcaproject.l2"]], cells = query.cells )) ref.embeddings <- Embeddings(object = subset( x = combined.object[["pcaproject.l2"]], cells = ref.cells )) query.neighbors <- slot(object = anchors, name = "neighbors")[["query.neighbors"]] # reduce size of anchorset combined object combined.object <- DietSeurat(object = combined.object) combined.object <- subset( x = combined.object, features = c(rownames(x = combined.object)[1]) ) for (i in colnames(x = combined.object[[]])) { combined.object[[i]] <- NULL } return(MappingScore( anchors = slot(object = anchors, name = "anchors"), combined.object = combined.object, query.neighbors = query.neighbors, ref.embeddings = ref.embeddings, query.embeddings = query.embeddings, kanchors = kanchors, ndim = ndim, ksmooth = ksmooth, ksnn = ksnn, snn.prune = snn.prune, subtract.first.nn = subtract.first.nn, nn.method = nn.method, n.trees = n.trees, query.weights = query.weights, verbose = verbose )) } #' Calculates a mixing metric #' #' Here we compute a measure of how well mixed a composite dataset is. To #' compute, we first examine the local neighborhood for each cell (looking at #' max.k neighbors) and determine for each group (could be the dataset after #' integration) the k nearest neighbor and what rank that neighbor was in the #' overall neighborhood. 
We then take the median across all groups as the mixing #' metric per cell. #' #' @param object Seurat object #' @param grouping.var Grouping variable for dataset #' @param reduction Which dimensionally reduced space to use #' @param dims Dimensions to use #' @param k Neighbor number to examine per group #' @param max.k Maximum size of local neighborhood to compute #' @param eps Error bound on the neighbor finding algorithm (from RANN) #' @param verbose Displays progress bar #' #' @return Returns a vector of values of the mixing metric for each cell #' #' @importFrom RANN nn2 #' @importFrom pbapply pbsapply #' @importFrom future.apply future_sapply #' @importFrom future nbrOfWorkers #' @export #' @concept integration #' MixingMetric <- function( object, grouping.var, reduction = "pca", dims = 1:2, k = 5, max.k = 300, eps = 0, verbose = TRUE ) { my.sapply <- ifelse( test = verbose && nbrOfWorkers() == 1, yes = pbsapply, no = future_sapply ) embeddings <- Embeddings(object = object[[reduction]])[, dims] nn <- nn2( data = embeddings, k = max.k, eps = eps ) group.info <- object[[grouping.var, drop = TRUE]] groups <- unique(x = group.info) mixing <- my.sapply( X = 1:ncol(x = object), FUN = function(x) { sapply(X = groups, FUN = function(y) { which(x = group.info[nn$nn.idx[x, ]] == y)[k] }) } ) mixing[is.na(x = mixing)] <- max.k mixing <- apply( X = mixing, MARGIN = 2, FUN = median ) return(mixing) } #' Prepare an object list normalized with sctransform for integration. #' #' This function takes in a list of objects that have been normalized with the #' \code{\link{SCTransform}} method and performs the following steps: #' \itemize{ #' \item{If anchor.features is a numeric value, calls \code{\link{SelectIntegrationFeatures}} #' to determine the features to use in the downstream integration procedure.} #' \item{Ensures that the sctransform residuals for the features specified #' to anchor.features are present in each object in the list. This is #' necessary because the default behavior of \code{\link{SCTransform}} is to #' only store the residuals for the features determined to be variable. #' Residuals are recomputed for missing features using the stored model #' parameters via the \code{\link{GetResidual}} function.} #' \item{Subsets the \code{scale.data} slot to only contain the residuals for #' anchor.features for efficiency in downstream processing. } #' } #' #' @param object.list A list of \code{\link{Seurat}} objects to prepare for integration #' @param assay The name of the \code{\link{Assay}} to use for integration. This can be a #' single name if all the assays to be integrated have the same name, or a character vector #' containing the name of each \code{\link{Assay}} in each object to be integrated. The #' specified assays must have been normalized using \code{\link{SCTransform}}. #' If NULL (default), the current default assay for each object is used. #' @param anchor.features Can be either: #' \itemize{ #' \item{A numeric value. This will call \code{\link{SelectIntegrationFeatures}} #' to select the provided number of features to be used in anchor finding} #' \item{A vector of features to be used as input to the anchor finding #' process} #' } #' @param sct.clip.range Numeric of length two specifying the min and max values #' the Pearson residual will be clipped to #' @param verbose Display output/messages #' #' @return A list of \code{\link{Seurat}} objects with the appropriate \code{scale.data} slots #' containing only the required \code{anchor.features}. 
#' #' @importFrom pbapply pblapply #' @importFrom methods slot slot<- #' @importFrom future nbrOfWorkers #' @importFrom future.apply future_lapply #' #' @export #' @concept integration #' @examples #' \dontrun{ #' # to install the SeuratData package see https://github.com/satijalab/seurat-data #' library(SeuratData) #' data("panc8") #' #' # panc8 is a merged Seurat object containing 8 separate pancreas datasets #' # split the object by dataset and take the first 2 to integrate #' pancreas.list <- SplitObject(panc8, split.by = "tech")[1:2] #' #' # perform SCTransform normalization #' pancreas.list <- lapply(X = pancreas.list, FUN = SCTransform) #' #' # select integration features and prep step #' features <- SelectIntegrationFeatures(pancreas.list) #' pancreas.list <- PrepSCTIntegration( #' pancreas.list, #' anchor.features = features #' ) #' #' # downstream integration steps #' anchors <- FindIntegrationAnchors( #' pancreas.list, #' normalization.method = "SCT", #' anchor.features = features #' ) #' pancreas.integrated <- IntegrateData(anchors, normalization.method = "SCT") #' } #' PrepSCTIntegration <- function( object.list, assay = NULL, anchor.features = 2000, sct.clip.range = NULL, verbose = TRUE ) { my.lapply <- ifelse( test = verbose && nbrOfWorkers() == 1, yes = pblapply, no = future_lapply ) assay <- assay %||% sapply(X = object.list, FUN = DefaultAssay) assay <- rep_len(x = assay, length.out = length(x = object.list)) objects.names <- names(x = object.list) object.list <- lapply( X = 1:length(x = object.list), FUN = function(i) { DefaultAssay(object = object.list[[i]]) <- assay[i] object.list[[i]][[assay[i]]] <- as(object = object.list[[i]][[assay[i]]], Class = "SCTAssay") return(object.list[[i]]) } ) sct.check <- vapply( X = 1:length(x = object.list), FUN = function(i) { sct.check <- IsSCT(assay = object.list[[i]][[assay[i]]]) if (!sct.check) { if ("FindIntegrationAnchors" %in% Command(object = object.list[[i]]) && Command(object = object.list[[i]], command = "FindIntegrationAnchors", value = "normalization.method") == "SCT") { sct.check <- TRUE } } return(sct.check) }, FUN.VALUE = logical(length = 1L), USE.NAMES = FALSE ) if (!all(sct.check)) { stop( "The following assays have not been processed with SCTransform:\n", paste( ' object:', which(x = !sct.check, useNames = FALSE), '- assay:', assay[!sct.check], collapse = '\n' ), call. = FALSE ) } if (is.numeric(x = anchor.features)) { anchor.features <- SelectIntegrationFeatures( object.list = object.list, nfeatures = anchor.features, verbose = verbose ) } object.list <- my.lapply( X = 1:length(x = object.list), FUN = function(i) { obj <- GetResidual( object = object.list[[i]], assay = assay[i], features = anchor.features, replace.value = ifelse(test = is.null(x = sct.clip.range), yes = FALSE, no = TRUE), clip.range = sct.clip.range, verbose = FALSE ) scale.data <- GetAssayData( object = obj, assay = assay[i], slot = 'scale.data' ) obj <- SetAssayData( object = obj, slot = 'scale.data', new.data = scale.data[anchor.features, ], assay = assay[i] ) return(obj) } ) assays.used <- assay for (i in 1:length(x = object.list)) { assay <- as.character(x = assays.used[i]) object.list[[i]] <- LogSeuratCommand(object = object.list[[i]]) } names(x = object.list) <- objects.names return(object.list) } #' Select integration features #' #' Choose the features to use when integrating multiple datasets. 
#' This function
#' ranks features by the number of datasets they are deemed variable in,
#' breaking ties by the median variable feature rank across datasets. It returns
#' the top scoring features by this ranking.
#'
#' If, for any assay in the list, \code{\link{FindVariableFeatures}} hasn't been
#' run, this method will try to run it using the \code{fvf.nfeatures} parameter
#' and any additional ones specified through the \dots.
#'
#' @param object.list List of Seurat objects
#' @param nfeatures Number of features to return
#' @param assay Name or vector of assay names (one for each object) from which
#' to pull the variable features.
#' @param verbose Print messages
#' @param fvf.nfeatures nfeatures for \code{\link{FindVariableFeatures}}. Used
#' if \code{VariableFeatures} have not been set for any object in
#' \code{object.list}.
#' @param ... Additional parameters to \code{\link{FindVariableFeatures}}
#'
#' @return A vector of selected features
#'
#' @importFrom utils head
#'
#' @export
#' @concept integration
#'
#' @examples
#' \dontrun{
#' # to install the SeuratData package see https://github.com/satijalab/seurat-data
#' library(SeuratData)
#' data("panc8")
#'
#' # panc8 is a merged Seurat object containing 8 separate pancreas datasets
#' # split the object by dataset and take the first 2
#' pancreas.list <- SplitObject(panc8, split.by = "tech")[1:2]
#'
#' # perform SCTransform normalization
#' pancreas.list <- lapply(X = pancreas.list, FUN = SCTransform)
#'
#' # select integration features
#' features <- SelectIntegrationFeatures(pancreas.list)
#' }
#'
SelectIntegrationFeatures <- function(
  object.list,
  nfeatures = 2000,
  assay = NULL,
  verbose = TRUE,
  fvf.nfeatures = 2000,
  ...
) {
  if (!is.null(x = assay)) {
    if (length(x = assay) != length(x = object.list)) {
      stop("If specifying the assay, please specify one assay per object in the object.list")
    }
    for (ii in 1:length(x = object.list)) {
      DefaultAssay(object = object.list[[ii]]) <- assay[ii]
    }
  } else {
    assay <- sapply(X = object.list, FUN = DefaultAssay)
  }
  for (ii in 1:length(x = object.list)) {
    if (length(x = VariableFeatures(object = object.list[[ii]])) == 0) {
      if (verbose) {
        message(paste0("No variable features found for object ", ii,
          " in the object.list. Running FindVariableFeatures ..."))
      }
      object.list[[ii]] <- FindVariableFeatures(object = object.list[[ii]],
        nfeatures = fvf.nfeatures, verbose = verbose, ...)
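      # The per-object variable features gathered above are pooled, tallied
      # across datasets, and rank-ordered below to select the final set.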
} } var.features <- unname(obj = unlist(x = lapply( X = 1:length(x = object.list), FUN = function(x) VariableFeatures(object = object.list[[x]], assay = assay[x])) )) var.features <- sort(x = table(var.features), decreasing = TRUE) for (i in 1:length(x = object.list)) { var.features <- var.features[names(x = var.features) %in% rownames(x = object.list[[i]][[assay[i]]])] } tie.val <- var.features[min(nfeatures, length(x = var.features))] features <- names(x = var.features[which(x = var.features > tie.val)]) vf.list <- lapply(X = object.list, FUN = VariableFeatures) if (length(x = features) > 0) { feature.ranks <- sapply(X = features, FUN = function(x) { ranks <- sapply(X = vf.list, FUN = function(vf) { if (x %in% vf) { return(which(x = x == vf)) } return(NULL) }) median(x = unlist(x = ranks)) }) features <- names(x = sort(x = feature.ranks)) } features.tie <- var.features[which(x = var.features == tie.val)] tie.ranks <- sapply(X = names(x = features.tie), FUN = function(x) { ranks <- sapply(X = vf.list, FUN = function(vf) { if (x %in% vf) { return(which(x = x == vf)) } return(NULL) }) median(x = unlist(x = ranks)) }) features <- c( features, names(x = head(x = sort(x = tie.ranks), nfeatures - length(x = features))) ) return(features) } .FeatureRank <- function(features, flist, ranks = FALSE) { franks <- vapply( X = features, FUN = function(x) { return(median(x = unlist(x = lapply( X = flist, FUN = function(fl) { if (x %in% fl) { return(which(x = x == fl)) } return(NULL) } )))) }, FUN.VALUE = numeric(length = 1L) ) franks <- sort(x = franks) if (!isTRUE(x = ranks)) { franks <- names(x = franks) } return(franks) } #' Select integration features #' #' @param object Seurat object #' @param nfeatures Number of features to return for integration #' @param assay Name of assay to use for integration feature selection #' @param method Which method to pull. For \code{HVFInfo} and #' \code{VariableFeatures}, choose one from one of the #' following: #' \itemize{ #' \item \dQuote{vst} #' \item \dQuote{sctransform} or \dQuote{sct} #' \item \dQuote{mean.var.plot}, \dQuote{dispersion}, \dQuote{mvp}, or #' \dQuote{disp} #' } #' @param layers Name of layers to use for integration feature selection #' @param verbose Print messages #' @param ... Arguments passed on to \code{method} #' #' @export #' SelectIntegrationFeatures5 <- function( object, nfeatures = 2000, assay = NULL, method = NULL, layers = NULL, verbose = TRUE, ... ) { assay <- assay %||% DefaultAssay(object = object) layers <- Layers(object = object[[assay]], search = layers) var.features <- VariableFeatures( object = object, assay = assay, nfeatures = nfeatures, method = method, layer = layers, simplify = TRUE ) return(var.features) } #' Select SCT integration features #' #' @param object Seurat object #' @param nfeatures Number of features to return for integration #' @param assay Name of assay to use for integration feature selection #' @param verbose Print messages #' @param ... Arguments passed on to \code{method} #' #' @export #' SelectSCTIntegrationFeatures <- function( object, nfeatures = 3000, assay = NULL, verbose = TRUE, ... 
) { assay <- assay %||% DefaultAssay(object = object) if (!inherits(x = object[[assay]], what = 'SCTAssay')) { abort(message = "'assay' must be an SCTAssay") } models <- levels(x = object[[assay]]) vf.list <- VariableFeatures( object = object[[assay]], layer = models, nfeatures = nfeatures, simplify = FALSE ) var.features <- sort( x = table(unlist(x = vf.list, use.names = FALSE)), decreasing = TRUE ) for (i in 1:length(x = models)) { vst_out <- SCTModel_to_vst(SCTModel = slot(object = object[[assay]], name = "SCTModel.list")[[models[[i]]]]) var.features <- var.features[names(x = var.features) %in% rownames(x = vst_out$gene_attr)] } tie.val <- var.features[min(nfeatures, length(x = var.features))] features <- names(x = var.features[which(x = var.features > tie.val)]) if (length(x = features) > 0) { feature.ranks <- sapply(X = features, FUN = function(x) { ranks <- sapply(X = vf.list, FUN = function(vf) { if (x %in% vf) { return(which(x = x == vf)) } return(NULL) }) median(x = unlist(x = ranks)) }) features <- names(x = sort(x = feature.ranks)) } features.tie <- var.features[which(x = var.features == tie.val)] tie.ranks <- sapply(X = names(x = features.tie), FUN = function(x) { ranks <- sapply(X = vf.list, FUN = function(vf) { if (x %in% vf) { return(which(x = x == vf)) } return(NULL) }) median(x = unlist(x = ranks)) }) features <- c( features, names(x = head(x = sort(x = tie.ranks), nfeatures - length(x = features))) ) return(features) } #' Transfer data #' #' Transfer categorical or continuous data across single-cell datasets. For #' transferring categorical information, pass a vector from the reference #' dataset (e.g. \code{refdata = reference$celltype}). For transferring #' continuous information, pass a matrix from the reference dataset (e.g. #' \code{refdata = GetAssayData(reference[['RNA']])}). #' #' The main steps of this procedure are outlined below. For a more detailed #' description of the methodology, please see Stuart, Butler, et al Cell 2019. #' \doi{10.1016/j.cell.2019.05.031}; \doi{10.1101/460147} #' #' For both transferring discrete labels and also feature imputation, we first #' compute the weights matrix. #' #' \itemize{ #' \item{Construct a weights matrix that defines the association between each #' query cell and each anchor. These weights are computed as 1 - the distance #' between the query cell and the anchor divided by the distance of the query #' cell to the \code{k.weight}th anchor multiplied by the anchor score #' computed in \code{\link{FindIntegrationAnchors}}. We then apply a Gaussian #' kernel width a bandwidth defined by \code{sd.weight} and normalize across #' all \code{k.weight} anchors.} #' } #' #' The main difference between label transfer (classification) and feature #' imputation is what gets multiplied by the weights matrix. For label transfer, #' we perform the following steps: #' #' \itemize{ #' \item{Create a binary classification matrix, the rows corresponding to each #' possible class and the columns corresponding to the anchors. If the #' reference cell in the anchor pair is a member of a certain class, that #' matrix entry is filled with a 1, otherwise 0.} #' \item{Multiply this classification matrix by the transpose of weights #' matrix to compute a prediction score for each class for each cell in the #' query dataset.} #' } #' #' For feature imputation, we perform the following step: #' \itemize{ #' \item{Multiply the expression matrix for the reference anchor cells by the #' weights matrix. 
This returns a predicted expression matrix for the #' specified features for each cell in the query dataset.} #' } #' #' #' @param anchorset An \code{\link{AnchorSet}} object generated by #' \code{\link{FindTransferAnchors}} #' @param refdata Data to transfer. This can be specified in one of two ways: #' \itemize{ #' \item{The reference data itself as either a vector where the names #' correspond to the reference cells, or a matrix, where the column names #' correspond to the reference cells.} #' \item{The name of the metadata field or assay from the reference object #' provided. This requires the reference parameter to be specified. If pulling #' assay data in this manner, it will pull the data from the data slot. To #' transfer data from other slots, please pull the data explicitly with #' \code{\link{GetAssayData}} and provide that matrix here.} #' } #' @param reference Reference object from which to pull data to transfer #' @param query Query object into which the data will be transferred. #' @param query.assay Name of the Assay to use from query #' @param weight.reduction Dimensional reduction to use for the weighting #' anchors. Options are: #' \itemize{ #' \item{pcaproject: Use the projected PCA used for anchor building} #' \item{lsiproject: Use the projected LSI used for anchor building} #' \item{pca: Use an internal PCA on the query only} #' \item{cca: Use the CCA used for anchor building} #' \item{custom DimReduc: User provided \code{\link{DimReduc}} object #' computed on the query cells} #' } #' @param l2.norm Perform L2 normalization on the cell embeddings after #' dimensional reduction #' @param dims Set of dimensions to use in the anchor weighting procedure. If #' NULL, the same dimensions that were used to find anchors will be used for #' weighting. #' @param k.weight Number of neighbors to consider when weighting anchors #' @param sd.weight Controls the bandwidth of the Gaussian kernel for weighting #' @param eps Error bound on the neighbor finding algorithm (from #' \code{\link{RANN}}) #' @param n.trees More trees gives higher precision when using annoy approximate #' nearest neighbor search #' @param verbose Print progress bars and output #' @param slot Slot to store the imputed data. Must be either "data" (default) #' or "counts" #' @param prediction.assay Return an \code{Assay} object with the prediction #' scores for each class stored in the \code{data} slot. #' @param only.weights Only return weights matrix #' @param store.weights Optionally store the weights matrix used for predictions #' in the returned query object. #' #' @return #' If \code{query} is not provided, for the categorical data in \code{refdata}, #' returns a data.frame with label predictions. If \code{refdata} is a matrix, #' returns an Assay object where the imputed data has been stored in the #' provided slot. #' #' If \code{query} is provided, a modified query object is returned. For #' the categorical data in refdata, prediction scores are stored as Assays #' (prediction.score.NAME) and two additional metadata fields: predicted.NAME #' and predicted.NAME.score which contain the class prediction and the score for #' that predicted class. For continuous data, an Assay called NAME is returned. #' NAME here corresponds to the name of the element in the refdata list. #' #' #' @references Stuart T, Butler A, et al. Comprehensive Integration of #' Single-Cell Data. Cell. 
2019;177:1888-1902 \doi{10.1016/j.cell.2019.05.031} #' #' @importFrom Matrix t #' #' @export #' #' @concept integration #' #' @examples #' \dontrun{ #' # to install the SeuratData package see https://github.com/satijalab/seurat-data #' library(SeuratData) #' data("pbmc3k") #' #' # for demonstration, split the object into reference and query #' pbmc.reference <- pbmc3k[, 1:1350] #' pbmc.query <- pbmc3k[, 1351:2700] #' #' # perform standard preprocessing on each object #' pbmc.reference <- NormalizeData(pbmc.reference) #' pbmc.reference <- FindVariableFeatures(pbmc.reference) #' pbmc.reference <- ScaleData(pbmc.reference) #' #' pbmc.query <- NormalizeData(pbmc.query) #' pbmc.query <- FindVariableFeatures(pbmc.query) #' pbmc.query <- ScaleData(pbmc.query) #' #' # find anchors #' anchors <- FindTransferAnchors(reference = pbmc.reference, query = pbmc.query) #' #' # transfer labels #' predictions <- TransferData(anchorset = anchors, refdata = pbmc.reference$seurat_annotations) #' pbmc.query <- AddMetaData(object = pbmc.query, metadata = predictions) #' } #' TransferData <- function( anchorset, refdata, reference = NULL, query = NULL, query.assay = NULL, weight.reduction = 'pcaproject', l2.norm = FALSE, dims = NULL, k.weight = 50, sd.weight = 1, eps = 0, n.trees = 50, verbose = TRUE, slot = "data", prediction.assay = FALSE, only.weights = FALSE, store.weights = TRUE ) { combined.ob <- slot(object = anchorset, name = "object.list")[[1]] anchors <- slot(object = anchorset, name = "anchors") reference.cells <- slot(object = anchorset, name = "reference.cells") query.cells <- slot(object = anchorset, name = "query.cells") if (!is.null(query)) { query.assay <- query.assay %||% DefaultAssay(query) } label.transfer <- list() ValidateParams_TransferData( anchorset = anchorset, combined.ob = combined.ob, anchors = anchors, reference.cells = reference.cells, query.cells = query.cells, refdata = refdata, reference = reference, query = query, query.assay = query.assay, weight.reduction = weight.reduction, l2.norm = l2.norm, dims = dims, k.weight = k.weight, sd.weight = sd.weight, eps = eps, n.trees = n.trees, verbose = verbose, only.weights = only.weights, slot = slot, prediction.assay = prediction.assay, label.transfer = label.transfer ) if (!inherits(x = weight.reduction, what = "DimReduc") && weight.reduction == 'pca') { if (verbose) { message("Running PCA on query dataset") } features <- slot(object = anchorset, name = "anchor.features") query.ob <- query DefaultAssay(query.ob) <- query.assay query.ob <- ScaleData(object = query.ob, features = features, verbose = FALSE) query.ob <- RunPCA(object = query.ob, npcs = max(dims), features = features, verbose = FALSE) query.pca <- Embeddings(query.ob[['pca']]) rownames(x = query.pca) <- paste0(rownames(x = query.pca), "_query") #fill with 0s ref.pca <- matrix( data = 0, nrow = length(x = reference.cells), ncol = ncol(x = query.pca), dimnames = list(reference.cells, colnames(x = query.pca)) ) rm(query.ob) combined.pca.embeddings <- rbind(ref.pca, query.pca)[colnames(x = combined.ob), ] combined.pca <- CreateDimReducObject( embeddings = combined.pca.embeddings, key = "PC_", assay = DefaultAssay(object = combined.ob) ) combined.ob[["pca"]] <- combined.pca if (l2.norm) { combined.ob <- L2Dim(object = combined.ob, reduction = 'pca') } } if (!inherits(x = weight.reduction, what = "DimReduc") && weight.reduction == "lsi") { if (!("lsi" %in% Reductions(object = query))) { stop("Requested lsi for weight.reduction, but lsi not stored in query object.") } else { 
weight.reduction <- query[["lsi"]] } } if (inherits(x = weight.reduction, what = "DimReduc")) { weight.reduction <- RenameCells( object = weight.reduction, new.names = paste0(Cells(x = weight.reduction), "_query") ) } else { if (l2.norm) { weight.reduction.l2 <- paste0(weight.reduction, ".l2") if (weight.reduction.l2 %in% Reductions(object = combined.ob)) { combined.ob <- L2Dim(object = combined.ob, reduction = weight.reduction) } weight.reduction <- weight.reduction.l2 } weight.reduction <- combined.ob[[weight.reduction]] } dims <- dims %||% seq_len(length.out = ncol(x = weight.reduction)) if (max(dims) > ncol(x = weight.reduction)) { stop("dims is larger than the number of available dimensions in ", "weight.reduction (", ncol(x = weight.reduction), ").", call. = FALSE) } combined.ob <- SetIntegrationData( object = combined.ob, integration.name = "integrated", slot = 'anchors', new.data = anchors ) combined.ob <- SetIntegrationData( object = combined.ob, integration.name = "integrated", slot = 'neighbors', new.data = list('cells1' = reference.cells, 'cells2' = query.cells) ) combined.ob <- FindIntegrationMatrix( object = combined.ob, verbose = verbose ) combined.ob <- FindWeights( object = combined.ob, reduction = weight.reduction, dims = dims, k = k.weight, sd.weight = sd.weight, eps = eps, n.trees = n.trees, verbose = verbose ) weights <- GetIntegrationData( object = combined.ob, integration.name = "integrated", slot = 'weights' ) if (only.weights) { if (is.null(x = query)) { return(weights) } else { slot(object = query, name = "tools")[["TransferData"]] <- list(weights.matrix = weights) return(query) } } anchors <- as.data.frame(x = anchors) query.cells <- unname(obj = sapply( X = query.cells, FUN = function(x) gsub(pattern = "_query", replacement = "", x = x) )) transfer.results <- list() for (rd in 1:length(x = refdata)) { if (isFALSE(x = refdata[[rd]])) { transfer.results[[rd]] <- NULL next } rd.name <- names(x = refdata)[rd] # case for projection if (label.transfer[[rd]]) { anchors$id1 <- refdata[[rd]][anchors[, "cell1"]] reference.ids <- factor(x = anchors$id1, levels = unique(x = refdata[[rd]])) possible.ids <- levels(x = reference.ids) prediction.mat <- matrix( nrow = nrow(x = anchors), ncol = length(x = possible.ids), data = 0 ) for (i in 1:length(x = possible.ids)) { prediction.mat[which(reference.ids == possible.ids[i]), i] = 1 } if (verbose) { message("Predicting cell labels") } prediction.scores <- t(x = weights) %*% prediction.mat colnames(x = prediction.scores) <- possible.ids rownames(x = prediction.scores) <- query.cells if ("bridge.sets" %in% names(anchorset@weight.reduction@misc)) { bridge.weight <- anchorset@weight.reduction@misc$bridge.sets bridge.prediction.matrix <- as.sparse( x = dummy_cols( refdata[[rd]][ bridge.weight$bridge.ref_anchor ] )[, -1] ) colnames(bridge.prediction.matrix) <- gsub( pattern = ".data_", replacement = "", x = colnames(bridge.prediction.matrix) ) extra.id <- setdiff(possible.ids, colnames(bridge.prediction.matrix)) if (length(extra.id) > 0) { extra.prediction <- as.sparse(x = matrix( data = 0, nrow = nrow(bridge.prediction.matrix), ncol = length(extra.id) )) colnames(extra.prediction) <- extra.id bridge.prediction.matrix <- cbind( bridge.prediction.matrix, extra.prediction ) } bridge.prediction.matrix <- bridge.prediction.matrix[,possible.ids, drop = FALSE] bridge.prediction.scores <- t(bridge.weight$query.weights) %*% (t(bridge.weight$bridge.weights) %*% bridge.prediction.matrix)[bridge.weight$query.ref_anchor,] prediction.scores <- 
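# The prediction scores assembled here follow the scheme documented for
# TransferData above: the binary anchor-by-class indicator matrix
# (prediction.mat) is premultiplied by t(weights), yielding a
# query-cell-by-class score matrix; when bridge integration sets are present,
# the bridge-derived scores are averaged in before the top-scoring class is
# reported as predicted.id.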
(prediction.scores + bridge.prediction.scores)/2 prediction.scores <- as.matrix(x = prediction.scores) } prediction.ids <- possible.ids[apply(X = prediction.scores, MARGIN = 1, FUN = which.max)] prediction.ids <- as.character(prediction.ids) prediction.max <- apply(X = prediction.scores, MARGIN = 1, FUN = max) if (is.null(x = query)) { prediction.scores <- cbind(prediction.scores, max = prediction.max) } predictions <- data.frame( predicted.id = prediction.ids, prediction.score = as.matrix(prediction.scores), row.names = query.cells, stringsAsFactors = FALSE ) if (prediction.assay || !is.null(x = query)) { predictions <- CreateAssayObject( data = t(x = as.matrix(x = prediction.scores)), check.matrix = FALSE ) Key(object = predictions) <- Key(paste0("predictionscore", rd.name), quiet = TRUE) } if (is.null(x = query)) { transfer.results[[rd]] <- predictions } else { query <- AddMetaData(object = query, metadata = prediction.max, col.name = paste0("predicted.", rd.name, ".score")) query <- AddMetaData(object = query, metadata = prediction.ids, col.name = paste0("predicted.", rd.name)) query[[paste0("prediction.score.", rd.name)]] <- predictions } } else { # case for transferring features reference.cell.indices <- reference.cells[anchors$cell1] refdata.anchors <- refdata[[rd]][, reference.cell.indices] nfeatures <- nrow(x = refdata[[rd]]) if (verbose) { message(paste0("Transfering ", nfeatures, " features onto reference data")) } new.data <- refdata.anchors %*% weights rownames(x = new.data) <- rownames(x = refdata[[rd]]) colnames(x = new.data) <- query.cells if (inherits(x = new.data, what = "Matrix")) { new.data <- as.sparse(x = new.data) } if (slot == "counts") { new.assay <- CreateAssayObject(counts = new.data, check.matrix = FALSE) } else if (slot == "data") { new.assay <- CreateAssayObject(data = new.data, check.matrix = FALSE) } Key(object = new.assay) <- Key(rd.name, quiet = TRUE) if (is.null(x = query)) { transfer.results[[rd]] <- new.assay } else { if (rd.name %in% Assays(object = query)) { message( rd.name, " already present in query. ", "Storing as ", paste0("predicted_", rd.name) ) rd.name <- paste0("predicted_", rd.name) } query[[rd.name]] <- new.assay } } } if (is.null(x = query)) { names(x = transfer.results) <- names(x = refdata) if (length(x = transfer.results) == 1) { transfer.results <- transfer.results[[1]] } return(transfer.results) } else { if (store.weights) { slot(object = query, name = "tools")[["TransferData"]] <- list(weights.matrix = weights) } return(query) } } #%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% # Methods for Seurat-defined generics #%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% #' @param object.list List of Seurat objects #' @rdname AnnotateAnchors #' @export #' @method AnnotateAnchors default #' @concept integration #' AnnotateAnchors.default <- function( anchors, vars = NULL, slot = NULL, object.list, assay = NULL, ... 
) { # reorder columns anchors <- anchors[, c("cell1", "dataset1", "cell2", "dataset2", "score")] colnames(x = anchors)[5] <- "anchor.score" cell.names <- lapply(X = object.list, FUN = Cells) cell1.names <- character(length = nrow(x = anchors)) for (dataset in unique(x = anchors$dataset1)) { dataset.cells <- which(x = anchors$dataset1 == dataset) cell1.names[dataset.cells] <- cell.names[[dataset]][anchors[dataset.cells, "cell1"]] } anchors$cell1 <- cell1.names cell2.names <- character(length(x = nrow(x = anchors))) for (dataset in unique(x = anchors$dataset2)) { dataset.cells <- which(x = anchors$dataset2 == dataset) cell2.names[dataset.cells] <- cell.names[[dataset]][anchors[dataset.cells, "cell2"]] } anchors$cell2 <- cell2.names slot <- slot %||% "data" assay <- assay %||% sapply(X = object.list, FUN = DefaultAssay) if (length(x = assay) == 1) { assay <- rep(x = assay, times = length(x = object.list)) } if (length(x = assay) != length(x = object.list)) { stop("Number of assays provided should either be one or the length of object.list") } for (ob in 1:length(x = object.list)) { DefaultAssay(object = object.list[[ob]]) <- assay[ob] } if (length(x = slot) == 1) { slot <- rep(x = slot, times = length(x = vars)) } if (length(x = vars) > 0) { for(v in 1:length(x = vars)) { var <- vars[v] var.list <- lapply(X = object.list, FUN = function(x) { tryCatch( expr = FetchData(object = x, vars = var, slot = slot[v]), error = function(e) { data.fetched <- as.data.frame( x = rep(x = NA, times = ncol(x = x)), row.names = Cells(x = x), stringsAsFactors = FALSE ) colnames(x = data.fetched) <- var return(data.fetched) } ) }) if (all(unlist(x = lapply(X = var.list, FUN = isFALSE)))) { warning( var, " not found in all objects", call. = FALSE, immediate. = TRUE ) next } if (any(unlist(x = lapply(X = var.list, FUN = isFALSE)))) { warning( var, " not in all objects. Filling missing objects with NA", call. = FALSE, immediate. = TRUE ) } if (is.null(x = names(x = object.list))) { names(x = var.list) <- 1:length(x = object.list) } else { names(x = var.list) <- names(x = object.list) } for(i in c(1, 2)) { cell <- paste0("cell", i) if (is.factor(x = anchors[, cell])) { anchors[, cell] <- as.character(x = anchors[, cell]) } for (j in unique(x = anchors[, paste0("dataset", i)])) { var.df <- var.list[[j]] dataset.cells <- which(x = anchors[, paste0("dataset", i)] == j) anchors[dataset.cells, paste0(cell, ".", var)] <- var.df[anchors[, cell][dataset.cells], ] } } # column specifying whether the annotation matches across pair of datasets anchors[, paste0(var, ".match")] <- anchors[, paste0("cell1.", var)] == anchors[, paste0("cell2.", var)] } } return(anchors) } #' @rdname AnnotateAnchors #' @export #' @method AnnotateAnchors IntegrationAnchorSet #' AnnotateAnchors.IntegrationAnchorSet <- function( anchors, vars = NULL, slot = NULL, object.list = NULL, assay = NULL, ... 
) { anchor.df <- slot(object = anchors, name = 'anchors') object.list <- object.list %||% slot(object = anchors, name = 'object.list') anchor.df <- as.data.frame(x = anchor.df) anchor.df <- AnnotateAnchors( anchors = anchor.df, vars = vars, slot = slot, object.list = object.list, assay = assay ) return(anchor.df) } #' @param reference Reference object used in \code{\link{FindTransferAnchors}} #' @param query Query object used in \code{\link{FindTransferAnchors}} #' @rdname AnnotateAnchors #' @export #' @method AnnotateAnchors TransferAnchorSet #' AnnotateAnchors.TransferAnchorSet <- function( anchors, vars = NULL, slot = NULL, reference = NULL, query = NULL, assay = NULL, ... ) { anchor.df <- slot(object = anchors, name = 'anchors') if (class(x = reference) != class(x = query)) { stop("If setting reference/query, please set both parameters.") } if (is.null(x = reference)) { object.list <- slot(object = anchors, name = 'object.list')[[1]] reference.cells <- slot(object = anchors, name = "reference.cells") reference <- subset(x = object.list, cells = reference.cells, recompute = FALSE) reference <- RenameCells( object = reference, new.names = gsub(pattern = "_reference$", replacement = "", x = reference.cells) ) query.cells <- slot(object = anchors, name = "query.cells") query <- subset(x = object.list, cells = query.cells, recompute = FALSE) query <- RenameCells( object = query, new.names = gsub(pattern = "_query$", replacement = "", x = query.cells) ) } object.list <- list(reference = reference, query = query) anchor.df <- as.data.frame(x = anchor.df) anchor.df$dataset1 <- "reference" anchor.df$dataset2 <- "query" anchor.df <- AnnotateAnchors( anchors = anchor.df, vars = vars, slot = slot, object.list = object.list, assay = assay ) return(anchor.df) } #%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% # Internal #%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% # Add dataset number and remove cell offset # # Record which dataset number in the original list of Seurat objects # each anchor cell came from, and correct the cell index so it corresponds to # the position of the anchor cell in its own dataset # # @param anchor.df Dataframe of anchors # @param offsets size of each dataset in anchor dataframe # @param obj.length Vector of object lengths # # @return Anchor dataframe with additional columns corresponding to the dataset # of each cell AddDatasetID <- function( anchor.df, offsets, obj.lengths ) { ndataset <- length(x = offsets) total.cells <- sum(obj.lengths) offsets <- c(offsets, total.cells) row.offset <- rep.int(x = offsets[1:ndataset], times = obj.lengths) dataset <- rep.int(x = 1:ndataset, times = obj.lengths) anchor.df <- data.frame( 'cell1' = anchor.df[, 1] - row.offset[anchor.df[, 1]], 'cell2' = anchor.df[, 2] - row.offset[anchor.df[, 2]], 'score' = anchor.df[, 3], 'dataset1' = dataset[anchor.df[, 1]], 'dataset2' = dataset[anchor.df[, 2]] ) return(anchor.df) } # Adjust sample tree to only include given reference objects # # @param x A sample tree # @param reference.objects a sorted list of reference object IDs # AdjustSampleTree <- function(x, reference.objects) { for (i in 1:nrow(x = x)) { obj.id <- -(x[i, ]) if (obj.id[[1]] > 0) { x[i, 1] <- -(reference.objects[[obj.id[[1]]]]) } if (obj.id[[2]] > 0) { x[i, 2] <- -(reference.objects[[obj.id[[2]]]]) } } return(x) } # Build tree of datasets based on cell similarity # # @param similarity.matrix Dataset similarity matrix # # @return Returns a heirarchical clustering of 
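# Illustrative sketch of the offset arithmetic performed by AddDatasetID above,
# using hypothetical sizes (this toy code is not executed by the package):
# cells are indexed consecutively across the merged datasets, so subtracting
# the per-dataset offset recovers each anchor cell's position in its own
# dataset.
offsets.toy <- c(0, 100)                          # dataset 2 starts after 100 cells
obj.lengths.toy <- c(100, 50)                     # dataset sizes
row.offset.toy <- rep.int(x = offsets.toy, times = obj.lengths.toy)
dataset.toy <- rep.int(x = 1:2, times = obj.lengths.toy)
global.idx.toy <- c(5, 120)                       # anchor cells in merged indexing
global.idx.toy - row.offset.toy[global.idx.toy]   # 5 and 20: within-dataset indices
dataset.toy[global.idx.toy]                       # 1 and 2: dataset of origin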
datasets # #' @importFrom stats hclust # BuildSampleTree <- function(similarity.matrix) { dist.mat <- as.dist(m = 1 / similarity.matrix) clusters <- hclust(d = dist.mat) return(clusters$merge) } # Construct nearest neighbor matrix from nn.idx # # @param nn.idx Nearest neighbor index matrix (nn.idx from RANN) # @param offset1 Offsets for the first neighbor # @param offset2 Offsets for the second neighbor # # @return returns a sparse matrix representing the NN matrix # ConstructNNMat <- function(nn.idx, offset1, offset2, dims) { k <- ncol(x = nn.idx) j <- as.numeric(x = t(x = nn.idx)) + offset2 i <- ((1:length(x = j)) - 1) %/% k + 1 + offset1 nn.mat <- sparseMatrix(i = i, j = j, x = 1, dims = dims) return(nn.mat) } # Count anchors between all datasets # # Counts anchors between each dataset and scales based on total number of cells # in the datasets # # @param anchor.df Matrix of anchors # @param offsets Dataset sizes in anchor matrix. Used to identify boundaries of # each dataset in matrix, so that total pairwise anchors between all datasets # can be counted # # @return Returns a similarity matrix # CountAnchors <- function( anchor.df, offsets, obj.lengths ) { similarity.matrix <- matrix(data = 0, ncol = length(x = offsets), nrow = length(x = offsets)) similarity.matrix[upper.tri(x = similarity.matrix, diag = TRUE)] <- NA total.cells <- sum(obj.lengths) offsets <- c(offsets, total.cells) for (i in 1:nrow(x = similarity.matrix)){ for (j in 1:ncol(x = similarity.matrix)){ if (!is.na(x = similarity.matrix[i, j])){ relevant.rows <- anchor.df[(anchor.df$dataset1 %in% c(i, j)) & (anchor.df$dataset2 %in% c(i, j)), ] score <- nrow(x = relevant.rows) ncell <- min(obj.lengths[[i]], obj.lengths[[j]]) similarity.matrix[i, j] <- score / ncell } } } return(similarity.matrix) } FilterAnchors <- function( object, assay = NULL, slot = "data", integration.name = 'integrated', features = NULL, k.filter = 200, nn.method = "annoy", n.trees = 50, eps = 0, verbose = TRUE ) { if (verbose) { message("Filtering anchors") } assay <- assay %||% DefaultAssay(object = object) features <- features %||% VariableFeatures(object = object) if (length(x = features) == 0) { stop("No features provided and no VariableFeatures computed.") } features <- unique(x = features) neighbors <- GetIntegrationData(object = object, integration.name = integration.name, slot = 'neighbors') nn.cells1 <- neighbors$cells1 nn.cells2 <- neighbors$cells2 if (min(length(x = nn.cells1), length(x = nn.cells2)) < k.filter) { warning("Number of anchor cells is less than k.filter. 
Retaining all anchors.") k.filter <- min(length(x = nn.cells1), length(x = nn.cells2)) anchors <- GetIntegrationData(object = object, integration.name = integration.name, slot = "anchors") } else { cn.data1 <- L2Norm( mat = as.matrix(x = t(x = GetAssayData( object = object[[assay[1]]], slot = slot)[features, nn.cells1])), MARGIN = 1) cn.data2 <- L2Norm( mat = as.matrix(x = t(x = GetAssayData( object = object[[assay[2]]], slot = slot)[features, nn.cells2])), MARGIN = 1) nn <- NNHelper( data = cn.data2[nn.cells2, ], query = cn.data1[nn.cells1, ], k = k.filter, method = nn.method, n.trees = n.trees, eps = eps ) anchors <- GetIntegrationData(object = object, integration.name = integration.name, slot = "anchors") position <- sapply(X = 1:nrow(x = anchors), FUN = function(x) { which(x = anchors[x, "cell2"] == Indices(object = nn)[anchors[x, "cell1"], ])[1] }) anchors <- anchors[!is.na(x = position), ] if (verbose) { message("\tRetained ", nrow(x = anchors), " anchors") } } object <- SetIntegrationData( object = object, integration.name = integration.name, slot = "anchors", new.data = anchors ) return(object) } FindAnchors_v3 <- function( object.pair, assay, slot, cells1, cells2, internal.neighbors, reduction, reduction.2 = character(), nn.reduction = reduction, dims = 1:10, k.anchor = 5, k.filter = 200, k.score = 30, max.features = 200, nn.method = "annoy", n.trees = 50, nn.idx1 = NULL, nn.idx2 = NULL, eps = 0, projected = FALSE, verbose = TRUE ) { # compute local neighborhoods, use max of k.anchor and k.score if also scoring to avoid # recomputing neighborhoods k.neighbor <- k.anchor if (!is.na(x = k.score)) { k.neighbor <- max(k.anchor, k.score) } object.pair <- FindNN( object = object.pair, cells1 = cells1, cells2 = cells2, internal.neighbors = internal.neighbors, dims = dims, reduction = reduction, reduction.2 = reduction.2, nn.reduction = nn.reduction, k = k.neighbor, nn.method = nn.method, n.trees = n.trees, nn.idx1 = nn.idx1, nn.idx2 = nn.idx2, eps = eps, verbose = verbose ) object.pair <- FindAnchorPairs( object = object.pair, integration.name = "integrated", k.anchor = k.anchor, verbose = verbose ) if (!is.na(x = k.filter)) { top.features <- TopDimFeatures( object = object.pair, reduction = reduction, dims = dims, features.per.dim = 100, max.features = max.features, projected = projected ) if(length(top.features) == 2){ top.features <- intersect(top.features[[1]], top.features[[2]]) } else{ top.features <- as.vector(top.features) } top.features <- top.features[top.features %in% rownames(x = object.pair)] object.pair <- FilterAnchors( object = object.pair, assay = assay, slot = slot, integration.name = 'integrated', features = top.features, k.filter = k.filter, nn.method = nn.method, n.trees = n.trees, eps = eps, verbose = verbose ) } if (!is.na(x = k.score)) { object.pair = ScoreAnchors( object = object.pair, assay = DefaultAssay(object = object.pair), integration.name = "integrated", verbose = verbose, k.score = k.score ) } anchors <- GetIntegrationData( object = object.pair, integration.name = 'integrated', slot = 'anchors' ) return(anchors) } FindAnchors_v5 <- function( object.pair, assay, slot, cells1, cells2, internal.neighbors, reduction, reduction.2 = character(), nn.reduction = reduction, dims = 1:10, k.anchor = 5, k.filter = 200, k.score = 30, max.features = 200, nn.method = "annoy", n.trees = 50, nn.idx1 = NULL, nn.idx2 = NULL, eps = 0, projected = FALSE, verbose = TRUE ) { ref.assay <- assay[1] query.assay <- assay[2] reference.layers <- Layers(object.pair[[ref.assay]], 
search = 'data')[1] query.layers <- setdiff(Layers(object.pair[[query.assay]], search = 'data'), reference.layers) anchor.list <- list() for (i in seq_along(query.layers)) { cells2.i <- Cells( x = object.pair[[query.assay]], layer = query.layers[i] ) object.pair.i <- subset( x = object.pair, cells = c(cells1, cells2.i) ) object.pair.i <- JoinLayers(object.pair.i) anchor.list[[i]] <- FindAnchors_v3( object.pair = object.pair.i, assay = assay, slot = slot, cells1 = cells1, cells2 = cells2.i, internal.neighbors = internal.neighbors, reduction = reduction, reduction.2 = reduction.2, nn.reduction = nn.reduction, dims = dims, k.anchor = k.anchor, k.filter = k.filter, k.score = k.score, max.features = max.features, nn.method = nn.method, n.trees = n.trees, nn.idx1 = nn.idx1, nn.idx2 = nn.idx2, eps = eps, projected = projected, verbose = verbose ) anchor.list[[i]][,2] <- match(x = cells2.i, table = cells2)[anchor.list[[i]][,2]] anchor.list[[i]] <- t(anchor.list[[i]]) } anchors <- t(x = matrix( data = unlist(x = anchor.list), nrow = 3, ncol = sum( sapply(X = anchor.list, FUN = function(x) ncol(x)) ) ) ) colnames(anchors) <- c('cell1', 'cell2', 'score') return(anchors) } FindAnchors <- function( object.pair, assay, slot, cells1, cells2, internal.neighbors, reduction, reduction.2 = character(), nn.reduction = reduction, dims = 1:10, k.anchor = 5, k.filter = 200, k.score = 30, max.features = 200, nn.method = "annoy", n.trees = 50, nn.idx1 = NULL, nn.idx2 = NULL, eps = 0, projected = FALSE, verbose = TRUE ) { if (inherits(x = object.pair[[assay[1]]], what = 'Assay')) { FindAnchors.function <- FindAnchors_v3 } else if (inherits(x = object.pair[[assay[1]]], what = 'Assay5')) { FindAnchors.function <- FindAnchors_v5 } anchors <- FindAnchors.function( object.pair = object.pair, assay = assay, slot = slot, cells1 = cells1, cells2 = cells2, internal.neighbors = internal.neighbors, reduction = reduction, reduction.2 = reduction.2, nn.reduction = nn.reduction, dims = dims, k.anchor = k.anchor, k.filter = k.filter, k.score = k.score, max.features = max.features, nn.method = nn.method, n.trees = n.trees, nn.idx1 = nn.idx1, nn.idx2 = nn.idx2, eps = eps, projected = projected, verbose = verbose ) return(anchors) } # Find Anchor pairs # FindAnchorPairs <- function( object, integration.name = 'integrated', k.anchor = 5, verbose = TRUE ) { neighbors <- GetIntegrationData(object = object, integration.name = integration.name, slot = 'neighbors') max.nn <- c(ncol(x = neighbors$nnab), ncol(x = neighbors$nnba)) if (any(k.anchor > max.nn)) { message(paste0('warning: requested k.anchor = ', k.anchor, ', only ', min(max.nn), ' in dataset')) k.anchor <- min(max.nn) } if (verbose) { message("Finding anchors") } # convert cell name to neighbor index nn.cells1 <- neighbors$cells1 nn.cells2 <- neighbors$cells2 cell1.index <- suppressWarnings(which(colnames(x = object) == nn.cells1, arr.ind = TRUE)) ncell <- 1:nrow(x = neighbors$nnab) ncell <- ncell[ncell %in% cell1.index] anchors <- list() # pre allocate vector anchors$cell1 <- rep(x = 0, length(x = ncell) * 5) anchors$cell2 <- anchors$cell1 anchors$score <- anchors$cell1 + 1 idx <- 0 indices.ab <- Indices(object = neighbors$nnab) indices.ba <- Indices(object = neighbors$nnba) for (cell in ncell) { neighbors.ab <- indices.ab[cell, 1:k.anchor] mutual.neighbors <- which( x = indices.ba[neighbors.ab, 1:k.anchor, drop = FALSE] == cell, arr.ind = TRUE )[, 1] for (i in neighbors.ab[mutual.neighbors]){ idx <- idx + 1 anchors$cell1[idx] <- cell anchors$cell2[idx] <- i anchors$score[idx] 
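# Anchor pairs are mutual nearest neighbors: cell i from the first dataset and
# cell j from the second are recorded as an anchor only when j is among the
# k.anchor nearest neighbors of i (indices.ab) and i is among the k.anchor
# nearest neighbors of j (indices.ba); each such pair receives an initial
# score of 1 here and is rescored later by ScoreAnchors.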
<- 1 } } anchors$cell1 <- anchors$cell1[1:idx] anchors$cell2 <- anchors$cell2[1:idx] anchors$score <- anchors$score[1:idx] anchors <- t(x = do.call(what = rbind, args = anchors)) anchors <- as.matrix(x = anchors) object <- SetIntegrationData( object = object, integration.name = integration.name, slot = 'anchors', new.data = anchors ) if (verbose) { message(paste0("\tFound ", nrow(x = anchors), " anchors")) } return(object) } FindIntegrationMatrix <- function( object, assay = NULL, integration.name = 'integrated', features.integrate = NULL, verbose = TRUE ) { assay <- assay %||% DefaultAssay(object = object) neighbors <- GetIntegrationData(object = object, integration.name = integration.name, slot = 'neighbors') nn.cells1 <- neighbors$cells1 nn.cells2 <- neighbors$cells2 if (inherits(x = object[[assay[1]]], what = 'Assay5')) { object <- JoinLayers(object) } anchors <- GetIntegrationData( object = object, integration.name = integration.name, slot = 'anchors' ) if (verbose) { message("Finding integration vectors") } features.integrate <- features.integrate %||% rownames( x = GetAssayData(object = object, assay = assay, slot = "data") ) data.use1 <- t(x = GetAssayData( object = object, assay = assay, slot = "data")[features.integrate, nn.cells1] ) data.use2 <- t(x = GetAssayData( object = object, assay = assay, slot = "data")[features.integrate, nn.cells2] ) anchors1 <- nn.cells1[anchors[, "cell1"]] anchors2 <- nn.cells2[anchors[, "cell2"]] data.use1 <- data.use1[anchors1, ] data.use2 <- data.use2[anchors2, ] integration.matrix <- data.use2 - data.use1 object <- SetIntegrationData( object = object, integration.name = integration.name, slot = 'integration.matrix', new.data = integration.matrix ) return(object) } # Find nearest neighbors # FindNN <- function( object, cells1 = NULL, cells2 = NULL, internal.neighbors, grouping.var = NULL, dims = 1:10, reduction = "cca.l2", reduction.2 = character(), nn.dims = dims, nn.reduction = reduction, k = 300, nn.method = "annoy", n.trees = 50, nn.idx1 = NULL, nn.idx2 = NULL, eps = 0, integration.name = 'integrated', verbose = TRUE ) { if (xor(x = is.null(x = cells1), y = is.null(x = cells2))) { stop("cells1 and cells2 must both be specified") } if (!is.null(x = cells1) && !is.null(x = cells2) && !is.null(x = grouping.var)) { stop("Specify EITHER grouping.var or cells1/2.") } if (is.null(x = cells1) && is.null(x = cells2) && is.null(x = grouping.var)) { stop("Please set either cells1/2 or grouping.var") } if (!is.null(x = grouping.var)) { if (nrow(x = unique(x = object[[grouping.var]])) != 2) { stop("Number of groups in grouping.var not equal to 2.") } groups <- names(x = sort(x = table(object[[grouping.var]]), decreasing = TRUE)) cells1 <- colnames(x = object)[object[[grouping.var]] == groups[[1]]] cells2 <- colnames(x = object)[object[[grouping.var]] == groups[[2]]] } if (verbose) { message("Finding neighborhoods") } dim.data.self <- Embeddings(object = object[[nn.reduction]])[, nn.dims] if (!is.null(x = internal.neighbors[[1]])) { nnaa <- internal.neighbors[[1]] } else { dims.cells1.self <- dim.data.self[cells1, ] nnaa <- NNHelper( data = dims.cells1.self, k = k + 1, method = nn.method, n.trees = n.trees, eps = eps, cache.index = TRUE, index = nn.idx1 ) nn.idx1 <- Index(object = nnaa) } if (!is.null(x = internal.neighbors[[2]])) { nnbb <- internal.neighbors[[2]] } else { dims.cells2.self <- dim.data.self[cells2, ] nnbb <- NNHelper( data = dims.cells2.self, k = k + 1, method = nn.method, n.trees = n.trees, eps = eps, cache.index = TRUE ) nn.idx2 <- 
Index(object = nnbb) } if (length(x = reduction.2) > 0) { nnab <- NNHelper( data = Embeddings(object = object[[reduction.2]])[cells2, nn.dims], query = Embeddings(object = object[[reduction.2]])[cells1, nn.dims], k = k, method = nn.method, n.trees = n.trees, eps = eps, index = if (reduction.2 == nn.reduction) nn.idx2 else NULL ) nnba <- NNHelper( data = Embeddings(object = object[[reduction]])[cells1, nn.dims], query = Embeddings(object = object[[reduction]])[cells2, nn.dims], k = k, method = nn.method, n.trees = n.trees, eps = eps, index = if (reduction == nn.reduction) nn.idx1 else NULL ) } else { dim.data.opposite <- Embeddings(object = object[[reduction]])[ ,dims] dims.cells1.opposite <- dim.data.opposite[cells1, ] dims.cells2.opposite <- dim.data.opposite[cells2, ] nnab <- NNHelper( data = dims.cells2.opposite, query = dims.cells1.opposite, k = k, method = nn.method, n.trees = n.trees, eps = eps, index = if (reduction == nn.reduction) nn.idx2 else NULL ) nnba <- NNHelper( data = dims.cells1.opposite, query = dims.cells2.opposite, k = k, method = nn.method, n.trees = n.trees, eps = eps, index = if (reduction == nn.reduction) nn.idx1 else NULL ) } object <- SetIntegrationData( object = object, integration.name = integration.name, slot = 'neighbors', new.data = list('nnaa' = nnaa, 'nnab' = nnab, 'nnba' = nnba, 'nnbb' = nnbb, 'cells1' = cells1, 'cells2' = cells2) ) return(object) } # @param reduction a DimReduc object containing cells in the query object # @param reverse Compute weights matrix for reference anchors that are nearest # to query cells. Used in mapping metric to perform projection of query cells # back from reference space. FindWeights <- function( object, reduction = NULL, assay = NULL, integration.name = 'integrated', dims = 1:10, features = NULL, k = 300, sd.weight = 1, nn.method = "annoy", n.trees = 50, eps = 0, reverse = FALSE, verbose = TRUE ) { if (verbose) { message("Finding integration vector weights") } if (is.null(x = reduction) & is.null(x = features)) { stop("Need to specify either dimension reduction object or a set of features") } assay <- assay %||% DefaultAssay(object = object) neighbors <- GetIntegrationData(object = object, integration.name = integration.name, slot = 'neighbors') nn.cells1 <- neighbors$cells1 nn.cells2 <- neighbors$cells2 anchors <- GetIntegrationData( object = object, integration.name = integration.name, slot = 'anchors' ) if (reverse) { anchors.cells2 <- nn.cells2[anchors[, "cell2"]] anchors.cells1 <- nn.cells1[anchors[, "cell1"]] to.keep <- !duplicated(x = anchors.cells1) anchors.cells1 <- anchors.cells1[to.keep] anchors.cells2 <- anchors.cells2[to.keep] if (length(anchors.cells1) < k || length(anchors.cells2) < k) { stop("Number of anchor cells is less than k.weight. 
Consider lowering k.weight to less than ", min(length(anchors.cells1), length(anchors.cells2)), " or increase k.anchor.") } if (is.null(x = features)) { data.use <- Embeddings(object = reduction)[nn.cells1, dims] data.use.query <- Embeddings(object = reduction)[nn.cells2, dims] } else { data.use <- t(x = GetAssayData( object = object, slot = 'data', assay = assay)[features, nn.cells1] ) data.use.query <- t(x = GetAssayData( object = object, slot = 'data', assay = assay)[features, nn.cells2] ) } knn_2_2 <- NNHelper( data = data.use[anchors.cells1, ], query = data.use.query, k = k, method = nn.method, n.trees = n.trees, eps = eps ) } else { anchors.cells2 <- unique(x = nn.cells2[anchors[, "cell2"]]) if (length(anchors.cells2) < k) { stop("Number of anchor cells is less than k.weight. Consider lowering k.weight to less than ", length(anchors.cells2), " or increase k.anchor.") } if (is.null(x = features)) { data.use <- Embeddings(reduction)[nn.cells2, dims] } else { data.use <- t(x = GetAssayData(object = object, slot = 'data', assay = assay)[features, nn.cells2]) } knn_2_2 <- NNHelper( data = data.use[anchors.cells2, ], query = data.use, k = k, method = nn.method, n.trees = n.trees, eps = eps ) } distances <- Distances(object = knn_2_2) distances <- 1 - (distances / distances[, ncol(x = distances)]) cell.index <- Indices(object = knn_2_2) integration.matrix <- GetIntegrationData( object = object, integration.name = integration.name, slot = "integration.matrix" ) weights <- FindWeightsC( cells2 = 0:(length(x = nn.cells2) - 1), distances = as.matrix(x = distances), anchor_cells2 = anchors.cells2, integration_matrix_rownames = rownames(x = integration.matrix), cell_index = cell.index, anchor_score = anchors[, "score"], min_dist = 0, sd = sd.weight, display_progress = verbose ) object <- SetIntegrationData( object = object, integration.name = integration.name, slot = 'weights', new.data = weights ) return(object) } # Find weight matrix between query and reference cells from a neighbor object # # FindWeightsNN <- function( nn.obj, query.cells, reference.cells, verbose = TRUE ) { distances <- Distances(object = nn.obj) distances <- 1 - (distances / distances[, ncol(x = distances)]) cell.index <- Indices(object = nn.obj) weights <- FindWeightsC( cells2 = 0:(length(query.cells) - 1), distances = as.matrix(x = distances), anchor_cells2 = reference.cells, integration_matrix_rownames = reference.cells, cell_index = cell.index, anchor_score = rep(1, length(reference.cells)), min_dist = 0, sd = 1, display_progress = verbose ) colnames(weights) <- query.cells return(weights) } # Work out the anchor cell offsets for given set of cells in anchor list # # @param anchors A dataframe of anchors, from AnchorSet object # @param dataset Dataset number (1 or 2) # @param cell Cell number (1 or 2) # @param cellnames.list List of cell names in all objects # @param cellnames list of cell names for only the object in question # # @return Returns a list of offsets # GetCellOffsets <- function(anchors, dataset, cell, cellnames.list, cellnames) { cell.id <- sapply(X = 1:nrow(x = anchors), FUN = function(x) { cellnames.list[[anchors[, dataset+3][x]]][anchors[, cell][x]] }) cell.offset <- sapply( X = 1:length(x = cell.id), FUN = function(x) { return(which(x = cellnames == cell.id[x])) } ) return(cell.offset) } # Map queries to reference # # Map query objects onto assembled reference dataset # # @param anchorset Anchorset found by FindIntegrationAnchors # @param reference Pre-integrated reference dataset to map query datasets 
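# Illustrative sketch of the anchor weighting performed in FindWeights above,
# with hypothetical distances (this toy code is not executed by the package):
# distances from each query cell to its k nearest anchors are rescaled by the
# k-th distance; in the package, the rescaled values are then passed through a
# Gaussian kernel with bandwidth sd.weight, multiplied by the anchor scores,
# and normalized inside the compiled FindWeightsC helper.
d.toy <- matrix(
  data = c(0.1, 0.4, 0.8,
           0.2, 0.5, 1.0),
  nrow = 2,
  byrow = TRUE
)                                              # 2 query cells x k = 3 anchors
sim.toy <- 1 - d.toy / d.toy[, ncol(d.toy)]    # same rescaling as in FindWeights
sim.toy / rowSums(sim.toy)                     # simple per-cell normalization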
to # @param new.assay.name Name for the new assay containing the integrated data # @param normalization.method Name of normalization method used: LogNormalize # or SCT # @param features Vector of features to use when computing the PCA to determine the weights. Only set # if you want a different set from those used in the anchor finding process # @param features.to.integrate Vector of features to integrate. By default, will use the features # used in anchor finding. # @param dims Number of PCs to use in the weighting procedure # @param k.weight Number of neighbors to consider when weighting # @param weight.reduction Dimension reduction to use when calculating anchor weights. # This can be either: # \itemize{ # \item{A string, specifying the name of a dimension reduction present in all objects to be integrated} # \item{A vector of strings, specifying the name of a dimension reduction to use for each object to be integrated} # \item{NULL, in which case a new PCA will be calculated and used to calculate anchor weights} # } # Note that, if specified, the requested dimension reduction will only be used for calculating anchor weights in the # first merge between reference and query, as the merged object will subsequently contain more cells than was in # query, and weights will need to be calculated for all cells in the object. # @param sd.weight Controls the bandwidth of the Gaussian kernel for weighting # @param preserve.order Do not reorder objects based on size for each pairwise integration. # @param eps Error bound on the neighbor finding algorithm (from \code{\link{RANN}}) # @param verbose Print progress bars and output # # @return Returns an integrated matrix # MapQueryData <- function( anchorset, reference, new.assay.name = "integrated", normalization.method = c("LogNormalize", "SCT"), features = NULL, features.to.integrate = NULL, dims = 1:30, k.weight = 100, weight.reduction = NULL, weights.matrix = NULL, no.offset = FALSE, sd.weight = 1, preserve.order = FALSE, eps = 0, verbose = TRUE ) { normalization.method <- match.arg(arg = normalization.method) reference.datasets <- slot(object = anchorset, name = 'reference.objects') object.list <- slot(object = anchorset, name = 'object.list') anchors <- slot(object = anchorset, name = 'anchors') features <- features %||% slot(object = anchorset, name = "anchor.features") features.to.integrate <- features.to.integrate %||% features cellnames.list <- list() for (ii in 1:length(x = object.list)) { cellnames.list[[ii]] <- colnames(x = object.list[[ii]]) } if (length(x = reference.datasets) == length(x = object.list)) { query.datasets <- NULL } else { query.datasets <- setdiff(x = seq_along(along.with = object.list), y = reference.datasets) } my.lapply <- ifelse( test = verbose && nbrOfWorkers() == 1, yes = pblapply, no = future_lapply ) query.corrected <- my.lapply( X = query.datasets, FUN = function(dataset1) { if (verbose) { message("\nIntegrating dataset ", dataset1, " with reference dataset") } filtered.anchors <- anchors[anchors$dataset1 %in% reference.datasets & anchors$dataset2 == dataset1, ] integrated <- RunIntegration( filtered.anchors = filtered.anchors, reference = reference, query = object.list[[dataset1]], new.assay.name = new.assay.name, normalization.method = normalization.method, cellnames.list = cellnames.list, features.to.integrate = features.to.integrate, weight.reduction = weight.reduction, weights.matrix = weights.matrix, no.offset = no.offset, features = features, dims = dims, k.weight = k.weight, sd.weight = sd.weight, eps = 
eps, verbose = verbose ) return(integrated) } ) reference.integrated <- GetAssayData( object = reference, slot = 'data' )[features.to.integrate, ] query.corrected[[length(x = query.corrected) + 1]] <- reference.integrated all.integrated <- do.call(cbind, query.corrected) return(all.integrated) } # Convert nearest neighbor information to a sparse matrix # # @param idx Nearest neighbor index # @param distance Nearest neighbor distance # @param k Number of nearest neighbors # NNtoMatrix <- function(idx, distance, k) { nn <- list() x <- 1 for (i in 1:nrow(x = idx)) { for (j in 2:k) { nn.idx <- idx[i, j] nn.dist <- distance[i, j] nn[[x]] <- c('i' = i, 'j' = nn.idx, 'x' = 1/nn.dist) x <- x + 1 } } nn <- do.call(what = rbind, args = nn) nn.matrix <- new( Class = 'dgTMatrix', i = as.integer(x = nn[, 1] - 1), j = as.integer(x = nn[, 2] - 1), x = as.numeric(x = nn[, 3]), Dim = as.integer(x = c(nrow(idx), nrow(x = idx))) ) nn.matrix <- as.sparse(x = nn.matrix) return(nn.matrix) } # Pairwise dataset integration # # Used for reference construction # # @param anchorset Results from FindIntegrationAnchors # @param new.assay.name Name for the new assay containing the integrated data # @param normalization.method Name of normalization method used: LogNormalize # or SCT # @param features Vector of features to use when computing the PCA to determine # the weights. Only set if you want a different set from those used in the # anchor finding process # @param features.to.integrate Vector of features to integrate. By default, # will use the features used in anchor finding. # @param dims Number of PCs to use in the weighting procedure # @param k.weight Number of neighbors to consider when weighting # @param weight.reduction Dimension reduction to use when calculating anchor # weights. This can be either: # \itemize{ # \item{A string, specifying the name of a dimension reduction present in # all objects to be integrated} # \item{A vector of strings, specifying the name of a dimension reduction to # use for each object to be integrated} # \item{NULL, in which case a new PCA will be calculated and used to # calculate anchor weights} # } # Note that, if specified, the requested dimension reduction will only be used # for calculating anchor weights in the first merge between reference and # query, as the merged object will subsequently contain more cells than was in # query, and weights will need to be calculated for all cells in the object. # @param sd.weight Controls the bandwidth of the Gaussian kernel for weighting # @param sample.tree Specify the order of integration. If NULL, will compute # automatically. # @param preserve.order Do not reorder objects based on size for each pairwise # integration. 
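# Illustrative sketch of how a sample.tree encodes the integration order
# (hypothetical values, not executed by the package). The tree is an hclust
# $merge matrix from BuildSampleTree above: negative entries are original
# datasets, positive entries point to earlier rows of the tree, and
# ParseMergePair/ParseRow below unwind that recursion.
sample.tree.toy <- matrix(
  data = c(-1, -2,    # row 1: integrate dataset 1 with dataset 2
            1, -3),   # row 2: integrate that result with dataset 3
  ncol = 2,
  byrow = TRUE
)
# ParseMergePair(sample.tree.toy, i = 2) would therefore report
# object1 = c(1, 2) and object2 = 3 for the second merge.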
# @param eps Error bound on the neighbor finding algorithm (from # \code{\link{RANN}}) # @param verbose Print progress bars and output # # @return Returns a Seurat object with a new integrated Assay # PairwiseIntegrateReference <- function( anchorset, new.assay.name = "integrated", normalization.method = c("LogNormalize", "SCT"), features = NULL, features.to.integrate = NULL, dims = 1:30, k.weight = 100, weight.reduction = NULL, sd.weight = 1, sample.tree = NULL, preserve.order = FALSE, eps = 0, verbose = TRUE ) { object.list <- slot(object = anchorset, name = "object.list") reference.objects <- slot(object = anchorset, name = "reference.objects") features <- features %||% slot(object = anchorset, name = "anchor.features") features.to.integrate <- features.to.integrate %||% features if (length(x = reference.objects) == 1) { ref.obj <- object.list[[reference.objects]] ref.obj[[new.assay.name]] <- CreateAssayObject( data = GetAssayData(ref.obj, slot = 'data')[features.to.integrate, ], check.matrix = FALSE ) DefaultAssay(object = ref.obj) <- new.assay.name return(ref.obj) } anchors <- slot(object = anchorset, name = "anchors") offsets <- slot(object = anchorset, name = "offsets") objects.ncell <- sapply(X = object.list, FUN = ncol) if (!is.null(x = weight.reduction)) { if (length(x = weight.reduction) == 1 | inherits(x = weight.reduction, what = "DimReduc")) { if (length(x = object.list) == 2) { weight.reduction <- list(NULL, weight.reduction) } else if (inherits(x = weight.reduction, what = "character")) { weight.reduction <- as.list(x = rep(x = weight.reduction, times = length(x = object.list))) } else { stop("Invalid input for weight.reduction. Please specify either the names of the dimension", "reduction for each object in the list or provide DimReduc objects.") } } if (length(x = weight.reduction) != length(x = object.list)) { stop("Please specify a dimension reduction for each object, or one dimension reduction to be used for all objects") } if (inherits(x = weight.reduction, what = "character")) { weight.reduction <- as.list(x = weight.reduction) } available.reductions <- lapply(X = object.list, FUN = FilterObjects, classes.keep = 'DimReduc') for (ii in 1:length(x = weight.reduction)) { if (ii == 1 & is.null(x = weight.reduction[[ii]])) next if (!inherits(x = weight.reduction[[ii]], what = "DimReduc")) { if (!weight.reduction[[ii]] %in% available.reductions[[ii]]) { stop("Requested dimension reduction (", weight.reduction[[ii]], ") is not present in object ", ii) } weight.reduction[[ii]] <- object.list[[ii]][[weight.reduction[[ii]]]] } } } if (is.null(x = sample.tree)) { similarity.matrix <- CountAnchors( anchor.df = anchors, offsets = offsets, obj.lengths = objects.ncell ) similarity.matrix <- similarity.matrix[reference.objects, reference.objects] sample.tree <- BuildSampleTree(similarity.matrix = similarity.matrix) sample.tree <- AdjustSampleTree(x = sample.tree, reference.objects = reference.objects) } cellnames.list <- list() for (ii in 1:length(x = object.list)) { cellnames.list[[ii]] <- colnames(x = object.list[[ii]]) } unintegrated <- suppressWarnings(expr = merge( x = object.list[[reference.objects[[1]]]], y = object.list[reference.objects[2:length(x = reference.objects)]] )) names(x = object.list) <- as.character(-(1:length(x = object.list))) if (!is.null(x = weight.reduction)) { names(x = weight.reduction) <- names(x = object.list) } if (verbose & (length(x = reference.objects) != length(x = object.list))) { message("Building integrated reference") } for (ii in 1:nrow(x = 
sample.tree)) { merge.pair <- as.character(x = sample.tree[ii, ]) length1 <- ncol(x = object.list[[merge.pair[1]]]) length2 <- ncol(x = object.list[[merge.pair[2]]]) if (!(preserve.order) & (length2 > length1)) { merge.pair <- rev(x = merge.pair) sample.tree[ii, ] <- as.numeric(merge.pair) } if (!is.null(x = weight.reduction)) { # extract the correct dimreduc objects, in the correct order weight.pair <- weight.reduction[merge.pair] } else { weight.pair <- NULL } object.1 <- DietSeurat( object = object.list[[merge.pair[1]]], assays = DefaultAssay(object = object.list[[merge.pair[1]]]), counts = FALSE ) object.2 <- DietSeurat( object = object.list[[merge.pair[2]]], assays = DefaultAssay(object = object.list[[merge.pair[2]]]), counts = FALSE ) # suppress key duplication warning suppressWarnings(object.1[["ToIntegrate"]] <- object.1[[DefaultAssay(object = object.1)]]) DefaultAssay(object = object.1) <- "ToIntegrate" object.1 <- DietSeurat(object = object.1, assays = "ToIntegrate") suppressWarnings(object.2[["ToIntegrate"]] <- object.2[[DefaultAssay(object = object.2)]]) DefaultAssay(object = object.2) <- "ToIntegrate" object.2 <- DietSeurat(object = object.2, assays = "ToIntegrate") datasets <- ParseMergePair(sample.tree, ii) if (verbose) { message( "Merging dataset ", paste(datasets$object2, collapse = " "), " into ", paste(datasets$object1, collapse = " ") ) } merged.obj <- merge(x = object.1, y = object.2, merge.data = TRUE) if (verbose) { message("Extracting anchors for merged samples") } filtered.anchors <- anchors[anchors$dataset1 %in% datasets$object1 & anchors$dataset2 %in% datasets$object2, ] integrated.matrix <- RunIntegration( filtered.anchors = filtered.anchors, normalization.method = normalization.method, reference = object.1, query = object.2, cellnames.list = cellnames.list, new.assay.name = new.assay.name, features.to.integrate = features.to.integrate, features = features, dims = dims, weight.reduction = weight.reduction, k.weight = k.weight, sd.weight = sd.weight, eps = eps, verbose = verbose ) integrated.matrix <- cbind(integrated.matrix, GetAssayData(object = object.1, slot = 'data')[features.to.integrate, ]) merged.obj[[new.assay.name]] <- CreateAssayObject(data = integrated.matrix, check.matrix = FALSE) DefaultAssay(object = merged.obj) <- new.assay.name object.list[[as.character(x = ii)]] <- merged.obj object.list[[merge.pair[[1]]]] <- NULL object.list[[merge.pair[[2]]]] <- NULL invisible(x = CheckGC()) } integrated.data <- GetAssayData( object = object.list[[as.character(x = ii)]], assay = new.assay.name, slot = 'data' ) integrated.data <- integrated.data[, colnames(x = unintegrated)] new.assay <- new( Class = 'Assay', counts = new(Class = "dgCMatrix"), data = integrated.data, scale.data = matrix(), var.features = vector(), meta.features = data.frame(row.names = rownames(x = integrated.data)), misc = NULL, key = paste0(new.assay.name, "_") ) unintegrated[[new.assay.name]] <- new.assay # "unintegrated" now contains the integrated assay DefaultAssay(object = unintegrated) <- new.assay.name VariableFeatures(object = unintegrated) <- features if (normalization.method == "SCT"){ unintegrated[[new.assay.name]] <- SetAssayData( object = unintegrated[[new.assay.name]], slot = "scale.data", new.data = as.matrix(x = GetAssayData(object = unintegrated[[new.assay.name]], slot = "data")) ) } unintegrated <- SetIntegrationData( object = unintegrated, integration.name = "Integration", slot = "anchors", new.data = anchors ) unintegrated <- SetIntegrationData( object = unintegrated, 
integration.name = "Integration", slot = "sample.tree", new.data = sample.tree ) unintegrated[["FindIntegrationAnchors"]] <- slot(object = anchorset, name = "command") suppressWarnings(expr = unintegrated <- LogSeuratCommand(object = unintegrated)) return(unintegrated) } # Parse merge information from dataset clustering # # @param clustering clustering dataframe from hclust ($merge). # Gives the order of merging datasets to get to the root of the tree. # @param i current row in clustering dataframe # ParseMergePair <- function(clustering, i){ # return 2-element list of datasets in first and second object datasets <- list('object1' = clustering[i, 1], 'object2' = clustering[i, 2]) if (datasets$object1 > 0) { datasets$object1 <- ParseRow(clustering, datasets$object1) } if (datasets$object2 > 0) { datasets$object2 <- ParseRow(clustering, datasets$object2) } datasets$object1 <- abs(x = datasets$object1) datasets$object2 <- abs(x = datasets$object2) return(datasets) } # Parse row of clustering order # # Used recursively to work out the dataset composition of a merged object # # @param clustering clustering dataframe from hclust ($merge). # Gives the order of merging datasets to get to the root of the tree. # @param i current row in clustering dataframe # ParseRow <- function(clustering, i){ # returns vector of datasets datasets <- as.list(x = clustering[i, ]) if (datasets[[1]] > 0) { datasets[[1]] <- ParseRow(clustering = clustering, i = datasets[[1]]) } if (datasets[[2]] > 0) { datasets[[2]] <- ParseRow(clustering = clustering, i = datasets[[2]]) } return(unlist(datasets)) } #' @rdname ProjectCellEmbeddings #' @method ProjectCellEmbeddings Seurat #' @export #' #' ProjectCellEmbeddings.Seurat <- function( query, reference, query.assay = NULL, reference.assay = NULL, reduction = "pca", dims = 1:50, normalization.method = c("LogNormalize", "SCT"), scale = TRUE, verbose = TRUE, nCount_UMI = NULL, feature.mean = NULL, feature.sd = NULL, ... ) { if (verbose) { message("Projecting cell embeddings") } normalization.method <- match.arg(arg = normalization.method) query.assay <- query.assay %||% DefaultAssay(object = query) reference.assay <- reference.assay %||% DefaultAssay(object = reference) if (normalization.method == 'SCT') { if (!IsSCT(assay = reference[[reference.assay]])) { stop('reference in ', reference.assay, ' assay does not have a SCT model' ) } reference.model.num <- length(slot(object = reference[[reference.assay]], name = "SCTModel.list")) if (reference.model.num > 1) { stop("Given reference assay (", reference.assay, ") has ", reference.model.num , " reference sct models. Please provide a reference assay with a ", " single reference sct model.", call. = FALSE) } else if (reference.model.num == 0) { stop("Given reference assay (", reference.assay, ") doesn't contain a reference SCT model.") } } proj.pca <- ProjectCellEmbeddings( query = query[[query.assay]], reference = reference, reference.assay = reference.assay, reduction = reduction, dims = dims, scale = scale, normalization.method = normalization.method, verbose = verbose, nCount_UMI = nCount_UMI, feature.mean = feature.mean, feature.sd = feature.sd ) return(proj.pca) } #' @rdname ProjectCellEmbeddings #' @method ProjectCellEmbeddings Assay #' @export #' ProjectCellEmbeddings.Assay <- function( query, reference, reference.assay = NULL, reduction = "pca", dims = 1:50, scale = TRUE, normalization.method = NULL, verbose = TRUE, nCount_UMI = NULL, feature.mean = NULL, feature.sd = NULL, ... 
) { features <- Reduce( f = intersect, x = list( rownames(x = Loadings(object = reference[[reduction]])), rownames(x = reference[[reference.assay]]), rownames(x = query) ) ) if (normalization.method == 'SCT') { slot <- 'counts' } else { slot <- 'data' } proj.pca <- ProjectCellEmbeddings( query = GetAssayData( object = query, slot = slot), reference = reference, reference.assay = reference.assay, reduction = reduction, dims = dims, scale = scale, normalization.method = normalization.method, verbose = verbose, features = features, nCount_UMI = nCount_UMI, feature.mean = feature.mean, feature.sd = feature.sd ) return(proj.pca) } #' @rdname ProjectCellEmbeddings #' @method ProjectCellEmbeddings SCTAssay #' @export #' ProjectCellEmbeddings.SCTAssay <- function( query, reference, reference.assay = NULL, reduction = "pca", dims = 1:50, scale = TRUE, normalization.method = NULL, verbose = TRUE, nCount_UMI = NULL, feature.mean = NULL, feature.sd = NULL, ... ) { if (normalization.method != 'SCT') { warning('Query data is SCT normalized, but normalization.method is set to LogNormalize') } features <- Reduce( f = intersect, x = list( rownames(x = Loadings(object = reference[[reduction]])), rownames(x = reference[[reference.assay]]), rownames(x = query$scale.data) ) ) query.data <- GetAssayData( object = query, slot = "scale.data")[features,] ref.feature.loadings <- Loadings(object = reference[[reduction]])[features, dims] proj.pca <- t(crossprod(x = ref.feature.loadings, y = query.data)) return(proj.pca) } #' @rdname ProjectCellEmbeddings #' @method ProjectCellEmbeddings StdAssay #' @export #' ProjectCellEmbeddings.StdAssay <- function( query, reference, reference.assay = NULL, reduction = "pca", dims = 1:50, scale = TRUE, normalization.method = NULL, verbose = TRUE, nCount_UMI = NULL, feature.mean = NULL, feature.sd = NULL, ... ) { reference.assay <- reference.assay %||% DefaultAssay(object = reference) features <- Reduce( f = intersect, x = list( rownames(x = Loadings(object = reference[[reduction]])), rownames(x = reference[[reference.assay]]) ) ) if (normalization.method == 'SCT') { layers.set <- Layers(object = query, search = 'counts') } else { layers.set <- Layers(object = query, search = 'data') } proj.pca.list <- list() cell.list <- list() for (i in seq_along(layers.set)) { proj.pca.list[[i]] <- t(ProjectCellEmbeddings( query = LayerData(object = query, layer = layers.set[i]), reference = reference, reference.assay = reference.assay, reduction = reduction, dims = dims, scale = scale, normalization.method = normalization.method, verbose = verbose, features = features, nCount_UMI = nCount_UMI[Cells(x = query, layer = layers.set[i])], feature.mean = feature.mean, feature.sd = feature.sd )) cell.list[[i]] <- colnames(proj.pca.list[[i]]) } proj.pca <- matrix( data = unlist(proj.pca.list), nrow = nrow(proj.pca.list[[1]]), ncol = ncol(query) ) rownames(proj.pca) <- rownames(proj.pca.list[[1]]) colnames(proj.pca) <- unlist(cell.list) proj.pca <- t(proj.pca) proj.pca <- proj.pca[colnames(query),] return(proj.pca) } #' @rdname ProjectCellEmbeddings #' @method ProjectCellEmbeddings default #' @export #' ProjectCellEmbeddings.default <- function( query, reference, reference.assay = NULL, reduction = "pca", dims = 1:50, scale = TRUE, normalization.method = NULL, verbose = TRUE, features = NULL, nCount_UMI = NULL, feature.mean = NULL, feature.sd = NULL, ... 
){ features <- features %||% rownames(x = Loadings(object = reference[[reduction]])) if (normalization.method == 'SCT') { reference.SCT.model <- slot(object = reference[[reference.assay]], name = "SCTModel.list")[[1]] query <- FetchResiduals_reference( object = query, reference.SCT.model = reference.SCT.model, features = features, nCount_UMI = nCount_UMI) } else { query <- query[features,] if(inherits(x = reference[[reference.assay]], what = "Assay5")){ reference.data.list <- c() for (i in Layers(object = reference[[reference.assay]], search = "data")) { reference.data.list[[i]] <- LayerData( object = reference[[reference.assay]], layer = i )[features, ] } reference.data <- do.call(cbind, reference.data.list) } else { reference.data <- GetAssayData( object = reference, assay = reference.assay, slot = "data")[features, ] } if (is.null(x = feature.mean)) { if (inherits(x = reference.data, what = 'dgCMatrix')) { feature.mean <- RowMeanSparse(mat = reference.data) } else if (inherits(x = reference.data, what = "IterableMatrix")) { bp.stats <- BPCells::matrix_stats(matrix = reference.data, row_stats = "variance") feature.mean <- bp.stats$row_stats["mean",] } else { feature.mean <- rowMeans2(x = reference.data) } if (scale) { if (inherits(x = reference.data, what = "IterableMatrix")) { feature.sd <- sqrt(bp.stats$row_stats["variance",]) } else { feature.sd <- sqrt(x = RowVarSparse(mat = as.sparse(reference.data))) } feature.sd[is.na(x = feature.sd)] <- 1 feature.sd[feature.sd==0] <- 1 } else { feature.sd <- rep(x = 1, nrow(x = reference.data)) } feature.mean[is.na(x = feature.mean)] <- 1 } store.names <- dimnames(x = query) if (is.numeric(x = feature.mean)) { query <- FastSparseRowScaleWithKnownStats( mat = as.sparse(x = query), mu = feature.mean, sigma = feature.sd, display_progress = FALSE ) } dimnames(x = query) <- store.names } ref.feature.loadings <- Loadings(object = reference[[reduction]])[features, dims] proj.pca <- t(crossprod(x = ref.feature.loadings, y = query)) return(proj.pca) } #' @rdname ProjectCellEmbeddings #' @method ProjectCellEmbeddings IterableMatrix #' @export #' #' ProjectCellEmbeddings.IterableMatrix <- function( query, reference, reference.assay = NULL, reduction = "pca", dims = 1:50, scale = TRUE, normalization.method = NULL, verbose = TRUE, features = features, nCount_UMI = NULL, feature.mean = NULL, feature.sd = NULL, block.size = 10000, ... 
) { features <- features %||% rownames(x = Loadings(object = reference[[reduction]])) features <- intersect(x = features, y = rownames(query)) if (normalization.method == 'SCT') { reference.SCT.model <- slot(object = reference[[reference.assay]], name = "SCTModel.list")[[1]] cells.grid <- split( x = 1:ncol(query), f = ceiling(seq_along(along.with = 1:ncol(query)) / block.size)) proj.list <- list() for (i in seq_along(along.with = cells.grid)) { query.i <- FetchResiduals_reference( object = as.sparse(query[,cells.grid[[i]]]), reference.SCT.model = reference.SCT.model, features = features, nCount_UMI = nCount_UMI[colnames(query)[cells.grid[[i]]]]) proj.list[[i]] <- t(Loadings(object = reference[[reduction]])[features, dims]) %*% query.i } proj.pca <- t(matrix( data = unlist(x = proj.list), nrow = length(x = dims), ncol = ncol(x = query), dimnames = list( colnames(x = Embeddings(object = reference[[reduction]]))[dims], colnames(x = query)) )) } else { query <- query[features,] reference.data.list <- c() for (i in Layers(object = reference[[reference.assay]], search = "data")) { reference.data.list[[i]] <- LayerData(object = reference[[reference.assay]], layer = i)[features, ] } reference.data <- do.call(cbind, reference.data.list) if (is.null(x = feature.mean)) { if (inherits(x = reference.data, what = 'dgCMatrix')) { feature.mean <- RowMeanSparse(mat = reference.data) } else if (inherits(x = reference.data, what = "IterableMatrix")) { bp.stats <- BPCells::matrix_stats( matrix = reference.data, row_stats = "variance") feature.mean <- bp.stats$row_stats["mean",] } else { feature.mean <- rowMeans(mat = reference.data) } if (scale) { if (inherits(x = reference.data, what = "IterableMatrix")) { feature.sd <- sqrt(x = bp.stats$row_stats["variance",]) } else { feature.sd <- sqrt( x = RowVarSparse( mat = as.sparse(x = reference.data) ) ) } feature.sd[is.na(x = feature.sd)] <- 1 feature.sd[feature.sd==0] <- 1 } else { feature.sd <- rep(x = 1, nrow(x = reference.data)) } feature.mean[is.na(x = feature.mean)] <- 1 } query.scale <- BPCells::min_by_row(mat = query, vals = 10 * feature.sd + feature.mean) query.scale <- (query.scale - feature.mean) / feature.sd proj.pca <- t(query.scale) %*% Loadings(object = reference[[reduction]])[features,dims] rownames(x = proj.pca) <- colnames(x = query) colnames(x = proj.pca) <- colnames(x = Embeddings(object = reference[[reduction]]))[dims] } return(proj.pca) } # Project new data onto SVD (LSI or PCA) # # A = U∑V SVD # U' = VA'/∑ LSI projection # # Note that because in LSI we don't multiply by ∑ to get the embeddings (it's just U), # we need to divide by ∑ in the projection to get the equivalent. Therefore need # the singular values, which (in Signac RunLSI) we store in the DimReduc misc slot. # # @param reduction A \code{DimReduc} object containing the SVD dimension # reduction. Assumes original irlba output is stored in the misc slot of the dimreduc. # @param data A data matrix to project onto the SVD. Must contain the same # features used to construct the original SVD. # @param mode "pca" or "lsi". Determines if we divide projected values by singular values. # @param features Features to use. If NULL, use all common features between # the dimreduc and the data matrix. 
# @param do.center Center the projected cell embeddings (subtract mean across cells) # @param do.scale Scale the projected cell embeddings (divide by standard deviation across cells) # @param use.original.stats When standardizing the vectors, use the mean and standard deviation # of the original vectors from the SVD, rather than the mean and standard deviation of the # projected vectors. # @param dims A vector containing the dimensions to use in the projection. If NULL (default), # project to all dimensions in the input SVD. # @param verbose Display messages # # @return Returns a matrix #' @importFrom Matrix crossprod # @export ProjectSVD <- function( reduction, data, mode = "pca", features = NULL, do.center = FALSE, do.scale = FALSE, use.original.stats = FALSE, dims = NULL, verbose = TRUE ) { vt <- Loadings(object = reduction) dims <- dims %||% seq_len(length.out = ncol(x = vt)) features <- features %||% rownames(x = vt) features <- intersect(x = features, y = rownames(x = data)) vt <- vt[features, dims] data <- data[features, ] if (verbose) { message("Projecting new data onto SVD") } projected.u <- as.matrix(t(vt) %*% data) if (mode == "lsi") { components <- slot(object = reduction, name = 'misc') sigma <- components$d projected.u <- projected.u / sigma[dims] } if (do.center) { if (use.original.stats) { components <- slot(object = reduction, name = 'misc') if ("u" %in% names(x = components)) { # preferentially use original irlba output stored in misc # signac scales and centers embeddings by default embed.mean <- apply(X = components$u, MARGIN = 2, FUN = mean) } else { # raw irlba output not stored, fall back to the reference embeddings ref.emb <- Embeddings(object = reduction) embed.mean <- apply(X = ref.emb, MARGIN = 2, FUN = mean) } } else { # projected.u is transposed so use MARGIN = 1 embed.mean <- apply(X = projected.u, MARGIN = 1, FUN = mean) } projected.u <- projected.u - embed.mean } if (do.scale) { if (use.original.stats) { components <- slot(object = reduction, name = 'misc') if ("u" %in% names(x = components)) { embed.sd <- apply(X = components$u, MARGIN = 2, FUN = sd) } else { ref.emb <- Embeddings(object = reduction) embed.sd <- apply(X = ref.emb, MARGIN = 2, FUN = sd) } } else { embed.sd <- apply(X = projected.u, MARGIN = 1, FUN = sd) } projected.u <- projected.u / embed.sd } return(t(x = projected.u)) } # Calculate position along a defined reference range for a given vector of # numerics. Will range from 0 to 1. # # @param x Vector of numeric type # @param lower Lower end of reference range # @param upper Upper end of reference range # #' @importFrom stats quantile # # @return Returns a vector that describes the position of each element in # x along the defined reference range # ReferenceRange <- function(x, lower = 0.025, upper = 0.975) { return((x - quantile(x = x, probs = lower)) / (quantile(x = x, probs = upper) - quantile(x = x, probs = lower))) } # Run integration between a reference and query object # # Should only be called from within another function # # @param filtered.anchors A dataframe containing only anchors between reference and query # @param reference A reference object # @param query A query object # @param cellnames.list List of all cell names in all objects to be integrated # @param new.assay.name Name for the new assay containing the integrated data # @param features Vector of features to use when computing the PCA to determine the weights. 
Only set # if you want a different set from those used in the anchor finding process # @param features.to.integrate Vector of features to integrate. By default, will use the features # used in anchor finding. # @param dims Number of PCs to use in the weighting procedure # @param k.weight Number of neighbors to consider when weighting # @param weight.reduction Dimension reduction to use when calculating anchor weights. # This can be either: # \itemize{ # \item{A string, specifying the name of a dimension reduction present in all objects to be integrated} # \item{A vector of strings, specifying the name of a dimension reduction to use for each object to be integrated} # \item{NULL, in which case a new PCA will be calculated and used to calculate anchor weights} # } # Note that, if specified, the requested dimension reduction will only be used for calculating anchor weights in the # first merge between reference and query, as the merged object will subsequently contain more cells than was in # query, and weights will need to be calculated for all cells in the object. # @param sd.weight Controls the bandwidth of the Gaussian kernel for weighting # @param sample.tree Specify the order of integration. If NULL, will compute automatically. # @param eps Error bound on the neighbor finding algorithm (from \code{\link{RANN}}) # @param verbose Print progress bars and output # RunIntegration <- function( filtered.anchors, normalization.method, reference, query, cellnames.list, new.assay.name, features.to.integrate, weight.reduction, weights.matrix = NULL, no.offset = FALSE, features, dims, k.weight, sd.weight, eps, verbose ) { cells1 <- colnames(x = reference) cells2 <- colnames(x = query) if (nrow(x = filtered.anchors) < k.weight) { warning("Number of anchors is less than k.weight. 
Lowering k.weight for sample pair.") k.weight <- nrow(x = filtered.anchors) } merged.obj <- merge(x = reference, y = query, merge.data = TRUE) if (no.offset) { cell1.offset <- filtered.anchors[, 1] cell2.offset <- filtered.anchors[, 2] } else { cell1.offset <- GetCellOffsets( anchors = filtered.anchors, dataset = 1, cell = 1, cellnames.list = cellnames.list, cellnames = cells1 ) cell2.offset <- GetCellOffsets( anchors = filtered.anchors, dataset = 2, cell = 2, cellnames.list = cellnames.list, cellnames = cells2 ) } filtered.anchors[, 1] <- cell1.offset filtered.anchors[, 2] <- cell2.offset integration.name <- "integrated" merged.obj <- SetIntegrationData( object = merged.obj, integration.name = integration.name, slot = 'anchors', new.data = filtered.anchors ) merged.obj <- SetIntegrationData( object = merged.obj, integration.name = integration.name, slot = 'neighbors', new.data = list('cells1' = cells1, 'cells2' = cells2) ) merged.obj <- FindIntegrationMatrix( object = merged.obj, integration.name = integration.name, features.integrate = features.to.integrate, verbose = verbose ) assay <- DefaultAssay(object = merged.obj) if (is.null(x = weights.matrix)) { if (is.null(x = weight.reduction) && !is.null(x = dims)) { if (normalization.method == "SCT"){ # recenter residuals centered.resids <- ScaleData( object = GetAssayData(object = merged.obj, assay = assay, slot = "data"), do.scale = FALSE, do.center = TRUE, verbose = FALSE ) merged.obj[["pca"]] <- RunPCA( object = centered.resids[features, ], assay = assay, npcs = max(dims), verbose = FALSE, features = features ) } else { merged.obj <- ScaleData( object = merged.obj, features = features, verbose = FALSE ) merged.obj <- RunPCA( object = merged.obj, npcs = max(dims), verbose = FALSE, features = features ) } dr.weights <- merged.obj[['pca']] } else if(is.null(x = weight.reduction) && is.null(x = dims)) { dr.weights <- CreateDimReducObject( embeddings = as.matrix(x = t(x = GetAssayData(object = merged.obj))), key = "int_", assay = "ToIntegrate" ) dims <- 1:ncol(x = dr.weights) } else { # need to match order of objects dr <- weight.reduction[[2]] if (!all(cells2 %in% rownames(x = dr))) { stop("Query cells not present in supplied DimReduc object. 
Set weight.reduction to a DimReduc object containing the query cells.") } if (inherits(x = dr, what = "DimReduc")) { dr.weights <- dr } else { dr.weights <- query[[dr]] } dims <- 1:ncol(x = dr.weights) } merged.obj <- FindWeights( object = merged.obj, integration.name = integration.name, reduction = dr.weights, dims = dims, k = k.weight, sd.weight = sd.weight, eps = eps, verbose = verbose ) } else { merged.obj <- SetIntegrationData( object = merged.obj, integration.name = "integrated", slot = "weights", new.data = weights.matrix ) } merged.obj <- TransformDataMatrix( object = merged.obj, new.assay.name = new.assay.name, features.to.integrate = features.to.integrate, integration.name = integration.name, verbose = verbose ) integrated.matrix <- GetAssayData( object = merged.obj, assay = new.assay.name, slot = 'data' ) return(integrated.matrix[, cells2]) } # order samples based on sample tree # the first sample is reference sample SampleIntegrationOrder <- function(tree) { order <- tree[nrow(x = tree), ] while (sum(order > 0) != 0) { replace.idx <- which(x = order > 0)[1] replace <- tree[order[replace.idx], ] if (replace.idx == 1) { left <- vector() right <- order[(replace.idx + 1):length(x = order)] replace <- tree[order[replace.idx], ] order <- c(left, replace, right) } else if (replace.idx == length(x = order)) { left <- order[1:(replace.idx - 1)] right <- vector() } else { left <- order[1:(replace.idx - 1)] right <- order[(replace.idx + 1):length(x = order)] } order <- c(left, replace, right) } order <- order * (-1) return(order) } ScoreAnchors <- function( object, assay = NULL, integration.name = 'integrated', verbose = TRUE, k.score = 30 ) { assay <- assay %||% DefaultAssay(object = object) anchor.df <- as.data.frame(x = GetIntegrationData(object = object, integration.name = integration.name, slot = 'anchors')) neighbors <- GetIntegrationData(object = object, integration.name = integration.name, slot = "neighbors") offset <- length(x = neighbors$cells1) indices.aa <- Indices(object = neighbors$nnaa) indices.bb <- Indices(object = neighbors$nnbb) indices.ab <- Indices(object = neighbors$nnab) indices.ba <- Indices(object = neighbors$nnba) nbrsetA <- function(x) c(indices.aa[x, 1:k.score], indices.ab[x, 1:k.score] + offset) nbrsetB <- function(x) c(indices.ba[x, 1:k.score], indices.bb[x, 1:k.score] + offset) # score = number of shared neighbors anchor.new <- data.frame( 'cell1' = anchor.df[, 1], 'cell2' = anchor.df[, 2], 'score' = mapply( FUN = function(x, y) { length(x = intersect(x = nbrsetA(x = x), nbrsetB(x = y)))}, anchor.df[, 1], anchor.df[, 2] ) ) # normalize the score max.score <- quantile(anchor.new$score, 0.9) min.score <- quantile(anchor.new$score, 0.01) anchor.new$score <- anchor.new$score - min.score anchor.new$score <- anchor.new$score / (max.score - min.score) anchor.new$score[anchor.new$score > 1] <- 1 anchor.new$score[anchor.new$score < 0] <- 0 anchor.new <- as.matrix(x = anchor.new) object <- SetIntegrationData( object = object, integration.name = integration.name, slot = 'anchors', new.data = anchor.new ) return(object) } # Get top n features across given set of dimensions # # @param object Seurat object # @param reduction Which dimension reduction to use # @param dims Which dimensions to use # @param features.per.dim How many features to consider per dimension # @param max.features Number of features to return at most # @param projected Use projected loadings # TopDimFeatures <- function( object, reduction, dims = 1:10, features.per.dim = 100, max.features = 200, 
projected = FALSE ) { dim.reduction <- object[[reduction]] max.features <- max(length(x = dims) * 2, max.features) num.features <- sapply(X = 1:features.per.dim, FUN = function(y) { length(x = unique(x = as.vector(x = sapply(X = dims, FUN = function(x) { unlist(x = TopFeatures(object = dim.reduction, dim = x, nfeatures = y, balanced = TRUE, projected = projected)) })))) }) max.per.pc <- which.max(x = num.features[num.features < max.features]) features <- unique(x = as.vector(x = sapply(X = dims, FUN = function(x) { unlist(x = TopFeatures(object = dim.reduction, dim = x, nfeatures = max.per.pc, balanced = TRUE, projected = projected)) }))) features <- unique(x = features) return(features) } TransformDataMatrix <- function( object, assay = NULL, new.assay.name = 'integrated', integration.name = 'integrated', features.to.integrate = NULL, reduction = "cca", verbose = TRUE ) { if(verbose) { message("Integrating data") } assay <- assay %||% DefaultAssay(object = object) weights <- GetIntegrationData( object = object, integration.name = integration.name, slot = 'weights' ) integration.matrix <- GetIntegrationData( object = object, integration.name = integration.name, slot = 'integration.matrix' ) neighbors <- GetIntegrationData(object = object, integration.name = integration.name, slot = 'neighbors') nn.cells1 <- neighbors$cells1 nn.cells2 <- neighbors$cells2 data.use1 <- t(x = GetAssayData( object = object, assay = assay, slot = "data")[features.to.integrate, nn.cells1] ) data.use2 <- t(x = GetAssayData( object = object, assay = assay, slot = "data")[features.to.integrate, nn.cells2] ) integrated <- IntegrateDataC(integration_matrix = as.sparse(x = integration.matrix), weights = as.sparse(x = weights), expression_cells2 = as.sparse(x = data.use2)) dimnames(integrated) <- dimnames(data.use2) new.expression <- t(rbind(data.use1, integrated)) new.expression <- new.expression[, colnames(object)] new.assay <- new( Class = 'Assay', # key = paste0(new.assay.name,"_"), counts = new(Class = "dgCMatrix"), data = new.expression, scale.data = matrix(), var.features = vector(), meta.features = data.frame(row.names = rownames(x = new.expression)), misc = NULL, # key = paste0(new.assay.name, "_") key = Key(object = new.assay.name, quiet = TRUE) ) object[[new.assay.name]] <- new.assay return(object) } # Helper function to validate parameters for FindTransferAnchors # ValidateParams_FindTransferAnchors <- function( reference, query, normalization.method, recompute.residuals, reference.assay, reference.neighbors, query.assay, reduction, reference.reduction, project.query, features, scale, npcs, l2.norm, dims, k.anchor, k.filter, k.score, max.features, nn.method, n.trees, eps, approx.pca, mapping.score.k, verbose ) { reference.assay <- reference.assay %||% DefaultAssay(object = reference) ModifyParam(param = "reference.assay", value = reference.assay) query.assay <- query.assay %||% DefaultAssay(object = query) ModifyParam(param = "query.assay", value = query.assay) DefaultAssay(object = reference) <- reference.assay ModifyParam(param = "reference", value = reference) DefaultAssay(object = query) <- query.assay ModifyParam(param = "query", value = query) if (!is.logical(x = scale)) { stop("Scale should be TRUE or FALSE") } if (length(x = reference) > 1 | length(x = query) > 1) { stop("We currently only support transfer between a single query and reference", call. 
= FALSE) } if (!reduction %in% c("pcaproject", "cca", "lsiproject", "rpca")) { stop("Please select either pcaproject, rpca, cca, or lsiproject for the reduction parameter.", call. = FALSE) } if (reduction == "cca" && !is.null(x = reference.reduction)) { stop("Specifying a reference reduction is only compatible with reduction = 'pcaproject'", call. = FALSE) } if (!normalization.method %in% c("LogNormalize", "SCT")) { stop("Please select either LogNormalize or SCT, for the normalization.method parameter.", call. = FALSE) } if (normalization.method == "SCT") { ModifyParam(param = "k.filter", value = NA) } if (reduction == "lsiproject") { ModifyParam(param = "k.filter", value = NA) } # commented out to enable filter anchors for v5 assay # if (inherits(x = reference[[reference.assay]], what = 'Assay5') || # inherits(x = query[[query.assay]], what = 'Assay5')) { # # current filter anchors not support for v5 assay # ModifyParam(param = "k.filter", value = NA) # } if (!is.na(x = k.filter) && k.filter > ncol(x = query)) { warning("k.filter is larger than the number of cells present in the query.\n", "Continuing without anchor filtering.", immediate. = TRUE, call. = FALSE) ModifyParam(param = "k.filter", value = NA) } if ((k.anchor + 1) > min(ncol(x = query), ncol(x = reference))) { stop("Please set k.anchor to be smaller than the number of cells in query (", ncol(x = query), ") and reference (", ncol(x = reference), ") objects.", call. = FALSE) } if ((k.score + 1) > min(ncol(x = query), ncol(x = reference))) { stop("Please set k.score to be smaller than the number of cells in query (", ncol(x = query), ") and reference (", ncol(x = reference), ") objects.", call. = FALSE) } if (reduction == "cca" && isTRUE(x = project.query)) { stop("The project.query workflow is not compatible with reduction = 'cca'", call. = FALSE) } if (normalization.method == "SCT" && isTRUE(x = project.query) && !IsSCT(query[[query.assay]])) { stop("In the project.query workflow, normalization is SCT, but query is not SCT normalized", call. = FALSE) } if (IsSCT(assay = query[[query.assay]]) && IsSCT(assay = reference[[reference.assay]]) && normalization.method != "SCT") { warning("Both reference and query assays have been processed with SCTransform.", "Setting normalization.method = 'SCT' and continuing.") normalization.method <- "SCT" ModifyParam(param = "normalization.method", value = "SCT") } if (IsSCT(assay = query[[query.assay]]) && normalization.method == "LogNormalize") { stop("An SCT assay (", query.assay, ") was provided for query.assay but ", "normalization.method was set as LogNormalize", call. = FALSE) } if (IsSCT(assay = query[[query.assay]]) && !inherits(x = query[[query.assay]], what = "SCTAssay")) { query[[query.assay]] <- as(object = query[[query.assay]], Class = "SCTAssay") ModifyParam(param = "query", value = query) } if (IsSCT(assay = reference[[reference.assay]]) && !inherits(x = reference[[reference.assay]], what = "SCTAssay")) { reference[[reference.assay]] <- as(object = reference[[reference.assay]], Class = "SCTAssay") ModifyParam(param = "reference", value = reference) } if (normalization.method != "SCT") { recompute.residuals <- FALSE ModifyParam(param = "recompute.residuals", value = recompute.residuals) } if (recompute.residuals) { # recompute.residuals only happens in ProjectCellEmbeddings, so k.filter set to NA. 
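    # Illustrative sketch (comments only, not run) of the kind of call this SCT
    # branch validates; 'ref' and 'qry' are hypothetical Seurat objects where
    # 'ref' carries a single SCT model, and the reduction/dims values below are
    # assumptions for illustration:
    #   anchors <- FindTransferAnchors(
    #     reference = ref,
    #     query = qry,
    #     normalization.method = "SCT",
    #     recompute.residuals = TRUE,
    #     reference.reduction = "pca",
    #     dims = 1:30
    #   )
    # In this branch the query residuals are recomputed from the reference SCT
    # model and anchor filtering is skipped (k.filter is set to NA below).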
ModifyParam(param = "k.filter", value = NA) reference.model.num <- length(x = slot(object = reference[[reference.assay]], name = "SCTModel.list")) if (reference.model.num > 1) { stop("Given reference assay (", reference.assay, ") has ", reference.model.num , " reference sct models. Please provide a reference assay with a ", " single reference sct model.", call. = FALSE) } else if (reference.model.num == 0) { if (IsSCT(query[[query.assay]])) { stop("Given reference assay (", reference.assay, ") doesn't contain a reference SCT model.\n", "Query assay is a SCTAssay. ", "You can set recompute.residuals to FALSE ", "to use Query residuals to continue the analysis", call. = FALSE) } stop("Given reference assay (", reference.assay, ") doesn't contain a reference SCT model. ", call. = FALSE) } else if (reference.model.num == 1) { new.sct.assay <- reference.assay if (verbose) { message("Normalizing query using reference SCT model") } } query.umi.assay <- query.assay if (IsSCT(assay = query[[query.assay]])) { query.sct.models <- slot(object = query[[query.assay]], name = "SCTModel.list") query.umi.assay <- unique(x = unname(obj = unlist(x = lapply(X = query.sct.models, FUN = slot, name = "umi.assay")))) if (length(x = query.umi.assay) > 1) { stop("Query assay provided is an SCTAssay with multiple different original umi assays", call = FALSE) } if (!query.umi.assay %in% Assays(object = query)) { stop("Query assay provided is an SCTAssay based on an orignal UMI assay", " that is no longer present in the query Seurat object. Unable to", " recompute residuals based on the reference SCT model.\n", "If you want to use Query SCTAssay residuals to continue the analysis, ", "you can set recompute.residuals to FALSE", call. = FALSE) } } if (reduction %in% c('cca', 'rpca')) { query <- SCTransform( object = query, reference.SCT.model = slot(object = reference[[reference.assay]], name = "SCTModel.list")[[1]], residual.features = features, assay = query.umi.assay, new.assay.name = new.sct.assay, verbose = FALSE ) } else { new.sct.assay <- query.umi.assay } DefaultAssay(query) <- new.sct.assay ModifyParam(param = "query.assay", value = new.sct.assay) ModifyParam(param = "query", value = query) ModifyParam(param = "reference", value = reference) } if (IsSCT(assay = reference[[reference.assay]]) && normalization.method == "LogNormalize") { stop("An SCT assay (", reference.assay, ") was provided for reference.assay but ", "normalization.method was set as LogNormalize.", call. = FALSE) } if (!IsSCT(assay = reference[[reference.assay]]) && normalization.method == "SCT") { stop("Given reference.assay (", reference.assay, ") has not been processed with ", "SCTransform. Please either run SCTransform or set normalization.method = 'LogNormalize'.", call. 
  # Create the 'data' layer if it does not already exist
  if (inherits(x = query[[query.assay]], what = "Assay5")) {
    if (is.null(
      tryCatch(expr = Layers(object = query[[query.assay]], search = 'data'),
               error = function (e) return(NULL))
    )
    ) {
      LayerData(
        object = query[[query.assay]],
        layer = "data") <- sparseMatrix(
          i = 1,
          j = 1,
          x = 1,
          dims = c(nrow(x = query[[query.assay]]),
                   ncol(x = query[[query.assay]])
          )
        )
      ModifyParam(param = "query", value = query)
    }
  }
  # features must be in both reference and query
  query.assay.check <- query.assay
  reference.assay.check <- reference.assay
  ref.features <- rownames(x = reference[[reference.assay.check]])
  query.features <- rownames(x = query[[query.assay.check]])
  if (normalization.method == "SCT") {
    if (IsSCT(query[[query.assay.check]])) {
      query.features <- rownames(x = query[[query.assay.check]]$scale.data)
    }
    query.model.features <- rownames(x = Misc(object = query[[query.assay]])$vst.out$gene_attr)
    query.features <- unique(c(query.features, query.model.features))
    ref.model.features <- rownames(x = Misc(object = reference[[reference.assay]])$vst.out$gene_attr)
    ref.features <- unique(c(ref.features, ref.model.features))
  }
  if (!is.null(x = features)) {
    if (project.query) {
      features.new <- intersect(x = features, y = ref.features)
    } else {
      features.new <- intersect(x = features, y = query.features)
    }
    if (length(x = features.new) != length(x = features)) {
      warning(length(x = features) - length(x = features.new), " features of ",
              "the features specified were not present in both the reference ",
              "and query assays. \nContinuing with remaining ", length(x = features.new),
              " features.", immediate. = TRUE, call. = FALSE)
      features <- features.new
    }
  } else {
    if (project.query) {
      features <- intersect(
        x = VariableFeatures(object = query[[query.assay]]),
        y = ref.features
      )
    } else {
      features <- intersect(
        x = VariableFeatures(object = reference[[reference.assay]]),
        y = query.features
      )
    }
  }
  if (length(x = features) == 0) {
    stop("No features to use in finding transfer anchors. To troubleshoot, try ",
         "explicitly providing features to the features parameter and ensure that ",
         "they are present in both reference and query assays.", call. = FALSE)
  }
  ModifyParam(param = "features", value = features)
  if (!is.null(x = reference.reduction)) {
    if (project.query) {
      if (!reference.reduction %in% Reductions(object = query)) {
        stop("reference.reduction (", reference.reduction, ") is not present in ",
             "the provided query object (Note: project.query was set to TRUE).", call. = FALSE)
      }
      if (ncol(x = reference[[reference.reduction]]) < max(dims)) {
        stop("reference.reduction (", reference.reduction, ") does not contain ",
             "all the dimensions required by the dims parameter (Note: ",
             "project.query was set to TRUE).", call. = FALSE)
      }
    } else {
      if (!reference.reduction %in% Reductions(object = reference)) {
        stop("reference.reduction (", reference.reduction, ") is not present in ",
             "the provided reference object.", call. = FALSE)
      }
      if (ncol(x = reference[[reference.reduction]]) < max(dims)) {
        stop("reference.reduction (", reference.reduction, ") does not contain ",
             "all the dimensions required by the dims parameter.", call. = FALSE)
      }
    }
  } else {
    if (reduction == "lsiproject") {
      stop("Must supply a reference reduction if reduction='lsiproject'")
    }
    mdim <- max(dims)
    if (npcs < mdim) {
      warning("npcs is smaller than the largest value requested by the dims ",
              "parameter.\nSetting npcs to ", mdim, " and continuing.",
              immediate. = TRUE, call.
= FALSE) ModifyParam(param = "npcs", value = mdim) if (mdim >= length(x = features)) { stop("npcs (", npcs, ") must be smaller than the number of features (", length(x = features), "). Please either lower the npcs and/or dims ", "parameter settings or increase the size of the feature set.", call. = FALSE) } } } if (!is.null(x = reference.neighbors)) { if (!reference.neighbors %in% Neighbors(object = reference)) { stop("Specified reference.neighbors (", reference.neighbors, ") is not ", "available in the provided reference object.", call. = FALSE) } k.nn <- max(k.score, k.anchor) if (ncol(x = Indices(reference[[reference.neighbors]])) < (k.nn + 1)){ stop("k.score or k.anchor is larger than the number of neighbors ", "contained in reference.nn. Recompute reference.nn using ", "FindNeighbors with k > k.score and k > k.anchor", call. = FALSE) } } } # Helper function to validate parameters for TransferData # ValidateParams_TransferData <- function( anchorset, combined.ob, anchors, reference.cells, query.cells, reference, query, query.assay, refdata, weight.reduction, l2.norm, dims, k.weight, sd.weight, eps, n.trees, verbose, slot, only.weights, prediction.assay, label.transfer ) { ## check refdata if (is.null(refdata)) { if (!only.weights) { stop("refdata is NULL and only.weights is FALSE") } } else { if (!inherits(x = refdata, what = "list")) { refdata <- list(id = refdata) } for (i in 1:length(x = refdata)) { if (inherits(x = refdata[[i]], what = c("character", "factor"))) { # check is it's in the reference object if (length(x = refdata[[i]]) == 1) { if (is.null(x = reference)) { warning("If providing a single string to refdata element number ", i, ", please provide the reference object. Skipping element ", i, ".", call. = FALSE, immediate. = TRUE) refdata[[i]] <- FALSE next } if (refdata[[i]] %in% Assays(object = reference)) { refdata[[i]] <- GetAssayData(object = reference, assay = refdata[[i]]) colnames(x = refdata[[i]]) <- paste0(colnames(x = refdata[[i]]), "_reference") label.transfer[[i]] <- FALSE next } else if (refdata[[i]] %in% colnames(x = reference[[]])) { refdata[[i]] <- reference[[refdata[[i]]]][, 1] } else { warning("Element number ", i, " provided to refdata does not exist in ", "the provided reference object.", call. = FALSE, immediate. = TRUE) refdata[[i]] <- FALSE next } } else if (length(x = refdata[[i]]) != length(x = reference.cells)) { warning("Please provide a vector that is the same length as the number ", "of reference cells used in anchor finding.\n", "Length of vector provided: ", length(x = refdata[[i]]), "\n", "Length of vector required: ", length(x = reference.cells), "\nSkipping element ", i, ".", call. = FALSE, immediate. = TRUE) refdata[[i]] <- FALSE } label.transfer[[i]] <- TRUE } else if (inherits(x = refdata[[i]], what = c("dgCMatrix", "matrix"))) { if (ncol(x = refdata[[i]]) != length(x = reference.cells)) { warning("Please provide a matrix that has the same number of columns as ", "the number of reference cells used in anchor finding.\n", "Number of columns in provided matrix : ", ncol(x = refdata[[i]]), "\n", "Number of columns required : ", length(x = reference.cells), "\nSkipping element ", i, ".", call. = FALSE, immediate. 
= TRUE) refdata[[i]] <- FALSE } else { colnames(x = refdata[[i]]) <- paste0(colnames(x = refdata[[i]]), "_reference") if (any(!colnames(x = refdata[[i]]) == reference.cells)) { if (any(!colnames(x = refdata[[i]]) %in% reference.cells) || any(!reference.cells %in% colnames(x = refdata[[i]]))) { warning("Some (or all) of the column names of the provided refdata ", "don't match the reference cells used in anchor finding ", "\nSkipping element", i, ".", call. = FALSE, immediate. = TRUE) refdata[[i]] <- FALSE } else { refdata[[i]] <- refdata[[i]][, reference.cells] } } } if (!slot %in% c("counts", "data")) { stop("Please specify slot as either 'counts' or 'data'.") } label.transfer[[i]] <- FALSE } else { warning("Please provide either a vector (character or factor) for label ", "transfer or a matrix for feature transfer. \nType provided: ", class(x = refdata[[i]])) refdata[[i]] <- FALSE } if (names(x = refdata)[i] == "") { possible.names <- make.unique(names = c(names(x = refdata), paste0("e", i))) names(x = refdata)[i] <- possible.names[length(x = possible.names)] if (verbose) { message("refdata element ", i, " is not named. Setting name as ", names(x = refdata)[i]) } } } ModifyParam(param = "label.transfer", value = label.transfer) if (all(unlist(x = lapply(X = refdata, FUN = isFALSE)))) { stop("None of the provided refdata elements are valid.", call. = FALSE) } ModifyParam(param = "refdata", value = refdata) } object.reduction <- Reductions(object = slot(object = anchorset, name = "object.list")[[1]]) valid.weight.reduction <- c("pcaproject", "pca", "cca", "rpca.ref","lsiproject", "lsi", object.reduction) if (!inherits(x = weight.reduction, "DimReduc")) { if (!weight.reduction %in% valid.weight.reduction) { stop("Please provide one of ", paste(valid.weight.reduction, collapse = ", "), " or a custom DimReduc to ", "the weight.reduction parameter.", call. = FALSE) } if (weight.reduction %in% c("pcaproject", "cca", "rpca.ref", "lsiproject") && !weight.reduction %in% Reductions(object = combined.ob)) { stop("Specified weight.reduction (", weight.reduction, ") is not present ", "in the provided anchorset.", call. = FALSE) } if (weight.reduction %in% c("pca", "lsi") && is.null(x = query)) { stop("To use an internal PCA on the query only for weight.reduction, ", "please provide the query object.", call. = FALSE) } } if (inherits(x = weight.reduction, "DimReduc")) { if (is.null(x = dims)) { stop("Please specify dims", call. = FALSE) } if (max(dims) > ncol(x = weight.reduction)) { stop("The max of dims specified (", max(dims), ") is greater than the ", "number of dimensions in the given DimReduc (", ncol(x = weight.reduction), ").", call. = FALSE) } } else { if (is.null(x = dims) && !is.null(x = slot(object = anchorset, name = "command")$dims)) { ModifyParam(param = "dims", value = 1:length(x = slot(object = anchorset, name = "command")$dims)) } } if (!is.null(x = query)) { if (!isTRUE(x = all.equal( target = gsub(pattern = "_query", replacement = "", x = query.cells), current = colnames(x = query[[query.assay]]), check.attributes = FALSE) )) { stop("Query object provided contains a different set of cells from the ", "query used to construct the AnchorSet provided.", call. = FALSE) } } if(k.weight > nrow(x = anchors)) { stop("Please set k.weight to be smaller than the number of anchors (", nrow(x = anchors), ").", call. 
= FALSE) } } # Internal function to validate the parameters for IntegrateEmbeddings run on # an IntegrationAnchorSet object # ValidateParams_IntegrateEmbeddings_IntegrationAnchors <- function( anchorset, object.list, reductions, dims.to.integrate, k.weight, weight.reduction, sample.tree ) { nobs <- length(x = object.list) if (is.null(x = reductions)) { stop("Must supply reductions to integrate") } if (!inherits(x = reductions, what = "DimReduc")) { stop("Please provide a single pre-computed DimReduc object to the ", "reductions parameter", call. = FALSE) } else { all.cells <- make.unique(names = unname(obj = do.call( what = c, args = lapply(X = object.list, FUN = Cells))) ) if (nrow(x = reductions) != length(x = all.cells)) { stop("The number of cells in the reduction provided (", nrow(x = reductions), ") doesn't match the number of cells in the objects used to build the ", "AnchorSet (", length(x = all.cells), ").", call. = FALSE) } if (!all(Cells(x = reductions) %in% all.cells)) { stop("The cell names in the reduction provided don't match the cell names ", "present in the objects used to build the AnchorSet", call. = FALSE) } dims.to.integrate <- dims.to.integrate %||% 1:ncol(x = reductions) if (max(dims.to.integrate) > ncol(x = reductions)) { warning("Max dims.to.integrate is larger than the number of dimensions in ", "the provided reduction. Setting dims.to.integrate to 1:", ncol(x = reductions), " and continuing.", immediate. = TRUE, call. = FALSE) } ModifyParam(param = 'dims.to.integrate', value = 1:ncol(x = reductions)) } if (!is.null(x = weight.reduction)) { if (inherits(x = weight.reduction, what = "character")) { if (length(x = weight.reduction) == 1) { weight.reduction <- as.list(x = rep(x = weight.reduction, times = nobs)) } ModifyParam(param = 'weight.reduction', value = weight.reduction) for (i in 1:nobs) { if (!weight.reduction[[i]] %in% Reductions(object = object.list[[i]])) { stop("weight.reduction (", weight.reduction[[i]], ") is not present ", "in object number ", i, ".", call. = FALSE) } } } if (inherits(x = weight.reduction[[1]], what = "DimReduc")) { if (length(x = weight.reduction) != nobs) { stop("Please provide one weight.reduction for each object. ", length(x = weight.reduction), " provided, ", nobs, " required.", call. = FALSE) } for (i in 1:nobs) { if (!isTRUE(all.equal( target = Cells(x = weight.reduction[[i]]), current = Cells(x = object.list[[i]]), check.attributes = FALSE )) ) { stop("Cell names in the provided weight.reduction ", i, " don't ", "match with the cell names in object ", i, ".", call. = FALSE) } } } } min.object.size <- min(sapply(X = object.list, FUN = ncol)) if (k.weight > min.object.size) { stop("k.weight (", k.weight, ") is set larger than the number of cells in ", "the smallest object (", min.object.size, "). Please choose a smaller ", "k.weight.", call. = FALSE) } if (!is.null(x = sample.tree)) { if (ncol(x = sample.tree) != 2) { stop("Invalid sample tree. Please provide a two column matrix specifying the order of integration.") } if (min(sample.tree) < (-1 * nobs)) { stop("Invalid sample tree. 
Dataset index greater than the number of ", "objects was provided.") } } } # Internal function to validate the parameters for IntegrateEmbeddings run on # a TransferAnchorSet object # ValidateParams_IntegrateEmbeddings_TransferAnchors <- function( anchorset, combined.object , reference, query, query.assay, reductions, dims.to.integrate, k.weight, weight.reduction, reuse.weights.matrix ) { if (missing(x = reference)) { stop("Please provide the reference object.", call. = FALSE) } if (missing(x = query)) { stop("Please provide the query object.", call. = FALSE) } reference.cells <- slot(object = anchorset, name = "reference.cells") reference.cells <- gsub(pattern = "_reference", replacement = "", x = reference.cells) if (!isTRUE(x = all.equal(target = reference.cells, current = Cells(x = reference), check.attributes = FALSE))) { stop("The set of cells used as a reference in the AnchorSet does not match ", "the set of cells provided in the reference object.") } query.cells <- slot(object = anchorset, name = "query.cells") query.cells <- gsub(pattern = "_query", replacement = "", x = query.cells) if (!isTRUE(x = all.equal(target = query.cells, current = colnames(x = query[[query.assay]]), check.attributes = FALSE))) { stop("The set of cells used as a query in the AnchorSet does not match ", "the set of cells provided in the query object.") } if (length(x = reductions) != 1) { stop("Please provide a single reduction name to reductions that is present ", "in the anchorset.", call. = FALSE) } if (!reductions %in% Reductions(object = combined.object)) { stop("Please specify a reduction that is present in the anchorset: ", paste(Reductions(object = combined.object), collapse = ", "), call. = FALSE) } reference <- RenameCells(object = reference, new.names = paste0(Cells(x = reference), "_reference")) reference.embeddings <- Embeddings(object = combined.object[[reductions]])[Cells(x = reference), ] reference[[reductions]] <- CreateDimReducObject(embeddings = reference.embeddings, assay = DefaultAssay(object = reference)) ModifyParam(param = "reference", value = reference) query <- RenameCells(object = query, new.names = paste0(Cells(x = query), "_query")) query.embeddings <- Embeddings(object = combined.object[[reductions]])[Cells(x = query[[query.assay]]), ] query[[reductions]] <- CreateDimReducObject(embeddings = query.embeddings, assay = DefaultAssay(object = query)) ModifyParam(param = "query", value = query) ModifyParam(param = "reductions", value = c(reductions, reductions)) min.ndim <- min(ncol(x = query[[reductions[2]]]), ncol(x = reference[[reductions[1]]])) if (is.null(x = dims.to.integrate)) { dims.to.integrate <- 1:min.ndim } else { if (max(dims.to.integrate) > min.ndim) { dims.to.integrate <- dims.to.integrate[dims.to.integrate <= min.ndim] warning("Max dims.to.integrate is larger than the max dims for at least ", "one of the reductions specified. Setting dims.to.integrate to ", paste(dims.to.integrate, collapse = ","), " and continuing.", immediate. = TRUE, call. = FALSE) } } ModifyParam(param = "dims.to.integrate", value = dims.to.integrate) if (isTRUE(x = reuse.weights.matrix)) { weights.matrix <- Tool(object = query, slot = "TransferData")$weights.matrix if (is.null(x = weights.matrix)) { message("Requested to reuse weights matrix, but no weights found. 
Computing new weights.") reuse.weights.matrix <- FALSE } else if (nrow(x = weights.matrix) != nrow(x = slot(object = anchorset, name = "anchors"))) { stop("The number of anchors in the weights matrix stored in the query (", nrow(x = weights.matrix), ") doesn't match the number of anchors ", "in the anchorset (", nrow(x = slot(object = anchorset, name = "anchors")), ").", call. = FALSE) } else { ModifyParam(param = 'weights.matrix', value = weights.matrix) } } # check T/F again due to possible modification in above if (isFALSE(x = reuse.weights.matrix)) { if (k.weight > ncol(x = query)) { stop("k.weight (", k.weight, ") is set larger than the number of cells in ", "the query object (", ncol(x = query), "). Please choose a smaller ", "k.weight.", call. = FALSE) } if (inherits(x = weight.reduction, what = "list")) { if (length(x = weight.reduction) > 2) { stop("Supplied too many dimension reduction objects for weight.reduction. ", "Should supply a single DimReduc object.") } if (length(x = weight.reduction) == 2) { # take the second element as the dimreduc to use for query weight.reduction <- weight.reduction[[2]] } } if (inherits(x = weight.reduction, what = "character")) { if (length(x = weight.reduction) > 2) { stop("Supplied too many dimension reduction names for weight.reduction. ", "Should supply the name of a single DimReduc present in the query.") } if (length(x = weight.reduction) == 2) { # take the second element as the dimreduc to use for query weight.reduction <- weight.reduction[[2]] } if (!weight.reduction %in% Reductions(object = query)) { stop("The weight.reduction ", weight.reduction, " is not present in the ", "query object.", call. = FALSE) } ModifyParam(param = 'weight.reduction', value = list(NULL, query[[weight.reduction]])) } if (inherits(x = weight.reduction, what = "DimReduc")) { weight.reduction <- RenameCells(object = weight.reduction, new.names = paste0(Cells(x = weight.reduction), "_query")) if (!isTRUE(all.equal( target = Cells(x = weight.reduction), current = Cells(x = query), check.attributes = FALSE ))) { stop("Cell names in the provided weight.reduction don't ", "match with the cell names in the query object.", call. 
= FALSE)
      }
      ModifyParam(param = 'weight.reduction', value = list(NULL, weight.reduction))
    }
  }
}

#' Convert Neighbor class to an asymmetrical Graph class
#'
#'
#' @param nn.object A Neighbor object
#' @param col.cells Cell names to use for the columns of the Graph; the cell names in nn.object are used by default
#' @param weighted Whether to use the neighbor distances as edge weights in the Graph
#'
#' @return Returns a Graph object
#'
#' @importFrom Matrix sparseMatrix
#'
#' @export
#'
NNtoGraph <- function(
  nn.object,
  col.cells = NULL,
  weighted = FALSE
) {
  select_nn <- Indices(object = nn.object)
  col.cells <- col.cells %||% Cells(x = nn.object)
  ncol.nn <- length(x = col.cells)
  k.nn <- ncol(x = select_nn)
  j <- as.numeric(x = t(x = select_nn))
  i <- ((1:length(x = j)) - 1) %/% k.nn + 1
  if (weighted) {
    select_nn_dist <- Distances(object = nn.object)
    dist.element <- as.numeric(x = t(x = select_nn_dist))
    nn.matrix <- sparseMatrix(
      i = i,
      j = j,
      x = dist.element,
      dims = c(nrow(x = select_nn), ncol.nn)
    )
  } else {
    nn.matrix <- sparseMatrix(
      i = i,
      j = j,
      x = 1,
      dims = c(nrow(x = select_nn), ncol.nn)
    )
  }
  rownames(x = nn.matrix) <- Cells(x = nn.object)
  colnames(x = nn.matrix) <- col.cells
  nn.matrix <- as.Graph(x = nn.matrix)
  return(nn.matrix)
}

# Find anchors directly from an assay
#
#
# @return Returns a TransferAnchorSet or IntegrationAnchorSet object
FindAssayAnchor <- function(
  object.list,
  reference = NULL,
  anchor.type = c("Transfer", "Integration"),
  assay = "Bridge",
  slot = "data",
  reduction = NULL,
  k.anchor = 20,
  k.score = 50,
  verbose = TRUE
) {
  anchor.type <- match.arg(arg = anchor.type)
  reduction.name <- reduction %||% paste0(assay, ".reduc")
  if (is.null(x = reduction) || !reduction %in% Reductions(object.list[[1]])) {
    object.list <- lapply(object.list, function(x) {
      if (is.null(reduction)) {
        x[[reduction.name]] <- CreateDimReducObject(
          embeddings = t(GetAssayData(
            object = x,
            slot = slot,
            assay = assay
          )),
          key = "L_",
          assay = assay
        )
      }
      DefaultAssay(x) <- assay
      x <- DietSeurat(x, assays = assay, dimreducs = reduction.name)
      return(x)
    })
  }
  object.both <- merge(object.list[[1]], object.list[[2]], merge.dr = reduction.name)
  objects.ncell <- sapply(X = object.list, FUN = function(x) dim(x = x)[2])
  offsets <- as.vector(x = cumsum(x = c(0, objects.ncell)))[1:length(x = object.list)]
  if (verbose) {
    message("Finding ", anchor.type, " anchors from assay ", assay)
  }
  anchors <- FindAnchors(object.pair = object.both,
                         assay = c(DefaultAssay(object.both), DefaultAssay(object.both)),
                         slot = 'data',
                         cells1 = colnames(object.list[[1]]),
                         cells2 = colnames(object.list[[2]]),
                         internal.neighbors = NULL,
                         reduction = reduction.name,
                         k.anchor = k.anchor,
                         k.score = k.score,
                         dims = 1:ncol(object.both[[reduction.name]]),
                         k.filter = NA,
                         verbose = verbose
  )
  inte.anchors <- anchors
  inte.anchors[, 1] <- inte.anchors[, 1] + offsets[1]
  inte.anchors[, 2] <- inte.anchors[, 2] + offsets[2]
  # determine all anchors
  inte.anchors <- rbind(inte.anchors, inte.anchors[, c(2, 1, 3)])
  inte.anchors <- AddDatasetID(
    anchor.df = inte.anchors,
    offsets = offsets,
    obj.lengths = objects.ncell
  )
  command <- LogSeuratCommand(object = object.list[[1]], return.command = TRUE)
  anchor.features <- rownames(object.both)
  if (anchor.type == "Integration") {
    anchor.set <- new(Class = "IntegrationAnchorSet",
                      object.list = object.list,
                      reference.objects = reference %||% seq_along(object.list),
                      anchors = inte.anchors,
                      weight.reduction = object.both[[reduction.name]],
                      offsets = offsets,
                      anchor.features = anchor.features,
                      command = command
    )
  } else if (anchor.type == "Transfer") {
    reference.index <- reference
    reference <-
      object.list[[reference.index]]
    query <- object.list[[setdiff(c(1,2), reference.index)]]
    query <- RenameCells(
      object = query,
      new.names = paste0(Cells(x = query), "_", "query")
    )
    reference <- RenameCells(
      object = reference,
      new.names = paste0(Cells(x = reference), "_", "reference")
    )
    combined.ob <- suppressWarnings(expr = merge(
      x = reference,
      y = query,
      merge.dr = reduction.name
    ))
    anchor.set <- new(
      Class = "TransferAnchorSet",
      object.list = list(combined.ob),
      reference.cells = colnames(x = reference),
      query.cells = colnames(x = query),
      anchors = anchors,
      anchor.features = anchor.features,
      command = command
    )
  }
  return(anchor.set)
}

#' Construct a dictionary representation for each unimodal dataset
#'
#'
#' @param object.list A list of Seurat objects
#' @param bridge.object A multi-omic bridge Seurat object that is used as the basis to
#' represent unimodal datasets
#' @param object.reduction A list of dimensional reductions from object.list to
#' be reconstructed by bridge.object
#' @param bridge.reduction A list of dimensional reductions from bridge.object used
#' to reconstruct object.reduction
#' @param laplacian.reduction Name of the bridge graph laplacian dimensional reduction
#' @param laplacian.dims Dimensions used for the bridge graph laplacian dimensional reduction
#' @param bridge.assay.name Name of the assay used to store the bridge representation (default is 'Bridge')
#' @param return.all.assays Whether to return all assays in the object.list.
#' Only the bridge assay is returned by default.
#' @param l2.norm Whether to L2-normalize the dictionary representation
#' @param verbose Print messages and progress
#'
#' @importFrom MASS ginv
#' @return Returns an object list in which each object has a bridge-cell-derived assay
#' @export
#'
BridgeCellsRepresentation <- function(object.list,
                                      bridge.object,
                                      object.reduction,
                                      bridge.reduction,
                                      laplacian.reduction = 'lap',
                                      laplacian.dims = 1:50,
                                      bridge.assay.name = "Bridge",
                                      return.all.assays = FALSE,
                                      l2.norm = TRUE,
                                      verbose = TRUE
) {
  my.lapply <- ifelse(
    test = verbose && nbrOfWorkers() == 1,
    yes = pblapply,
    no = future_lapply
  )
  if (verbose) {
    message("Constructing Bridge-cells representation")
  }
  single.object = FALSE
  if (length(x = object.list) == 1 &
      inherits(x = object.list, what = 'Seurat')
  ) {
    object.list <- list(object.list)
    single.object = TRUE
  }
  dims.list <- list()
  for (i in 1:length(object.reduction)) {
    ref.dims <- list(
      object = Misc(object.list[[i]][[object.reduction[[i]]]], slot = 'ref.dims'),
      bridge = Misc(bridge.object[[bridge.reduction[[i]]]], slot = 'ref.dims')
    )
    all.dims <- list(
      object = 1:ncol(object.list[[i]][[object.reduction[[i]]]]),
      bridge = 1:ncol(bridge.object[[bridge.reduction[[i]]]])
    )
    projected.dims.index <- which(sapply(ref.dims, function(x) !is.null(x)))
    if (length(projected.dims.index) == 0) {
      warning('No reference dims found in the dimensional reduction,',
              ' all dims in the dimensional reduction will be used.')
      if (length(x = all.dims[[1]]) == length(x = all.dims[[2]])) {
        dims.list[[i]] <- all.dims
      } else {
        stop('The number of dimensions in the object.list ', object.reduction[[i]],
             ' (', length(all.dims[[1]]), ')',
             ' and the number of dimensions in the bridge object ', bridge.reduction[[i]],
             ' (', length(all.dims[[2]]), ')',
             ' is different.')
      }
    } else {
      reference.dims.index <- setdiff(c(1:2), projected.dims.index)
      dims.list[[i]] <- list()
      dims.list[[i]][[reference.dims.index]] <- ref.dims[[projected.dims.index]]
      dims.list[[i]][[projected.dims.index]] <- all.dims[[projected.dims.index]]
      names(dims.list[[i]]) <- c('object', 'bridge')
    }
  }
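  # The lapply below builds the dictionary representation: for each dataset,
  # MASS::ginv() gives the Moore-Penrose pseudoinverse of the bridge embeddings,
  # so the bridge-cell weights are (roughly)
  #   X = Embeddings(object)[, dims$object] %*% ginv(Embeddings(bridge.object)[, dims$bridge])
  # and, when a graph laplacian reduction is supplied, X is additionally
  # projected onto its eigenvectors before the optional L2 normalization.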
  object.list <- my.lapply(
    X = 1:length(x = object.list),
    FUN = function(x) {
      SA.inv <- ginv(
        X = Embeddings(
          object = bridge.object,
          reduction = bridge.reduction[[x]]
        )[, dims.list[[x]]$bridge]
      )
      if (!is.null(laplacian.reduction)) {
        lap.vector <- Embeddings(bridge.object[[laplacian.reduction]])[, laplacian.dims]
        X <- Embeddings(
          object = object.list[[x]],
          reduction = object.reduction[[x]]
        )[, dims.list[[x]]$object] %*% (SA.inv %*% lap.vector)
      } else {
        X <- Embeddings(
          object = object.list[[x]],
          reduction = object.reduction[[x]]
        )[, dims.list[[x]]$object] %*% SA.inv
        colnames(X) <- Cells(bridge.object)
      }
      if (l2.norm) {
        X <- L2Norm(mat = X, MARGIN = 1)
      }
      colnames(x = X) <- paste0('bridge_', colnames(x = X))
      suppressWarnings(
        object.list[[x]][[bridge.assay.name]] <- CreateAssayObject(data = t(X))
      )
      object.list[[x]][[bridge.assay.name]]@misc$SA.inv <- SA.inv
      DefaultAssay(object.list[[x]]) <- bridge.assay.name
      VariableFeatures(object = object.list[[x]]) <- rownames(object.list[[x]])
      return(object.list[[x]])
    }
  )
  if (!return.all.assays) {
    object.list <- my.lapply(
      X = object.list,
      FUN = function(x) {
        x <- DietSeurat(object = x, assays = bridge.assay.name, scale.data = TRUE)
        return(x)
      }
    )
  }
  if (single.object) {
    object.list <- object.list[[1]]
  }
  return(object.list)
}
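# A minimal usage sketch for the dictionary step above (comments only, not run);
# 'obj.rna', 'obj.atac', and 'bridge' are hypothetical Seurat objects, and the
# reduction names are illustrative assumptions rather than required values:
#   obj.list <- BridgeCellsRepresentation(
#     object.list = list(obj.rna, obj.atac),
#     bridge.object = bridge,
#     object.reduction = list("pca", "lsi"),
#     bridge.reduction = list("pca", "lsi"),
#     laplacian.reduction = "lap",
#     laplacian.dims = 1:50
#   )
# Each returned object then carries a 'Bridge' assay whose features correspond to
# bridge cells (or to laplacian components when a laplacian reduction is supplied).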
) { anchor.type <- match.arg(arg = anchor.type) reduction <- match.arg(arg = reduction) if (!is.null(laplacian.reduction)) { bridge.method <- "bridge graph" } else { bridge.method <- "bridge cells" } if (verbose) { message("Finding ", anchor.type," anchors") switch( EXPR = bridge.method, "bridge graph" = { message('Transform cells to bridge graph laplacian space') }, "bridge cells" = { message('Transform cells to bridge cells space') } ) } reference <- reference %||% c(1) query <- setdiff(c(1,2), reference) if (anchor.type == "Transfer") { stored.bridge.weights <- FALSE # check weight matrix if (is.null(bridge.object@tools$MapQuery)) { warning("No weights stored between reference and bridge obejcts.", "Please set store.weights to TRUE in MapQuery") } else if (is.null(object.list[[query]]@tools$MapQuery)) { warning("No weights stored between query and bridge obejcts.", "Please set store.weights to TRUE in MapQuery") } else { stored.bridge.weights <- TRUE } } if (reference.bridge.stored) { object.list[[query]] <- BridgeCellsRepresentation( object.list = object.list[[query]] , bridge.object = bridge.object, object.reduction = object.reduction[[query]] , bridge.reduction = bridge.reduction[[query]] , bridge.assay.name = bridge.assay.name, laplacian.reduction = laplacian.reduction, laplacian.dims = laplacian.dims, verbose = verbose ) } else { object.list <- BridgeCellsRepresentation( object.list = object.list , bridge.object = bridge.object, object.reduction = object.reduction, bridge.reduction = bridge.reduction, bridge.assay.name = bridge.assay.name, laplacian.reduction = laplacian.reduction, laplacian.dims = laplacian.dims, verbose = verbose ) } if (reduction == "direct") { anchor <- FindAssayAnchor( object.list = object.list , reference = reference, slot = "data", anchor.type = anchor.type, assay = bridge.assay.name, k.anchor = k.anchor, k.score = k.score, verbose = verbose ) } else if (reduction == "cca") { # set data slot to scale.data slot object.list <- lapply( X = object.list, FUN = function(x) { x <- SetAssayData( object = x, slot = "scale.data", new.data = as.matrix( x = GetAssayData(object = x, slot = "data") )) return(x) } ) anchor <- switch(EXPR = anchor.type, "Integration" = { anchor <- FindIntegrationAnchors( object.list = object.list, k.filter = NA, reference = reference, reduction = "cca", scale = FALSE, k.anchor = k.anchor, k.score = k.score, verbose = verbose, ...) object.merge <- merge(x = object.list[[1]], y = object.list[2:length(object.list)] ) slot( object = anchor, name = "weight.reduction" ) <- CreateDimReducObject( embeddings = t(GetAssayData( object = object.merge, slot = 'data' )), key = "L_", assay = bridge.assay.name ) anchor }, "Transfer" = { anchor <- FindTransferAnchors( reference = object.list[[reference]], query = object.list[[query]], reduction = "cca", scale = FALSE, k.filter = NA, k.anchor = k.anchor, k.score = k.score, verbose = verbose, ... 
) } ) } if (anchor.type == "Transfer") { if (stored.bridge.weights) { slot( object = anchor,name = "weight.reduction" )@misc$bridge.sets <- list( bridge.weights = slot(object = bridge.object, name = "tools" )$MapQuery_PrepareBridgeReference$weights.matrix, bridge.ref_anchor = slot(object = bridge.object, name = "tools" )$MapQuery_PrepareBridgeReference$anchor[,1], query.weights = slot(object = object.list[[query]], name = "tools" )$MapQuery$weights.matrix, query.ref_anchor = slot(object = object.list[[query]], name = "tools" )$MapQuery$anchor[,1] ) } } slot(object = anchor, name = "command") <- LogSeuratCommand( object = object.list[[1]], return.command = TRUE ) return(anchor) } # Helper function to transfer labels based on neighbors object # @param nn.object the query neighbors object # @param reference.object the reference seurat object # @param group.by A vector of variables to group cells by # @param weight.matrix A reference x query cell weight matrix # @return Returns a list for predicted labels, prediction score and matrix #' @importFrom Matrix sparseMatrix #' @importFrom fastDummies dummy_cols #' @importFrom Matrix rowMeans t #' TransferLablesNN <- function( nn.object = NULL, weight.matrix = NULL, reference.labels ){ reference.labels.matrix <- CreateCategoryMatrix(labels = as.character(reference.labels)) if (!is.null(x = weight.matrix) & !is.null(x = nn.object)) { warning('both nn.object and weight matrix are set. Only weight matrix is used for label transfer') } if (is.null(x = weight.matrix)) { select_nn <- Indices(nn.object) k.nn <- ncol(select_nn) j <- as.numeric(x = t(x = select_nn )) i <- ((1:length(x = j)) - 1) %/% k.nn + 1 nn.matrix <- sparseMatrix( i = i, j = j, x = 1, dims = c(nrow(select_nn), nrow(reference.labels.matrix)) ) rownames(nn.matrix) <- Cells(nn.object) } else if (nrow(weight.matrix) == nrow(reference.labels.matrix)) { nn.matrix <- t(weight.matrix) k.nn <- 1 } else if (ncol(weight.matrix) == nrow(reference.labels.matrix)) { nn.matrix <- weight.matrix k.nn <- 1 } else { stop('wrong weights matrix input') } query.label.mat <- nn.matrix %*% reference.labels.matrix query.label.mat <- query.label.mat/k.nn prediction.max <- apply(X = query.label.mat, MARGIN = 1, FUN = which.max) query.label <- colnames(x = query.label.mat)[prediction.max] query.label.score <- apply(X = query.label.mat, MARGIN = 1, FUN = max) names(query.label) <- names(query.label.score) <- rownames(query.label.mat) if (is.factor(reference.labels)) { levels(query.label) <- levels(reference.labels) } output.list <- list(labels = query.label, scores = query.label.score, prediction.mat = query.label.mat ) return(output.list) } # transfer continuous value based on neighbors # TransferExpressionNN<- function( nn.object, reference.object, var.name = NULL ) { nn.matrix <- NNtoGraph(nn.object = nn.object, col.cells = Cells(reference.object) ) reference.exp.matrix <- FetchData(object = reference.object, vars = var.name) # remove NA reference.exp.matrix <- reference.exp.matrix[complete.cases(reference.exp.matrix), ,drop= F] nn.matrix <- nn.matrix[, rownames(reference.exp.matrix)] # remove NO neighbor query nn.sum <- RowSumSparse(mat = nn.matrix) nn.matrix <- nn.matrix[nn.sum > 2, ] nn.sum <- nn.sum[nn.sum>2] # transfer data reference.exp.matrix <- as.matrix(reference.exp.matrix) query.exp.mat <- nn.matrix %*% reference.exp.matrix query.exp.mat <- sweep(x = query.exp.mat, MARGIN = 1, STATS = nn.sum, FUN = "/") # set output for all query cells query.exp.all <- data.frame(row.names = Cells(nn.object)) 
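  # TransferLablesNN and this helper share the same mechanism: predictions are a
  # sparse matrix product between a query x reference neighbor (or weight)
  # matrix and a reference x value matrix (a one-hot label matrix for label
  # transfer, the fetched expression values here). A commented-out toy sketch of
  # the label-voting case, using hypothetical inputs (ref.labels and nn.idx are
  # illustrative, not objects available in this function):
  #   library(Matrix)
  #   ref.labels <- factor(c("B", "B", "T", "T", "NK"))    # labels of 5 reference cells
  #   label.mat  <- sparse.model.matrix(~ 0 + ref.labels)  # 5 x 3 one-hot matrix
  #   nn.idx     <- rbind(c(1, 2), c(3, 4))                # 2 query cells, k = 2 neighbors each
  #   nn.mat     <- sparseMatrix(i = rep(1:2, each = 2),
  #                              j = as.vector(t(nn.idx)),
  #                              x = 1, dims = c(2, 5))
  #   votes <- (nn.mat %*% label.mat) / ncol(nn.idx)       # 2 x 3 matrix of vote fractions
  #   colnames(label.mat)[apply(votes, 1, which.max)]      # predicted label per query cell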
query.exp.all[rownames(query.exp.mat),1] <- query.exp.mat[,1] colnames(query.exp.all) <- var.name return(query.exp.all) } #' @param reduction.name dimensional reduction name, lap by default #' @param graph The name of graph #' @rdname RunGraphLaplacian #' @concept dimensional_reduction #' @export #' @method RunGraphLaplacian Seurat #' RunGraphLaplacian.Seurat <- function( object, graph, reduction.name = "lap", reduction.key ="LAP_", n = 50, verbose = TRUE, ... ) { lap_dir <- RunGraphLaplacian(object = object[[graph]], n = n, reduction.key = reduction.key , verbose = verbose, ... ) object[[reduction.name]] <- lap_dir return(object) } #' @param n Total Number of Eigenvectors to compute and store (50 by default) #' @param reduction.key dimensional reduction key, specifies the string before #' the number for the dimension names. LAP by default #' @param verbose Print message and process #' @param ... Arguments passed to eigs_sym #' #' #' @concept dimensional_reduction #' @rdname RunGraphLaplacian #' @export #' #' @importFrom Matrix diag t rowSums #' @importFrom RSpectra eigs_sym RunGraphLaplacian.default <- function(object, n = 50, reduction.key ="LAP_", verbose = TRUE, ... ) { if (!all( slot(object = t(x = object), name = "x") == slot(object = object, name = "x") )) { stop("Input graph is not symmetric") } if (verbose) { message("Generating normalized laplacian graph") } D_half <- sqrt(x = rowSums(x = object)) L <- -1 * (t(object / D_half) / D_half) diag(L) <- 1 + diag(L) if (verbose) { message("Performing eigendecomposition of the normalized laplacian graph") } L_eigen <- eigs_sym(L, k = n + 1, which = "SM", ...) #delete the first eigen vector new_order <- n:1 lap_output <- list(eigen_vector = Re(L_eigen$vectors[, new_order]), eigen_value = L_eigen$values[new_order] ) rownames(lap_output$eigen_vector) <- colnames(object) colnames(lap_output$eigen_vector) <- paste0(reduction.key, 1:n ) lap_dir <- CreateDimReducObject(embeddings = lap_output$eigen_vector, key = reduction.key, assay = DefaultAssay(object), stdev = lap_output$eigen_value ) return(lap_dir) } # Check if the var.name already existed in the meta.data # CheckMetaVarName <- function(object, var.name) { if (var.name %in% colnames(x = object[[]])) { var.name.exist <- var.name var.name <- rev( x = make.unique( names = c(colnames(object[[]]), var.name.exist) ) )[1] warning(var.name.exist, " is already existed in the meta.data. ", var.name, " will store leverage score value") } return(var.name) } # Run hnsw to find neighbors # # @param data Data to build the index with # @param query A set of data to be queried against data # @param metric Distance metric; can be one of "euclidean", "cosine", "manhattan", # "hamming" # @param k Number of neighbors # @param ef_construction A larger value means a better quality index, but increases build time. # @param ef Higher values lead to improved recall at the expense of longer search time. # @param n_threads Maximum number of threads to use. 
# @param index optional index object, will be recomputed if not provided #' @importFrom RcppHNSW hnsw_build hnsw_search # HnswNN <- function(data, query = data, metric = "euclidean", k, ef_construction = 200, ef = 10, index = NULL, n_threads = 0 ) { idx <- index %||% hnsw_build( X = data, distance = metric, ef = ef_construction, n_threads = n_threads ) nn <- hnsw_search( X = query, ann = idx, k = k, ef = ef, n_threads = n_threads ) names(nn) <- c("nn.idx", "nn.dists") nn$idx <- idx nn$alg.info <- list(metric = metric, ndim = ncol(x = data)) return(nn) } # Calculate reference index from the integrated object # IntegrationReferenceIndex <- function(object) { if (is.null(object@tools$Integration@sample.tree)) { reference.index <- object@commands$FindIntegrationAnchors$reference if (length(x = reference.index) > 1) { stop('the number of the reference is bigger than 1') } } else { reference.index <- SampleIntegrationOrder(tree = object@tools$Integration@sample.tree)[1] } return(reference.index) } # Calculate mean and sd # SparseMeanSd <- function(object, assay = NULL, slot = 'data', features = NULL, eps = 1e-8 ){ assay <- assay%||% DefaultAssay(object) features <- features %||% rownames(object[[assay]]) assay <- assay %||% DefaultAssay(object = object) mat <- GetAssayData(object = object[[assay]], slot = slot)[features,] if (class(mat)[1] !='dgCMatrix'){ stop('Matrix is not sparse') } mat.mean <- RowMeanSparse(mat) mat.sd <- sqrt(RowVarSparse(mat)) names(mat.mean) <- names(mat.sd) <- rownames(mat) mat.sd <- MinMax(data = mat.sd, min = eps, max = max(mat.sd)) output <- list(mean = mat.mean, sd = mat.sd) return(output) } # Run PCA on sparse matrix # #' @importFrom Matrix t #' @importFrom rlang exec #' @importFrom irlba irlba # # RunPCA_Sparse <- function( object, features = NULL, reduction.key = "PCsp_", reduction.name = "pca.sparse", npcs = 50, do.scale = TRUE, verbose = TRUE ) { features <- features %||% VariableFeatures(object) data <- GetAssayData(object = object, slot = "data")[features,] n <- npcs args <- list(A = t(data), nv = n) args$center <- RowMeanSparse(data) feature.var <- RowVarSparse(data) args$totalvar <- sum(feature.var) if (do.scale) { args$scale <- sqrt(feature.var) args$scale <- MinMax(args$scale, min = 1e-8, max = max(args$scale)) } else { args$scale <- FALSE } if (verbose) { message("Running PCA") } pca.irlba <- exec(.fn = irlba, !!!args) sdev <- pca.irlba$d/sqrt(max(1, ncol(data) - 1)) feture.loadings <- pca.irlba$v rownames(feture.loadings) <- rownames(data) embeddings <- sweep(x = pca.irlba$u, MARGIN = 2, STATS = pca.irlba$d, FUN = "*") rownames(embeddings) <- colnames(data) colnames(feture.loadings) <- colnames(embeddings) <- paste0(reduction.key, 1:npcs) object[[reduction.name]] <- CreateDimReducObject( embeddings = embeddings, loadings = feture.loadings, stdev = sdev, key = reduction.key, assay = DefaultAssay(object), misc = list(d = pca.irlba$d) ) return(object) } # Smoothing labels based on the clusters # @param labels the original labels # @param clusters the clusters that are used to smooth labels # SmoothLabels <- function(labels, clusters) { cluster.set <- unique(clusters) smooth.labels <- labels for (c in cluster.set) { cell.c <- which(clusters == c) smooth.labels[cell.c] <- names(sort(table(labels[cell.c]), decreasing = T)[1]) } return(smooth.labels) } #' Project query data to reference dimensional reduction #' #' @param query Query object #' @param reference Reference object #' @param mode Projection mode name for projection #' \itemize{ #' \item{pcaproject: 
PCA projection} #' \item{lsiproject: LSI projection} #' } #' @param reference.reduction Name of dimensional reduction in the reference object #' @param combine Determine if query and reference objects are combined #' @param query.assay Assay used for query object #' @param reference.assay Assay used for reference object #' @param features Features used for projection #' @param do.scale Determine if scale expression matrix in the pcaproject mode #' @param reduction.name dimensional reduction name, reference.reduction is used by default #' @param reduction.key dimensional reduction key, the key in reference.reduction #' is used by default #' @param verbose Print progress and message #' #' @return Returns a query-only or query-reference combined seurat object #' @export ProjectDimReduc <- function(query, reference, mode = c('pcaproject', 'lsiproject'), reference.reduction, combine = FALSE, query.assay = NULL, reference.assay = NULL, features = NULL, do.scale = TRUE, reduction.name = NULL, reduction.key= NULL, verbose = TRUE ) { query.assay <- query.assay %||% DefaultAssay(object = query) reference.assay <- reference.assay %||% DefaultAssay(object = reference) DefaultAssay(object = query) <- query.assay DefaultAssay(object = reference) <- reference.assay reduction.name <- reduction.name %||% reference.reduction reduction.key <- reduction.key %||% Key(object = reference[[reference.reduction]]) if (reduction.name %in% Reductions(object = query)) { warning(reduction.name, ' already exists in the query object. It will be overwritten.' ) } features <- features %||% rownames(x = Loadings(object = reference[[reference.reduction]])) features <- intersect(x = features, y = rownames(x = query)) if (mode == 'lsiproject') { if (verbose) { message('LSI projection to ', reference.reduction) } projected.embeddings <- ProjectSVD( reduction = reference[[reference.reduction]], data = GetAssayData(object = query, assay = query.assay, slot = "data"), mode = "lsi", do.center = FALSE, do.scale = FALSE, features = features, use.original.stats = FALSE, verbose = verbose ) } else if (mode == 'pcaproject') { if (inherits(query[[query.assay]], what = 'SCTAssay')) { if (verbose) { message('PCA projection to ', reference.reduction, ' in SCT assay') } query <- suppressWarnings( expr = GetResidual(object = query, assay = query.assay, features = features, verbose = FALSE) ) query.mat <- GetAssayData(object = query, slot = 'scale.data')[features,] projected.embeddings <- t( crossprod(x = Loadings( object = reference[[reference.reduction]])[features, ], y = query.mat ) ) } else { if (verbose) { message('PCA projection to ', reference.reduction) } projected.embeddings <- ProjectCellEmbeddings( reference = reference, reduction = reference.reduction, query = query, scale = do.scale, dims = 1:ncol(reference[[reference.reduction]]), feature.mean = NULL, verbose = verbose ) } } query[[reduction.name]] <- CreateDimReducObject( embeddings = projected.embeddings, loadings = Loadings(reference[[reference.reduction]])[features,], assay = query.assay, key = reduction.key, misc = Misc(reference[[reference.reduction]]) ) if (combine) { query <- DietSeurat(object = query, dimreducs = reduction.name, features = features, assays = query.assay ) reference <- DietSeurat(object = reference, dimreducs = reference.reduction, features = features, assays = reference.assay) suppressWarnings( combine.obj <- merge(query, reference, merge.dr = c(reduction.name, reference.reduction) ) ) Idents(combine.obj) <- c(rep(x = 'query', times = ncol(query)), rep(x 
= 'reference', times = ncol(reference)) ) return(combine.obj) } else { return(query) } } #' Prepare the bridge and reference datasets #' #' Preprocess the multi-omic bridge and unimodal reference datasets into #' an extended reference. #' This function performs the following three steps: #' 1. Performs within-modality harmonization between bridge and reference #' 2. Performs dimensional reduction on the SNN graph of bridge datasets via #' Laplacian Eigendecomposition #' 3. Constructs a bridge dictionary representation for unimodal reference cells #' #' @param reference A reference Seurat object #' @param bridge A multi-omic bridge Seurat object #' @param reference.reduction Name of dimensional reduction of the reference object (default is 'pca') #' @param reference.dims Number of dimensions used for the reference.reduction (default is 50) #' @param normalization.method Name of normalization method used: LogNormalize #' or SCT #' @param reference.assay Assay name for reference (default is \code{\link{DefaultAssay}}) #' @param bridge.ref.assay Assay name for bridge used for reference mapping. RNA by default #' @param bridge.query.assay Assay name for bridge used for query mapping. ATAC by default #' @param supervised.reduction Type of supervised dimensional reduction to be performed #' for integrating the bridge and query. #' #' Options are: #' \itemize{ #' \item{slsi: Perform supervised LSI as the dimensional reduction for #' the bridge-query integration} #' \item{spca: Perform supervised PCA as the dimensional reduction for #' the bridge-query integration} #' \item{NULL: no supervised dimensional reduction will be calculated. #' bridge.query.reduction is used for the bridge-query integration} #' } #' @param bridge.query.reduction Name of dimensions used for the bridge-query harmonization. #' 'bridge.query.reduction' and 'supervised.reduction' cannot be NULL together. #' @param bridge.query.features Features used for bridge query dimensional reduction #' (default is NULL which uses VariableFeatures from the bridge object) #' @param laplacian.reduction.name Name of dimensional reduction name of graph laplacian eigenspace (default is 'lap') #' @param laplacian.reduction.key Dimensional reduction key (default is 'lap_') #' @param laplacian.reduction.dims Number of dimensions used for graph laplacian eigenspace (default is 50) #' @param verbose Print progress and message (default is TRUE) #' #' @export #' @return Returns a \code{BridgeReferenceSet} that can be used as input to #' \code{\link{FindBridgeTransferAnchors}}. 
#' The parameters used are stored in the \code{BridgeReferenceSet} as well #' PrepareBridgeReference <- function ( reference, bridge, reference.reduction = 'pca', reference.dims = 1:50, normalization.method = c('SCT', 'LogNormalize'), reference.assay = NULL, bridge.ref.assay = 'RNA', bridge.query.assay = 'ATAC', supervised.reduction = c('slsi', 'spca', NULL), bridge.query.reduction = NULL, bridge.query.features = NULL, laplacian.reduction.name = 'lap', laplacian.reduction.key = 'lap_', laplacian.reduction.dims = 1:50, verbose = TRUE ) { ## checking if (!is.null(supervised.reduction)) { supervised.reduction <- match.arg(arg = supervised.reduction) } if (!is.null(x = bridge.query.reduction) & !is.null(x = supervised.reduction)) { stop('bridge.query.reduction and supervised.reduction can only set one.', 'If you want to set bridge.query.reduction, supervised.reduction should set to NULL') } if (is.null(x = bridge.query.reduction) & is.null(x = supervised.reduction)) { stop('Both bridge.query.reduction and supervised.reduction are NULL. One of them needs to be set') } bridge.query.features <- bridge.query.features %||% VariableFeatures(object = bridge[[bridge.query.assay]]) if (length(x = bridge.query.features) == 0) { stop('bridge object ', bridge.query.assay, ' assay has no variable genes and bridge.query.features has no input') } # modality harmonization reference.assay <- reference.assay %||% DefaultAssay(reference) DefaultAssay(reference) <- reference.assay DefaultAssay(bridge) <- bridge.ref.assay ref.anchor <- FindTransferAnchors( reference = reference, reference.reduction = reference.reduction, normalization.method = normalization.method, dims = reference.dims, query = bridge, recompute.residuals = TRUE, features = rownames(reference[[reference.reduction]]@feature.loadings), k.filter = NA, verbose = verbose ) bridge <- MapQuery(anchorset = ref.anchor, reference = reference, query = bridge, store.weights = TRUE, verbose = verbose ) bridge.ref.reduction <- paste0('ref.', reference.reduction) bridge <- FindNeighbors(object = bridge, reduction = bridge.ref.reduction, dims = 1:ncol(x = bridge[[bridge.ref.reduction]]), return.neighbor = FALSE, graph.name = c('bridge.ref.nn', 'bridge.ref.snn'), prune.SNN = 0) bridge <- RunGraphLaplacian(object = bridge, graph = "bridge.ref.snn", reduction.name = laplacian.reduction.name, reduction.key = laplacian.reduction.key, verbose = verbose) DefaultAssay(object = bridge) <- bridge.query.assay if (!is.null(supervised.reduction)) { bridge <- switch(EXPR = supervised.reduction, 'slsi' = { bridge.reduc <- RunSLSI(object = bridge, features = VariableFeatures(bridge), graph = 'bridge.ref.nn', assay = bridge.query.assay ) bridge.reduc }, 'spca' = { bridge.reduc <- RunSPCA(object = bridge, features = VariableFeatures(bridge), graph = 'bridge.ref.snn', assay = bridge.query.assay ) bridge.reduc } ) } # bridge representation reference.bridge <- BridgeCellsRepresentation( object.list = reference, bridge.object = bridge, object.reduction = c(reference.reduction), bridge.reduction = c(bridge.ref.reduction), laplacian.reduction = laplacian.reduction.name, laplacian.dims = laplacian.reduction.dims ) reference[['Bridge']] <- reference.bridge[['Bridge']] reference <- merge(x = reference, y = bridge, merge.dr = NA) reference@tools$MapQuery_PrepareBridgeReference <- bridge@tools$MapQuery command <- LogSeuratCommand(object = reference, return.command = TRUE) slot(object = command, name = "params")$bridge.query.features <- NULL command.name <- slot(object = command, name = 
"name") reference[[command.name]] <- command return(reference) } #' Find bridge anchors between query and extended bridge-reference #' #' Find a set of anchors between unimodal query and the other unimodal reference #' using a pre-computed \code{\link{BridgeReferenceSet}}. #' This function performs three steps: #' 1. Harmonize the bridge and query cells in the bridge query reduction space #' 2. Construct the bridge dictionary representations for query cells #' 3. Find a set of anchors between query and reference in the bridge graph laplacian eigenspace #' These anchors can later be used to integrate embeddings or transfer data from the reference to #' query object using the \code{\link{MapQuery}} object. #' @param extended.reference BridgeReferenceSet object generated from #' \code{\link{PrepareBridgeReference}} #' @param query A query Seurat object #' @param query.assay Assay name for query-bridge integration #' @param scale Determine if scale the query data for projection #' @param dims Number of dimensions for query-bridge integration #' @param reduction Dimensional reduction to perform when finding anchors. #' Options are: #' \itemize{ #' \item{pcaproject: Project the PCA from the bridge onto the query. We #' recommend using PCA when bridge and query datasets are from scRNA-seq} #' \item{lsiproject: Project the LSI from the bridge onto the query. We #' recommend using LSI when bridge and query datasets are from scATAC-seq or scCUT&TAG data. #' This requires that LSI or supervised LSI has been computed for the bridge dataset, and the #' same features (eg, peaks or genome bins) are present in both the bridge #' and query. #' } #' } #' @param bridge.reduction Dimensional reduction to perform when finding anchors. Can #' be one of: #' \itemize{ #' \item{cca: Canonical correlation analysis} #' \item{direct: Use assay data as a dimensional reduction} #' } #' @param verbose Print messages and progress #' #' @export #' @return Returns an \code{AnchorSet} object that can be used as input to #' \code{\link{TransferData}}, \code{\link{IntegrateEmbeddings}} and #' \code{\link{MapQuery}}. 
#'
FindBridgeTransferAnchors <- function(
  extended.reference,
  query,
  query.assay = NULL,
  dims = 1:30,
  scale = FALSE,
  reduction = c('lsiproject', 'pcaproject'),
  bridge.reduction = c('direct', 'cca'),
  verbose = TRUE
) {
  bridge.reduction <- match.arg(arg = bridge.reduction)
  reduction <- match.arg(arg = reduction)
  query.assay <- query.assay %||% DefaultAssay(query)
  DefaultAssay(query) <- query.assay
  command.name <- grep(
    pattern = 'PrepareBridgeReference',
    x = names(slot(object = extended.reference, name = 'commands')),
    value = TRUE
  )
  params <- Command(object = extended.reference, command = command.name)
  bridge.query.assay <- params$bridge.query.assay
  bridge.query.reduction <- params$bridge.query.reduction %||% params$supervised.reduction
  reference.reduction <- params$reference.reduction
  bridge.ref.reduction <- paste0('ref.', reference.reduction)
  DefaultAssay(extended.reference) <- bridge.query.assay
  extended.reference.bridge <- DietSeurat(
    object = extended.reference,
    assays = bridge.query.assay,
    dimreducs = c(bridge.ref.reduction, bridge.query.reduction, params$laplacian.reduction.name)
  )
  query.anchor <- FindTransferAnchors(
    reference = extended.reference.bridge,
    reference.reduction = bridge.query.reduction,
    dims = dims,
    query = query,
    reduction = reduction,
    scale = scale,
    features = rownames(Loadings(extended.reference[[bridge.query.reduction]])),
    k.filter = NA,
    verbose = verbose
  )
  query <- MapQuery(
    anchorset = query.anchor,
    reference = extended.reference.bridge,
    query = query,
    store.weights = TRUE
  )
  DefaultAssay(extended.reference) <- 'Bridge'
  bridge_anchor <- FindBridgeAnchor(
    object.list = list(DietSeurat(object = extended.reference, assays = 'Bridge'), query),
    bridge.object = extended.reference.bridge,
    object.reduction = c(reference.reduction, paste0('ref.', bridge.query.reduction)),
    bridge.reduction = c(bridge.ref.reduction, bridge.query.reduction),
    anchor.type = "Transfer",
    reduction = bridge.reduction,
    reference.bridge.stored = TRUE,
    verbose = verbose
  )
  return(bridge_anchor)
}

#' Find integration bridge anchors between query and extended bridge-reference
#'
#' Find a set of anchors between a unimodal query and the other unimodal reference
#' using a pre-computed \code{\link{BridgeReferenceSet}}.
#' These integration anchors can later be used to integrate query and reference
#' using the \code{\link{IntegrateEmbeddings}} function.
#'
#' @inheritParams FindBridgeTransferAnchors
#' @param integration.reduction Dimensional reduction to perform when finding anchors
#' between query and reference.
#' Options are:
#' \itemize{
#' \item{direct: find anchors directly on the bridge representation space}
#' \item{cca: perform CCA on the bridge representation space and then find anchors
#' }
#' }
#'
#' @export
#' @return Returns an \code{AnchorSet} object that can be used as input to
#' \code{\link{IntegrateEmbeddings}}.
#'
FindBridgeIntegrationAnchors <- function(
  extended.reference,
  query,
  query.assay = NULL,
  dims = 1:30,
  scale = FALSE,
  reduction = c('lsiproject', 'pcaproject'),
  integration.reduction = c('direct', 'cca'),
  verbose = TRUE
) {
  reduction <- match.arg(arg = reduction)
  integration.reduction <- match.arg(arg = integration.reduction)
  query.assay <- query.assay %||% DefaultAssay(query)
  DefaultAssay(query) <- query.assay
  command.name <- grep(
    pattern = 'PrepareBridgeReference',
    x = names(slot(object = extended.reference, name = 'commands')),
    value = TRUE
  )
  params <- Command(object = extended.reference, command = command.name)
  bridge.query.assay <- params$bridge.query.assay
  bridge.query.reduction <- params$bridge.query.reduction %||% params$supervised.reduction
  reference.reduction <- params$reference.reduction
  bridge.ref.reduction <- paste0('ref.', params$bridge.ref.reduction)
  DefaultAssay(extended.reference) <- bridge.query.assay
  extended.reference.bridge <- DietSeurat(
    object = extended.reference,
    assays = bridge.query.assay,
    dimreducs = c(bridge.query.reduction, bridge.ref.reduction, params$laplacian.reduction.name)
  )
  query.anchor <- FindTransferAnchors(
    reference = extended.reference.bridge,
    reference.reduction = bridge.query.reduction,
    dims = dims,
    query = query,
    reduction = reduction,
    scale = scale,
    features = rownames(Loadings(extended.reference.bridge[[bridge.query.reduction]])),
    k.filter = NA,
    verbose = verbose
  )
  query <- MapQuery(
    anchorset = query.anchor,
    reference = extended.reference.bridge,
    query = query,
    store.weights = TRUE
  )
  DefaultAssay(extended.reference) <- 'Bridge'
  bridge_anchor <- FindBridgeAnchor(
    object.list = list(DietSeurat(object = extended.reference, assays = 'Bridge'), query),
    bridge.object = extended.reference.bridge,
    reduction = integration.reduction,
    object.reduction = c(reference.reduction, paste0('ref.', bridge.query.reduction)),
    bridge.reduction = c(bridge.ref.reduction, bridge.query.reduction),
    anchor.type = "Integration",
    reference.bridge.stored = TRUE,
    verbose = verbose
  )
  return(bridge_anchor)
}

#' Perform integration on the joint PCA cell embeddings.
#'
#' This is a convenience wrapper function around the following three functions
#' that are often run together when performing integration.
#'
#' \code{\link{FindIntegrationAnchors}}, \code{\link{RunPCA}},
#' \code{\link{IntegrateEmbeddings}}.
#' #' @inheritParams FindIntegrationAnchors #' @param new.reduction.name Name of integrated dimensional reduction #' @param npcs Total Number of PCs to compute and store (50 by default) #' @param findintegrationanchors.args A named list of additional arguments to #' \code{\link{FindIntegrationAnchors}} #' @param verbose Print messages and progress #' #' @importFrom rlang exec #' @return Returns a Seurat object with integrated dimensional reduction #' @export #' FastRPCAIntegration <- function( object.list, reference = NULL, anchor.features = 2000, k.anchor = 20, dims = 1:30, scale = TRUE, normalization.method = c("LogNormalize", "SCT"), new.reduction.name = 'integrated_dr', npcs = 50, findintegrationanchors.args = list(), verbose = TRUE ) { npcs <- max(npcs, dims) my.lapply <- ifelse( test = verbose && nbrOfWorkers() == 1, yes = pblapply, no = future_lapply ) reduction <- 'rpca' if (is.numeric(x = anchor.features)) { anchor.features <- SelectIntegrationFeatures( object.list = object.list, nfeatures = anchor.features, verbose = FALSE ) } if (normalization.method == 'SCT') { scale <- FALSE object.list <- PrepSCTIntegration(object.list = object.list, anchor.features = anchor.features ) } if (verbose) { message('Performing PCA for each object') } object.list <- my.lapply(X = object.list, FUN = function(x) { if (normalization.method != 'SCT') { x <- ScaleData(x, features = anchor.features, do.scale = scale, verbose = FALSE) } x <- RunPCA(x, features = anchor.features, verbose = FALSE, npcs = npcs) return(x) } ) fia.allarguments <- c(list( object.list = object.list, reference = reference, anchor.features = anchor.features, reduction = reduction, normalization.method = normalization.method, scale = scale, k.anchor = k.anchor, dims = dims, verbose = verbose ), findintegrationanchors.args ) anchor <- exec("FindIntegrationAnchors",!!!fia.allarguments) object_merged <- merge(x = object.list[[1]], y = object.list[2:length(object.list)] ) anchor.feature <- slot(object = anchor, name = 'anchor.features') if (normalization.method != 'SCT') { object_merged <- ScaleData(object = object_merged, features = anchor.feature, do.scale = scale, verbose = FALSE ) } object_merged <- RunPCA(object_merged, features = anchor.feature, verbose = FALSE, npcs = npcs ) temp <- object_merged[["pca"]] object_merged <- IntegrateEmbeddings( anchorset = anchor, reductions = object_merged[['pca']], new.reduction.name = new.reduction.name, verbose = verbose) object_merged[['pca']] <- temp VariableFeatures(object = object_merged) <- anchor.feature return(object_merged) } #' Transfer embeddings from sketched cells to the full data #' #' @param atom.data Atom data #' @param atom.cells Atom cells #' @param orig.data Original data #' @param embeddings Embeddings of atom cells #' @param sketch.matrix Sketch matrix #' #' @importFrom MASS ginv #' @importFrom Matrix t #' #' @export #' UnSketchEmbeddings <- function( atom.data, atom.cells = NULL, orig.data, embeddings, sketch.matrix = NULL ) { if(!all(rownames(atom.data) == rownames(orig.data))) { stop('features in atom.data and orig.data are not identical') } else { features = rownames(atom.data) } atom.cells <- atom.cells %||% colnames(x = atom.data) if (inherits(x = orig.data, what = 'DelayedMatrix') ) { stop("PseudobulkExpression does not support DelayedMatrix objects") } else if(inherits(x = orig.data, what = 'IterableMatrix')) { matrix.prod.function <- crossprod_BPCells } else { matrix.prod.function <- crossprod } sketch.matrix <- sketch.matrix %||% as.sparse(diag(length(features))) 
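  # The remainder of this function learns, on the atom (sketched) cells only, a
  # least-squares map from sketched feature space to the embedding,
  # ginv(t(atom.data) %*% sketch.matrix) %*% embeddings, and then applies that
  # map to every cell of the full matrix via crossprod. A commented-out toy
  # illustration with hypothetical names (full, atoms, emb and R are not
  # objects in this function):
  #   library(MASS)
  #   set.seed(42)
  #   full  <- matrix(rpois(20 * 100, lambda = 1), nrow = 20) # 20 features x 100 cells
  #   atoms <- full[, 1:25]                                   # sketched subset of cells
  #   emb   <- prcomp(x = t(atoms))$x[, 1:5]                  # embedding of atom cells
  #   R     <- diag(20)                                       # identity sketch matrix
  #   transform <- ginv(t(atoms) %*% R) %*% emb               # feature -> embedding map
  #   emb.full  <- t(crossprod(R %*% transform, full))        # 100 cells x 5 dims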
atom.data <- atom.data[, atom.cells] embeddings <- embeddings[atom.cells,] exp.mat <- as.matrix(x = t(x = atom.data) %*% sketch.matrix) sketch.transform <- ginv(X = exp.mat) %*% embeddings emb <- matrix.prod.function( x = as.matrix(sketch.matrix %*% sketch.transform), y = orig.data ) emb <- as.matrix(x = emb) return(emb) } FeatureSketch <- function(features, ratio = 0.8, seed = 123) { sketch.R <- t(x = CountSketch( nsketch = round(x = ratio * length(x = features)), ncells = length(x = features), seed = seed) ) return(sketch.R) } Seurat/R/dimensional_reduction.R0000644000176200001440000025207414525500037016432 0ustar liggesusers#' @include generics.R #' NULL #%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% # Functions #%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% #' Determine statistical significance of PCA scores. #' #' Randomly permutes a subset of data, and calculates projected PCA scores for #' these 'random' genes. Then compares the PCA scores for the 'random' genes #' with the observed PCA scores to determine statistical signifance. End result #' is a p-value for each gene's association with each principal component. #' #' @param object Seurat object #' @param reduction DimReduc to use. ONLY PCA CURRENTLY SUPPORTED. #' @param assay Assay used to calculate reduction. #' @param dims Number of PCs to compute significance for #' @param num.replicate Number of replicate samplings to perform #' @param prop.freq Proportion of the data to randomly permute for each #' replicate #' @param verbose Print progress bar showing the number of replicates #' that have been processed. #' @param maxit maximum number of iterations to be performed by the irlba function of RunPCA #' #' @return Returns a Seurat object where JS(object = object[['pca']], slot = 'empirical') #' represents p-values for each gene in the PCA analysis. If ProjectPCA is #' subsequently run, JS(object = object[['pca']], slot = 'full') then #' represents p-values for all genes. #' #' @importFrom methods new #' @importFrom pbapply pblapply pbsapply #' @importFrom future.apply future_lapply future_sapply #' @importFrom future nbrOfWorkers #' #' @references Inspired by Chung et al, Bioinformatics (2014) #' @concept dimensional_reduction #' #' @export #' #' @examples #' \dontrun{ #' data("pbmc_small") #' pbmc_small = suppressWarnings(JackStraw(pbmc_small)) #' head(JS(object = pbmc_small[['pca']], slot = 'empirical')) #' } #' JackStraw <- function( object, reduction = "pca", assay = NULL, dims = 20, num.replicate = 100, prop.freq = 0.01, verbose = TRUE, maxit = 1000 ) { if (reduction != "pca") { stop("Only pca for reduction is currently supported") } if (verbose && nbrOfWorkers() == 1) { my.lapply <- pblapply my.sapply <- pbsapply } else { my.lapply <- future_lapply my.sapply <- future_sapply } assay <- assay %||% DefaultAssay(object = object) if (IsSCT(assay = object[[assay]])) { stop("JackStraw cannot be run on SCTransform-normalized data. Please supply a non-SCT assay.") } if (dims > length(x = object[[reduction]])) { dims <- length(x = object[[reduction]]) warning("Number of dimensions specified is greater than those available. Setting dims to ", dims, " and continuing", immediate. = TRUE) } if (dims > nrow(x = object)) { dims <- nrow(x = object) warning("Number of dimensions specified is greater than the number of cells. Setting dims to ", dims, " and continuing", immediate. 
= TRUE) } loadings <- Loadings(object = object[[reduction]], projected = FALSE) reduc.features <- rownames(x = loadings) if (length(x = reduc.features) < 3) { stop("Too few features") } if (length(x = reduc.features) * prop.freq < 3) { warning( "Number of variable genes given ", prop.freq, " as the prop.freq is low. Consider including more variable genes and/or increasing prop.freq. ", "Continuing with 3 genes in every random sampling." ) } data.use <- GetAssayData(object = object, assay = assay, slot = "scale.data")[reduc.features, ] rev.pca <- object[[paste0('RunPCA.', assay)]]$rev.pca weight.by.var <- object[[paste0('RunPCA.', assay)]]$weight.by.var fake.vals.raw <- my.lapply( X = 1:num.replicate, FUN = JackRandom, scaled.data = data.use, prop.use = prop.freq, r1.use = 1, r2.use = dims, rev.pca = rev.pca, weight.by.var = weight.by.var, maxit = maxit ) fake.vals <- sapply( X = 1:dims, FUN = function(x) { return(as.numeric(x = unlist(x = lapply( X = 1:num.replicate, FUN = function(y) { return(fake.vals.raw[[y]][, x]) } )))) } ) fake.vals <- as.matrix(x = fake.vals) jackStraw.empP <- as.matrix( my.sapply( X = 1:dims, FUN = function(x) { return(unlist(x = lapply( X = abs(loadings[, x]), FUN = EmpiricalP, nullval = abs(fake.vals[,x]) ))) } ) ) colnames(x = jackStraw.empP) <- paste0("PC", 1:ncol(x = jackStraw.empP)) jackstraw.obj <- new( Class = "JackStrawData", empirical.p.values = jackStraw.empP, fake.reduction.scores = fake.vals, empirical.p.values.full = matrix() ) JS(object = object[[reduction]]) <- jackstraw.obj object <- LogSeuratCommand(object = object) return(object) } #' L2-normalization #' #' Perform l2 normalization on given dimensional reduction #' #' @param object Seurat object #' @param reduction Dimensional reduction to normalize #' @param new.dr name of new dimensional reduction to store #' (default is olddr.l2) #' @param new.key name of key for new dimensional reduction #' #' @return Returns a \code{\link{Seurat}} object #' @concept dimensional_reduction #' #' @export #' L2Dim <- function(object, reduction, new.dr = NULL, new.key = NULL) { l2.norm <- L2Norm(mat = Embeddings(object[[reduction]])) if(is.null(new.dr)){ new.dr <- paste0(reduction, ".l2") } if(is.null(new.key)){ new.key <- paste0("L2", Key(object[[reduction]])) } colnames(x = l2.norm) <- paste0(new.key, 1:ncol(x = l2.norm)) l2.dr <- CreateDimReducObject( embeddings = l2.norm, loadings = Loadings(object = object[[reduction]], projected = FALSE), projected = Loadings(object = object[[reduction]], projected = TRUE), assay = DefaultAssay(object = object), stdev = slot(object = object[[reduction]], name = 'stdev'), key = new.key, jackstraw = slot(object = object[[reduction]], name = 'jackstraw'), misc = slot(object = object[[reduction]], name = 'misc') ) object[[new.dr]] <- l2.dr return(object) } #' L2-Normalize CCA #' #' Perform l2 normalization on CCs #' #' @param object Seurat object #' @param \dots Additional parameters to L2Dim. #' @concept dimensional_reduction #' #' @export #' L2CCA <- function(object, ...){ CheckDots(..., fxns = 'L2Dim') return(L2Dim(object = object, reduction = "cca", ...)) } #' Significant genes from a PCA #' #' Returns a set of genes, based on the JackStraw analysis, that have #' statistically significant associations with a set of PCs. #' #' @param object Seurat object #' @param pcs.use PCS to use. #' @param pval.cut P-value cutoff #' @param use.full Use the full list of genes (from the projected PCA). Assumes #' that \code{ProjectDim} has been run. Currently, must be set to FALSE. 
#' @param max.per.pc Maximum number of genes to return per PC. Used to avoid genes from one PC dominating the entire analysis. #' #' @return A vector of genes whose p-values are statistically significant for #' at least one of the given PCs. #' #' @export #' @concept dimensional_reduction #' #' @seealso \code{\link{ProjectDim}} \code{\link{JackStraw}} #' #' @examples #' data("pbmc_small") #' PCASigGenes(pbmc_small, pcs.use = 1:2) #' PCASigGenes <- function( object, pcs.use, pval.cut = 0.1, use.full = FALSE, max.per.pc = NULL ) { # pvals.use <- GetDimReduction(object,reduction.type = "pca",slot = "jackstraw")@empirical.p.values empirical.use <- ifelse(test = use.full, yes = 'full', no = 'empirical') pvals.use <- JS(object = object[['pca']], slot = empirical.use) if (length(x = pcs.use) == 1) { pvals.min <- pvals.use[, pcs.use] } if (length(x = pcs.use) > 1) { pvals.min <- apply(X = pvals.use[, pcs.use], MARGIN = 1, FUN = min) } names(x = pvals.min) <- rownames(x = pvals.use) features <- names(x = pvals.min)[pvals.min < pval.cut] if (!is.null(x = max.per.pc)) { top.features <- TopFeatures( object = object[['pca']], dim = pcs.use, nfeatures = max.per.pc, projected = use.full, balanced = FALSE ) features <- intersect(x = top.features, y = features) } return(features) } #' Project Dimensional reduction onto full dataset #' #' Takes a pre-computed dimensional reduction (typically calculated on a subset #' of genes) and projects this onto the entire dataset (all genes). Note that #' the cell loadings will remain unchanged, but now there are gene loadings for #' all genes. #' #' @param object Seurat object #' @param reduction Reduction to use #' @param assay Assay to use #' @param dims.print Number of dims to print features for #' @param nfeatures.print Number of features with highest/lowest loadings to print for #' each dimension #' @param overwrite Replace the existing data in feature.loadings #' @param do.center Center the dataset prior to projection (should be set to TRUE) #' @param verbose Print top genes associated with the projected dimensions #' #' @return Returns Seurat object with the projected values #' #' @export #' @concept dimensional_reduction #' #' @examples #' data("pbmc_small") #' pbmc_small #' pbmc_small <- ProjectDim(object = pbmc_small, reduction = "pca") #' # Vizualize top projected genes in heatmap #' DimHeatmap(object = pbmc_small, reduction = "pca", dims = 1, balanced = TRUE) #' ProjectDim <- function( object, reduction = "pca", assay = NULL, dims.print = 1:5, nfeatures.print = 20, overwrite = FALSE, do.center = FALSE, verbose = TRUE ) { redeuc <- object[[reduction]] assay <- assay %||% DefaultAssay(object = redeuc) data.use <- GetAssayData( object = object[[assay]], slot = "scale.data" ) if (do.center) { data.use <- scale(x = as.matrix(x = data.use), center = TRUE, scale = FALSE) } cell.embeddings <- Embeddings(object = redeuc) new.feature.loadings.full <- data.use %*% cell.embeddings rownames(x = new.feature.loadings.full) <- rownames(x = data.use) colnames(x = new.feature.loadings.full) <- colnames(x = cell.embeddings) Loadings(object = redeuc, projected = TRUE) <- new.feature.loadings.full if (overwrite) { Loadings(object = redeuc, projected = FALSE) <- new.feature.loadings.full } object[[reduction]] <- redeuc if (verbose) { print( x = redeuc, dims = dims.print, nfeatures = nfeatures.print, projected = TRUE ) } object <- LogSeuratCommand(object = object) return(object) } #' @param query.dims Dimensions (columns) to use from query #' @param reference.dims Dimensions 
(columns) to use from reference #' @param ... Additional parameters to \code{\link{RunUMAP}} #' #' @inheritParams FindNeighbors #' @inheritParams RunUMAP #' #' @rdname ProjectUMAP #' @concept dimensional_reduction #' @export #' ProjectUMAP.default <- function( query, query.dims = NULL, reference, reference.dims = NULL, k.param = 30, nn.method = "annoy", n.trees = 50, annoy.metric = "cosine", l2.norm = FALSE, cache.index = TRUE, index = NULL, neighbor.name = "query_ref.nn", reduction.model, ... ) { query.dims <- query.dims %||% 1:ncol(x = query) reference.dims <- reference.dims %||% query.dims if (length(x = reference.dims) != length(x = query.dims)) { stop("Length of Reference and Query number of dimensions are not equal") } if (any(reference.dims > ncol(x = reference))) { stop("Reference dims is larger than the number of dimensions present.", call. = FALSE) } if (any(query.dims > ncol(x = query))) { stop("Query dims is larger than the number of dimensions present.", call. = FALSE) } if (length(x = Misc(object = reduction.model, slot = 'model')) == 0) { stop( "The provided reduction.model does not have a model stored. Please try ", "running umot-learn on the object first", call. = FALSE ) } query.neighbor <- FindNeighbors( object = reference[, reference.dims], query = query[, query.dims], k.param = k.param, nn.method = nn.method, n.trees = n.trees, annoy.metric = annoy.metric, cache.index = cache.index, index = index, return.neighbor = TRUE, l2.norm = l2.norm ) proj.umap <- RunUMAP(object = query.neighbor, reduction.model = reduction.model, ...) return(list(proj.umap = proj.umap, query.neighbor = query.neighbor)) } #' @rdname ProjectUMAP #' @concept dimensional_reduction #' @export #' @method ProjectUMAP DimReduc #' ProjectUMAP.DimReduc <- function( query, query.dims = NULL, reference, reference.dims = NULL, k.param = 30, nn.method = "annoy", n.trees = 50, annoy.metric = "cosine", l2.norm = FALSE, cache.index = TRUE, index = NULL, neighbor.name = "query_ref.nn", reduction.model, ... ) { proj.umap <- ProjectUMAP( query = Embeddings(object = query), query.dims = query.dims, reference = Embeddings(object = reference), reference.dims = reference.dims, k.param = k.param, nn.method = nn.method, n.trees = 50, annoy.metric = annoy.metric, l2.norm = l2.norm, cache.index = cache.index, index = index, neighbor.name = neighbor.name, reduction.model = reduction.model, ... ) return(proj.umap) } #' @param reference Reference dataset #' @param query.reduction Name of reduction to use from the query for neighbor #' finding #' @param reference.reduction Name of reduction to use from the reference for #' neighbor finding #' @param neighbor.name Name to store neighbor information in the query #' @param reduction.name Name of projected UMAP to store in the query #' @param reduction.key Value for the projected UMAP key #' @rdname ProjectUMAP #' @concept dimensional_reduction #' @export #' @method ProjectUMAP Seurat #' ProjectUMAP.Seurat <- function( query, query.reduction, query.dims = NULL, reference, reference.reduction, reference.dims = NULL, k.param = 30, nn.method = "annoy", n.trees = 50, annoy.metric = "cosine", l2.norm = FALSE, cache.index = TRUE, index = NULL, neighbor.name = "query_ref.nn", reduction.model, reduction.name = "ref.umap", reduction.key = "refUMAP_", ... ) { if (!query.reduction %in% Reductions(object = query)) { stop("The query.reduction (", query.reduction, ") is not present in the ", "provided query", call. 
= FALSE) } if (!reference.reduction %in% Reductions(object = reference)) { stop("The reference.reduction (", reference.reduction, ") is not present in the ", "provided reference.", call. = FALSE) } if (!reduction.model %in% Reductions(object = reference)) { stop("The reduction.model (", reduction.model, ") is not present in the ", "provided reference.", call. = FALSE) } proj.umap <- ProjectUMAP( query = query[[query.reduction]], query.dims = query.dims, reference = reference[[reference.reduction]], reference.dims = reference.dims, k.param = k.param, nn.method = nn.method, n.trees = n.trees, annoy.metric = annoy.metric, l2.norm = l2.norm, cache.index = cache.index, index = index, neighbor.name = neighbor.name, reduction.model = reference[[reduction.model]], reduction.key = reduction.key, assay = DefaultAssay(query), ... ) query[[reduction.name]] <- proj.umap$proj.umap query[[neighbor.name]] <- proj.umap$query.neighbor return(query) } #%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% # Methods for Seurat-defined generics #%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% #' @param standardize Standardize matrices - scales columns to have unit variance #' and mean 0 #' @param num.cc Number of canonical vectors to calculate #' @param seed.use Random seed to set. If NULL, does not set a seed #' @param verbose Show progress messages #' #' @importFrom irlba irlba #' #' @rdname RunCCA #' @concept dimensional_reduction #' @export #' RunCCA.default <- function( object1, object2, standardize = TRUE, num.cc = 20, seed.use = 42, verbose = FALSE, ... ) { if (!is.null(x = seed.use)) { set.seed(seed = seed.use) } cells1 <- colnames(x = object1) cells2 <- colnames(x = object2) if (standardize) { object1 <- Standardize(mat = object1, display_progress = FALSE) object2 <- Standardize(mat = object2, display_progress = FALSE) } mat3 <- crossprod(x = object1, y = object2) cca.svd <- irlba(A = mat3, nv = num.cc) cca.data <- rbind(cca.svd$u, cca.svd$v) colnames(x = cca.data) <- paste0("CC", 1:num.cc) rownames(cca.data) <- c(cells1, cells2) cca.data <- apply( X = cca.data, MARGIN = 2, FUN = function(x) { if (sign(x[1]) == -1) { x <- x * -1 } return(x) } ) return(list(ccv = cca.data, d = cca.svd$d)) } #' @param assay1,assay2 Assays to pull from in the first and second objects, respectively #' @param features Set of genes to use in CCA. Default is the union of both #' the variable features sets present in both objects. #' @param renormalize Renormalize raw data after merging the objects. If FALSE, #' merge the data matrices also. #' @param rescale Rescale the datasets prior to CCA. If FALSE, uses existing data in the scale data slots. #' @param compute.gene.loadings Also compute the gene loadings. NOTE - this will #' scale every gene in the dataset which may impose a high memory cost. #' @param add.cell.id1,add.cell.id2 Add ... #' @param ... Extra parameters (passed onto MergeSeurat in case with two objects #' passed, passed onto ScaleData in case with single object and rescale.groups #' set to TRUE) #' #' @rdname RunCCA #' @concept dimensional_reduction #' @export #' @method RunCCA Seurat #' RunCCA.Seurat <- function( object1, object2, assay1 = NULL, assay2 = NULL, num.cc = 20, features = NULL, renormalize = FALSE, rescale = FALSE, compute.gene.loadings = TRUE, add.cell.id1 = NULL, add.cell.id2 = NULL, verbose = TRUE, ... 
) { op <- options(Seurat.object.assay.version = "v3", Seurat.object.assay.calcn = FALSE) on.exit(expr = options(op), add = TRUE) assay1 <- assay1 %||% DefaultAssay(object = object1) assay2 <- assay2 %||% DefaultAssay(object = object2) if (assay1 != assay2) { warning("Running CCA on different assays") } if (is.null(x = features)) { if (length(x = VariableFeatures(object = object1, assay = assay1)) == 0) { stop(paste0("VariableFeatures not computed for the ", assay1, " assay in object1")) } if (length(x = VariableFeatures(object = object2, assay = assay2)) == 0) { stop(paste0("VariableFeatures not computed for the ", assay2, " assay in object2")) } features <- union(x = VariableFeatures(object = object1), y = VariableFeatures(object = object2)) if (length(x = features) == 0) { stop("Zero features in the union of the VariableFeature sets ") } } nfeatures <- length(x = features) if (!(rescale)) { data.use1 <- GetAssayData(object = object1, assay = assay1, slot = "scale.data") data.use2 <- GetAssayData(object = object2, assay = assay2, slot = "scale.data") features <- CheckFeatures(data.use = data.use1, features = features, object.name = "object1", verbose = FALSE) features <- CheckFeatures(data.use = data.use2, features = features, object.name = "object2", verbose = FALSE) data1 <- data.use1[features, ] data2 <- data.use2[features, ] } if (rescale) { data.use1 <- GetAssayData(object = object1, assay = assay1, slot = "data") data.use2 <- GetAssayData(object = object2, assay = assay2, slot = "data") features <- CheckFeatures(data.use = data.use1, features = features, object.name = "object1", verbose = FALSE) features <- CheckFeatures(data.use = data.use2, features = features, object.name = "object2", verbose = FALSE) data1 <- data.use1[features,] data2 <- data.use2[features,] if (verbose) message("Rescaling groups") data1 <- FastRowScale(as.matrix(data1)) dimnames(data1) <- list(features, colnames(x = object1)) data2 <- FastRowScale(as.matrix(data2)) dimnames(data2) <- list(features, colnames(x = object2)) } if (length(x = features) / nfeatures < 0.1 & verbose) { warning("More than 10% of provided features filtered out. Please check that the given features are present in the scale.data slot for both the assays provided here and that they have non-zero variance.") } if (length(x = features) < 50) { warning("Fewer than 50 features used as input for CCA.") } if (verbose) { message("Running CCA") } cca.results <- RunCCA( object1 = data1, object2 = data2, standardize = TRUE, num.cc = num.cc, verbose = verbose, ) if (verbose) { message("Merging objects") } combined.object <- merge( x = object1, y = object2, merge.data = TRUE, ... 
) rownames(x = cca.results$ccv) <- Cells(x = combined.object) colnames(x = data1) <- Cells(x = combined.object)[1:ncol(x = data1)] colnames(x = data2) <- Cells(x = combined.object)[(ncol(x = data1) + 1):length(x = Cells(x = combined.object))] combined.object[['cca']] <- CreateDimReducObject( embeddings = cca.results$ccv[colnames(combined.object), ], assay = assay1, key = "CC_" ) combined.object[['cca']]@assay.used <- DefaultAssay(combined.object) if (ncol(combined.object) != (ncol(object1) + ncol(object2))) { warning("Some cells removed after object merge due to minimum feature count cutoff") } combined.scale <- cbind(data1,data2) combined.object <- SetAssayData(object = combined.object, new.data = combined.scale, slot = "scale.data") ## combined.object@assays$ToIntegrate@scale.data <- combined.scale if (renormalize) { combined.object <- NormalizeData( object = combined.object, assay = assay1, normalization.method = object1[[paste0("NormalizeData.", assay1)]]$normalization.method, scale.factor = object1[[paste0("NormalizeData.", assay1)]]$scale.factor ) } if (compute.gene.loadings) { combined.object <- ProjectDim( object = combined.object, reduction = "cca", verbose = FALSE, overwrite = TRUE) } return(combined.object) } #' @param assay Name of Assay ICA is being run on #' @param nics Number of ICs to compute #' @param rev.ica By default, computes the dimensional reduction on the cell x #' feature matrix. Setting to true will compute it on the transpose (feature x cell #' matrix). #' @param ica.function ICA function from ica package to run (options: icafast, #' icaimax, icajade) #' @param verbose Print the top genes associated with high/low loadings for #' the ICs #' @param ndims.print ICs to print genes for #' @param nfeatures.print Number of genes to print for each IC #' @param reduction.key dimensional reduction key, specifies the string before #' the number for the dimension names. #' @param seed.use Set a random seed. Setting NULL will not set a seed. #' @param \dots Additional arguments to be passed to fastica #' #' @importFrom ica icafast icaimax icajade #' #' @rdname RunICA #' @concept dimensional_reduction #' @export #' @method RunICA default #' RunICA.default <- function( object, assay = NULL, nics = 50, rev.ica = FALSE, ica.function = "icafast", verbose = TRUE, ndims.print = 1:5, nfeatures.print = 30, reduction.name = "ica", reduction.key = "ica_", seed.use = 42, ... ) { CheckDots(..., fxns = ica.function) if (!is.null(x = seed.use)) { set.seed(seed = seed.use) } nics <- min(nics, ncol(x = object)) ica.fxn <- eval(expr = parse(text = ica.function)) if (rev.ica) { ica.results <- ica.fxn(object, nc = nics,...) cell.embeddings <- ica.results$M } else { ica.results <- ica.fxn(t(x = object), nc = nics,...) 
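    # In this default orientation the matrix is transposed to cells x features
    # before calling the ica function, so $S holds the estimated source signals
    # (one row per cell, used as the cell embeddings below) and $M the feature
    # mixing matrix. A commented-out sketch of the equivalent standalone call,
    # with a hypothetical toy matrix (expr is illustrative only):
    #   library(ica)
    #   expr <- matrix(rnorm(200 * 40), nrow = 200) # 200 features x 40 cells
    #   res  <- icafast(X = t(expr), nc = 5)        # decompose the cells x features matrix
    #   dim(res$S)                                  # 40 x 5: per-cell component scores
    #   dim(res$M)                                  # 200 x 5: per-feature mixing weights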
cell.embeddings <- ica.results$S } feature.loadings <- (as.matrix(x = object ) %*% as.matrix(x = cell.embeddings)) colnames(x = feature.loadings) <- paste0(reduction.key, 1:ncol(x = feature.loadings)) colnames(x = cell.embeddings) <- paste0(reduction.key, 1:ncol(x = cell.embeddings)) reduction.data <- CreateDimReducObject( embeddings = cell.embeddings, loadings = feature.loadings, assay = assay, key = reduction.key ) if (verbose) { print(x = reduction.data, dims = ndims.print, nfeatures = nfeatures.print) } return(reduction.data) } #' @param features Features to compute ICA on #' #' @rdname RunICA #' @concept dimensional_reduction #' @export #' @method RunICA Assay #' RunICA.Assay <- function( object, assay = NULL, features = NULL, nics = 50, rev.ica = FALSE, ica.function = "icafast", verbose = TRUE, ndims.print = 1:5, nfeatures.print = 30, reduction.name = "ica", reduction.key = "ica_", seed.use = 42, ... ) { data.use <- PrepDR( object = object, features = features, verbose = verbose ) reduction.data <- RunICA( object = data.use, assay = assay, nics = nics, rev.ica = rev.ica, ica.function = ica.function, verbose = verbose, ndims.print = ndims.print, nfeatures.print = nfeatures.print, reduction.key = reduction.key, seed.use = seed.use, ... ) return(reduction.data) } #' @param reduction.name dimensional reduction name #' #' @rdname RunICA #' @concept dimensional_reduction #' @method RunICA Seurat #' @export #' RunICA.Seurat <- function( object, assay = NULL, features = NULL, nics = 50, rev.ica = FALSE, ica.function = "icafast", verbose = TRUE, ndims.print = 1:5, nfeatures.print = 30, reduction.name = "ica", reduction.key = "IC_", seed.use = 42, ... ) { assay <- assay %||% DefaultAssay(object = object) reduction.data <- RunICA( object = object[[assay]], assay = assay, features = features, nics = nics, rev.ica = rev.ica, ica.function = ica.function, verbose = verbose, ndims.print = ndims.print, nfeatures.print = nfeatures.print, reduction.key = reduction.key, seed.use = seed.use, ... ) object[[reduction.name]] <- reduction.data object <- LogSeuratCommand(object = object) return(object) } #' @param assay Name of Assay PCA is being run on #' @param npcs Total Number of PCs to compute and store (50 by default) #' @param rev.pca By default computes the PCA on the cell x gene matrix. Setting #' to true will compute it on gene x cell matrix. #' @param weight.by.var Weight the cell embeddings by the variance of each PC #' (weights the gene loadings if rev.pca is TRUE) #' @param verbose Print the top genes associated with high/low loadings for #' the PCs #' @param ndims.print PCs to print genes for #' @param nfeatures.print Number of genes to print for each PC #' @param reduction.key dimensional reduction key, specifies the string before #' the number for the dimension names. PC by default #' @param seed.use Set a random seed. By default, sets the seed to 42. Setting #' NULL will not set a seed. #' @param approx Use truncated singular value decomposition to approximate PCA #' #' @importFrom irlba irlba #' @importFrom stats prcomp #' @importFrom utils capture.output #' #' @rdname RunPCA #' @concept dimensional_reduction #' @export #' RunPCA.default <- function( object, assay = NULL, npcs = 50, rev.pca = FALSE, weight.by.var = TRUE, verbose = TRUE, ndims.print = 1:5, nfeatures.print = 30, reduction.key = "PC_", seed.use = 42, approx = TRUE, ... 
) { if (!is.null(x = seed.use)) { set.seed(seed = seed.use) } if (inherits(x = object, what = 'matrix')) { RowVar.function <- RowVar } else if (inherits(x = object, what = 'dgCMatrix')) { RowVar.function <- RowVarSparse } else if (inherits(x = object, what = 'IterableMatrix')) { RowVar.function <- function(x) { return(BPCells::matrix_stats( matrix = x, row_stats = 'variance' )$row_stats['variance',]) } } if (rev.pca) { npcs <- min(npcs, ncol(x = object) - 1) pca.results <- irlba(A = object, nv = npcs, ...) total.variance <- sum(RowVar.function(x = t(x = object))) sdev <- pca.results$d/sqrt(max(1, nrow(x = object) - 1)) if (weight.by.var) { feature.loadings <- pca.results$u %*% diag(pca.results$d) } else{ feature.loadings <- pca.results$u } cell.embeddings <- pca.results$v } else { total.variance <- sum(RowVar.function(x = object)) if (approx) { npcs <- min(npcs, nrow(x = object) - 1) pca.results <- irlba(A = t(x = object), nv = npcs, ...) feature.loadings <- pca.results$v sdev <- pca.results$d/sqrt(max(1, ncol(object) - 1)) if (weight.by.var) { cell.embeddings <- pca.results$u %*% diag(pca.results$d) } else { cell.embeddings <- pca.results$u } } else { npcs <- min(npcs, nrow(x = object)) pca.results <- prcomp(x = t(object), rank. = npcs, ...) feature.loadings <- pca.results$rotation sdev <- pca.results$sdev if (weight.by.var) { cell.embeddings <- pca.results$x } else { cell.embeddings <- pca.results$x / (pca.results$sdev[1:npcs] * sqrt(x = ncol(x = object) - 1)) } } } rownames(x = feature.loadings) <- rownames(x = object) colnames(x = feature.loadings) <- paste0(reduction.key, 1:npcs) rownames(x = cell.embeddings) <- colnames(x = object) colnames(x = cell.embeddings) <- colnames(x = feature.loadings) reduction.data <- CreateDimReducObject( embeddings = cell.embeddings, loadings = feature.loadings, assay = assay, stdev = sdev, key = reduction.key, misc = list(total.variance = total.variance) ) if (verbose) { msg <- capture.output(print( x = reduction.data, dims = ndims.print, nfeatures = nfeatures.print )) message(paste(msg, collapse = '\n')) } return(reduction.data) } #' @param features Features to compute PCA on. If features=NULL, PCA will be run #' using the variable features for the Assay. Note that the features must be present #' in the scaled data. Any requested features that are not scaled or have 0 variance #' will be dropped, and the PCA will be run using the remaining features. #' #' @rdname RunPCA #' @concept dimensional_reduction #' @export #' @method RunPCA Assay #' RunPCA.Assay <- function( object, assay = NULL, features = NULL, npcs = 50, rev.pca = FALSE, weight.by.var = TRUE, verbose = TRUE, ndims.print = 1:5, nfeatures.print = 30, reduction.key = "PC_", seed.use = 42, ... ) { data.use <- PrepDR( object = object, features = features, verbose = verbose ) reduction.data <- RunPCA( object = data.use, assay = assay, npcs = npcs, rev.pca = rev.pca, weight.by.var = weight.by.var, verbose = verbose, ndims.print = ndims.print, nfeatures.print = nfeatures.print, reduction.key = reduction.key, seed.use = seed.use, ... ) return(reduction.data) } #' @method RunPCA StdAssay #' @export #' RunPCA.StdAssay <- function( object, assay = NULL, features = NULL, layer = 'scale.data', npcs = 50, rev.pca = FALSE, weight.by.var = TRUE, verbose = TRUE, ndims.print = 1:5, nfeatures.print = 30, reduction.key = "PC_", seed.use = 42, ... 
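# Method note: the StdAssay (assay v5) method below simply extracts the
# requested layer (scale.data by default) with PrepDR5() and forwards the
# resulting feature x cell matrix to the default RunPCA method.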
) { data.use <- PrepDR5( object = object, features = features, layer = layer, verbose = verbose ) return(RunPCA( object = data.use, assay = assay, npcs = npcs, rev.pca = rev.pca, weight.by.var = weight.by.var, verbose = verbose, ndims.print = ndims.print, nfeatures.print = nfeatures.print, reduction.key = reduction.key, seed.use = seed.use, ... )) } #' @param reduction.name dimensional reduction name, pca by default #' #' @rdname RunPCA #' @concept dimensional_reduction #' @export #' @method RunPCA Seurat #' RunPCA.Seurat <- function( object, assay = NULL, features = NULL, npcs = 50, rev.pca = FALSE, weight.by.var = TRUE, verbose = TRUE, ndims.print = 1:5, nfeatures.print = 30, reduction.name = "pca", reduction.key = "PC_", seed.use = 42, ... ) { assay <- assay %||% DefaultAssay(object = object) reduction.data <- RunPCA( object = object[[assay]], assay = assay, features = features, npcs = npcs, rev.pca = rev.pca, weight.by.var = weight.by.var, verbose = verbose, ndims.print = ndims.print, nfeatures.print = nfeatures.print, reduction.key = reduction.key, seed.use = seed.use, ... ) object[[reduction.name]] <- reduction.data object <- LogSeuratCommand(object = object) return(object) } #' @method RunPCA Seurat5 #' @export #' RunPCA.Seurat5 <- function( object, assay = NULL, features = NULL, npcs = 50, rev.pca = FALSE, weight.by.var = TRUE, verbose = TRUE, ndims.print = 1:5, nfeatures.print = 30, reduction.name = "pca", reduction.key = "PC_", seed.use = 42, ... ) { assay <- assay %||% DefaultAssay(object = object) reduction.data <- RunPCA( object = object[[assay]], assay = assay, features = features, npcs = npcs, rev.pca = rev.pca, weight.by.var = weight.by.var, verbose = verbose, ndims.print = ndims.print, nfeatures.print = nfeatures.print, reduction.key = reduction.key, seed.use = seed.use, ... ) object[[reduction.name]] <- reduction.data # object <- LogSeuratCommand(object = object) return(object) } #' @param assay Name of assay that that t-SNE is being run on #' @param seed.use Random seed for the t-SNE. If NULL, does not set the seed #' @param tsne.method Select the method to use to compute the tSNE. Available #' methods are: #' \itemize{ #' \item \dQuote{\code{Rtsne}}: Use the Rtsne package Barnes-Hut #' implementation of tSNE (default) #' \item \dQuote{\code{FIt-SNE}}: Use the FFT-accelerated Interpolation-based #' t-SNE. Based on Kluger Lab code found here: #' \url{https://github.com/KlugerLab/FIt-SNE} #' } #' @param dim.embed The dimensional space of the resulting tSNE embedding #' (default is 2). For example, set to 3 for a 3d tSNE #' @param reduction.key dimensional reduction key, specifies the string before #' the number for the dimension names. \dQuote{\code{tSNE_}} by default #' #' @importFrom Rtsne Rtsne #' #' @rdname RunTSNE #' @concept dimensional_reduction #' @export #' @method RunTSNE matrix #' RunTSNE.matrix <- function( object, assay = NULL, seed.use = 1, tsne.method = "Rtsne", dim.embed = 2, reduction.key = "tSNE_", ... ) { if (!is.null(x = seed.use)) { set.seed(seed = seed.use) } tsne.data <- switch( EXPR = tsne.method, 'Rtsne' = Rtsne( X = object, dims = dim.embed, pca = FALSE, ... 
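# `object` here is already a low-dimensional embedding (or, when
# is_distance = TRUE is forwarded through `...`, a distance matrix), so
# Rtsne's internal PCA step is disabled.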
# PCA/is_distance )$Y, 'FIt-SNE' = fftRtsne(X = object, dims = dim.embed, rand_seed = seed.use, ...), stop("Invalid tSNE method: please choose from 'Rtsne' or 'FIt-SNE'") ) colnames(x = tsne.data) <- paste0(reduction.key, 1:ncol(x = tsne.data)) rownames(x = tsne.data) <- rownames(x = object) tsne.reduction <- CreateDimReducObject( embeddings = tsne.data, key = reduction.key, assay = assay, global = TRUE ) return(tsne.reduction) } #' @param cells Which cells to analyze (default, all cells) #' @param dims Which dimensions to use as input features #' #' @rdname RunTSNE #' @concept dimensional_reduction #' @export #' @method RunTSNE DimReduc #' RunTSNE.DimReduc <- function( object, cells = NULL, dims = 1:5, seed.use = 1, tsne.method = "Rtsne", dim.embed = 2, reduction.key = "tSNE_", ... ) { args <- as.list(x = sys.frame(which = sys.nframe())) args <- c(args, list(...)) args$object <- args$object[[cells, args$dims]] args$dims <- NULL args$cells <- NULL args$assay <- DefaultAssay(object = object) return(do.call(what = 'RunTSNE', args = args)) } #' @rdname RunTSNE #' @concept dimensional_reduction #' @export #' @method RunTSNE dist #' RunTSNE.dist <- function( object, assay = NULL, seed.use = 1, tsne.method = "Rtsne", dim.embed = 2, reduction.key = "tSNE_", ... ) { args <- as.list(x = sys.frame(which = sys.nframe())) args <- c(args, list(...)) args$object <- as.matrix(x = args$object) args$is_distance <- TRUE return(do.call(what = 'RunTSNE', args = args)) } #' @param reduction Which dimensional reduction (e.g. PCA, ICA) to use for #' the tSNE. Default is PCA #' @param features If set, run the tSNE on this subset of features #' (instead of running on a set of reduced dimensions). Not set (NULL) by default; #' \code{dims} must be NULL to run on features #' @param distance.matrix If set, runs tSNE on the given distance matrix #' instead of data matrix (experimental) #' @param reduction.name dimensional reduction name, specifies the position in the object$dr list. tsne by default #' #' @rdname RunTSNE #' @concept dimensional_reduction #' @export #' @method RunTSNE Seurat #' RunTSNE.Seurat <- function( object, reduction = "pca", cells = NULL, dims = 1:5, features = NULL, seed.use = 1, tsne.method = "Rtsne", dim.embed = 2, distance.matrix = NULL, reduction.name = "tsne", reduction.key = "tSNE_", ... ) { cells <- cells %||% Cells(x = object) tsne.reduction <- if (!is.null(x = distance.matrix)) { RunTSNE( object = distance.matrix, assay = DefaultAssay(object = object), seed.use = seed.use, tsne.method = tsne.method, dim.embed = dim.embed, reduction.key = reduction.key, is_distance = TRUE, ... ) } else if (!is.null(x = dims)) { RunTSNE( object = object[[reduction]], cells = cells, dims = dims, seed.use = seed.use, tsne.method = tsne.method, dim.embed = dim.embed, reduction.key = reduction.key, ... ) } else if (!is.null(x = features)) { RunTSNE( object = t(x = as.matrix(x = GetAssayData(object = object)[features, cells])), assay = DefaultAssay(object = object), seed.use = seed.use, tsne.method = tsne.method, dim.embed = dim.embed, reduction.key = reduction.key, ... 
) } else { stop("Unknown way of running tSNE") } object[[reduction.name]] <- tsne.reduction object <- LogSeuratCommand(object = object) return(object) } #' @importFrom reticulate py_module_available py_set_seed import #' @importFrom uwot umap umap_transform #' @importFrom future nbrOfWorkers #' #' @rdname RunUMAP #' @concept dimensional_reduction #' @method RunUMAP default #' @export #' RunUMAP.default <- function( object, reduction.key = 'UMAP_', assay = NULL, reduction.model = NULL, return.model = FALSE, umap.method = 'uwot', n.neighbors = 30L, n.components = 2L, metric = 'cosine', n.epochs = NULL, learning.rate = 1.0, min.dist = 0.3, spread = 1.0, set.op.mix.ratio = 1.0, local.connectivity = 1L, repulsion.strength = 1, negative.sample.rate = 5, a = NULL, b = NULL, uwot.sgd = FALSE, seed.use = 42, metric.kwds = NULL, angular.rp.forest = FALSE, densmap = FALSE, dens.lambda = 2, dens.frac = 0.3, dens.var.shift = 0.1, verbose = TRUE, ... ) { CheckDots(...) if (!is.null(x = seed.use)) { set.seed(seed = seed.use) } if (umap.method != 'umap-learn' && getOption('Seurat.warn.umap.uwot', TRUE)) { warning( "The default method for RunUMAP has changed from calling Python UMAP via reticulate to the R-native UWOT using the cosine metric", "\nTo use Python UMAP via reticulate, set umap.method to 'umap-learn' and metric to 'correlation'", "\nThis message will be shown once per session", call. = FALSE, immediate. = TRUE ) options(Seurat.warn.umap.uwot = FALSE) } if (umap.method == 'uwot-learn') { warning("'uwot-learn' is deprecated. Set umap.method = 'uwot' and return.model = TRUE") umap.method <- "uwot" return.model <- TRUE } if (densmap && umap.method != 'umap-learn'){ warning("densmap is only supported by umap-learn method. Method is changed to 'umap-learn'") umap.method <- 'umap-learn' } if (return.model) { if (verbose) { message("UMAP will return its model") } umap.method = "uwot" } if (inherits(x = object, what = "Neighbor")) { object <- list( idx = Indices(object), dist = Distances(object) ) } if (!is.null(x = reduction.model)) { if (verbose) { message("Running UMAP projection") } umap.method <- "uwot-predict" } umap.output <- switch( EXPR = umap.method, 'umap-learn' = { if (!py_module_available(module = 'umap')) { stop("Cannot find UMAP, please install through pip (e.g. pip install umap-learn).") } if (!py_module_available(module = 'sklearn')) { stop("Cannot find sklearn, please install through pip (e.g. pip install scikit-learn).") } if (!is.null(x = seed.use)) { py_set_seed(seed = seed.use) } if (typeof(x = n.epochs) == "double") { n.epochs <- as.integer(x = n.epochs) } umap_import <- import(module = "umap", delay_load = TRUE) sklearn <- import("sklearn", delay_load = TRUE) if (densmap && numeric_version(x = umap_import$pkg_resources$get_distribution("umap-learn")$version) < numeric_version(x = "0.5.0")) { stop("densmap is only supported by versions >= 0.5.0 of umap-learn. Upgrade umap-learn (e.g. 
pip install --upgrade umap-learn).") } random.state <- sklearn$utils$check_random_state(seed = as.integer(x = seed.use)) umap.args <- list( n_neighbors = as.integer(x = n.neighbors), n_components = as.integer(x = n.components), metric = metric, n_epochs = n.epochs, learning_rate = learning.rate, min_dist = min.dist, spread = spread, set_op_mix_ratio = set.op.mix.ratio, local_connectivity = local.connectivity, repulsion_strength = repulsion.strength, negative_sample_rate = negative.sample.rate, random_state = random.state, a = a, b = b, metric_kwds = metric.kwds, angular_rp_forest = angular.rp.forest, verbose = verbose ) if (numeric_version(x = umap_import$pkg_resources$get_distribution("umap-learn")$version) >= numeric_version(x = "0.5.0")) { umap.args <- c(umap.args, list( densmap = densmap, dens_lambda = dens.lambda, dens_frac = dens.frac, dens_var_shift = dens.var.shift, output_dens = FALSE )) } umap <- do.call(what = umap_import$UMAP, args = umap.args) umap$fit_transform(as.matrix(x = object)) }, 'uwot' = { if (is.list(x = object)) { umap( X = NULL, nn_method = object, n_threads = nbrOfWorkers(), n_components = as.integer(x = n.components), metric = metric, n_epochs = n.epochs, learning_rate = learning.rate, min_dist = min.dist, spread = spread, set_op_mix_ratio = set.op.mix.ratio, local_connectivity = local.connectivity, repulsion_strength = repulsion.strength, negative_sample_rate = negative.sample.rate, a = a, b = b, fast_sgd = uwot.sgd, verbose = verbose, ret_model = return.model ) } else { umap( X = object, n_threads = nbrOfWorkers(), n_neighbors = as.integer(x = n.neighbors), n_components = as.integer(x = n.components), metric = metric, n_epochs = n.epochs, learning_rate = learning.rate, min_dist = min.dist, spread = spread, set_op_mix_ratio = set.op.mix.ratio, local_connectivity = local.connectivity, repulsion_strength = repulsion.strength, negative_sample_rate = negative.sample.rate, a = a, b = b, fast_sgd = uwot.sgd, verbose = verbose, ret_model = return.model ) } }, 'uwot-predict' = { if (metric == 'correlation') { warning( "UWOT does not implement the correlation metric, using cosine instead", call. = FALSE, immediate. = TRUE ) metric <- 'cosine' } if (is.null(x = reduction.model) || !inherits(x = reduction.model, what = 'DimReduc')) { stop( "If running projection UMAP, please pass a DimReduc object with the model stored to reduction.model.", call. = FALSE ) } model <- Misc( object = reduction.model, slot = "model" ) # add num_precomputed_nns to = numeric_version(x = "0.5.0")) { umap.args <- c(umap.args, list( densmap = densmap, densmap_kwds = densmap.kwds, output_dens = FALSE )) } embeddings <- do.call(what = umap$umap_$simplicial_set_embedding, args = umap.args) if (length(x = embeddings) == 2) { embeddings <- embeddings[[1]] } rownames(x = embeddings) <- colnames(x = data) colnames(x = embeddings) <- paste0("UMAP_", 1:n.components) # center the embeddings on zero embeddings <- scale(x = embeddings, scale = FALSE) umap <- CreateDimReducObject( embeddings = embeddings, key = reduction.key, assay = assay, global = TRUE ) return(umap) } #' @rdname RunUMAP #' @concept dimensional_reduction #' @method RunUMAP Neighbor #' @export #' RunUMAP.Neighbor <- function( object, reduction.model, ... ) { neighborlist <- list("idx" = Indices(object), "dist" = Distances(object)) RunUMAP( object = neighborlist, reduction.model = reduction.model, ... 
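# The Neighbor object has been unpacked above into a list of neighbor
# indices and distances; RunUMAP.default hands such a list to uwot::umap()
# as a precomputed nn_method, so no new neighbor search is performed.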
) }
#' @param reduction.model \code{DimReduc} object that contains the umap model
#' @param dims Which dimensions to use as input features, used only if
#' \code{features} is NULL
#' @param reduction Which dimensional reduction (PCA or ICA) to use for the
#' UMAP input. Default is PCA
#' @param features If set, run UMAP on this subset of features (instead of running on a
#' set of reduced dimensions). Not set (NULL) by default; \code{dims} must be NULL to run
#' on features
#' @param graph Name of graph on which to run UMAP
#' @param assay Assay to pull data for when using \code{features}, or assay used to construct Graph
#' if running UMAP on a Graph
#' @param nn.name Name of knn output on which to run UMAP
#' @param slot The slot used to pull data for when using \code{features}; the data slot is used by default
#' @param umap.method UMAP implementation to run. Can be
#' \describe{
#' \item{\code{uwot}:}{Runs umap via the uwot R package}
#' \item{\code{uwot-learn}:}{Runs umap via the uwot R package and returns the learned umap model}
#' \item{\code{umap-learn}:}{Run the Seurat wrapper of the python umap-learn package}
#' }
#' @param n.neighbors This determines the number of neighboring points used in
#' local approximations of manifold structure. Larger values will result in more
#' global structure being preserved at the loss of detailed local structure. In
#' general this parameter should often be in the range 5 to 50.
#' @param n.components The dimension of the space to embed into.
#' @param metric This determines the choice of metric used to measure
#' distance in the input space. A wide variety of metrics are already coded, and
#' a user defined function can be passed as long as it has been JITd by numba.
#' @param n.epochs The number of training epochs to be used in optimizing the low dimensional
#' embedding. Larger values result in more accurate embeddings. If NULL is specified, a value will
#' be selected based on the size of the input dataset (200 for large datasets, 500 for small).
#' @param learning.rate The initial learning rate for the embedding optimization.
#' @param min.dist This controls how tightly the embedding is allowed to compress points together.
#' Larger values ensure embedded points are more evenly distributed, while smaller values allow the
#' algorithm to optimise more accurately with regard to local structure. Sensible values are in
#' the range 0.001 to 0.5.
#' @param spread The effective scale of embedded points. In combination with min.dist this
#' determines how clustered/clumped the embedded points are.
#' @param set.op.mix.ratio Interpolate between (fuzzy) union and intersection as the set operation
#' used to combine local fuzzy simplicial sets to obtain a global fuzzy simplicial set. Both fuzzy
#' set operations use the product t-norm. The value of this parameter should be between 0.0 and
#' 1.0; a value of 1.0 will use a pure fuzzy union, while 0.0 will use a pure fuzzy intersection.
#' @param local.connectivity The local connectivity required - i.e. the number of nearest neighbors
#' that should be assumed to be connected at a local level. The higher this value the more connected
#' the manifold becomes locally. In practice this should be no more than the local intrinsic
#' dimension of the manifold.
#' @param repulsion.strength Weighting applied to negative samples in low dimensional embedding
#' optimization. Values higher than one will result in greater weight being given to negative
#' samples.
#' @param negative.sample.rate The number of negative samples to select per positive sample in the
#' optimization process. Increasing this value will result in greater repulsive force being applied,
#' greater optimization cost, but slightly more accuracy.
#' @param a More specific parameters controlling the embedding. If NULL, these values are set
#' automatically as determined by min.dist and spread. Parameter of differentiable approximation of
#' right adjoint functor.
#' @param b More specific parameters controlling the embedding. If NULL, these values are set
#' automatically as determined by min.dist and spread. Parameter of differentiable approximation of
#' right adjoint functor.
#' @param uwot.sgd Set \code{uwot::umap(fast_sgd = TRUE)}; see \code{\link[uwot]{umap}} for more details
#' @param metric.kwds A dictionary of arguments to pass on to the metric, such as the p value for
#' Minkowski distance. If NULL then no arguments are passed on.
#' @param angular.rp.forest Whether to use an angular random projection forest to initialise the
#' approximate nearest neighbor search. This can be faster, but is mostly only useful for metrics that
#' use an angular style distance, such as cosine, correlation etc. In the case of those metrics,
#' angular forests will be chosen automatically.
#' @param densmap Whether to use the density-augmented objective of densMAP.
#' Turning on this option generates an embedding where the local densities
#' are encouraged to be correlated with those in the original space.
#' Parameters below with the prefix 'dens' further control the behavior
#' of this extension. Default is FALSE. Only compatible with the 'umap-learn' method
#' and versions of umap-learn >= 0.5.0
#' @param densmap.kwds A dictionary of arguments to pass on to the densMAP optimization.
#' @param dens.lambda Specific parameter which controls the regularization weight
#' of the density correlation term in densMAP. Higher values prioritize density
#' preservation over the UMAP objective, and vice versa for values closer to zero.
#' Setting this parameter to zero is equivalent to running the original UMAP algorithm.
#' Default value is 2.
#' @param dens.frac Specific parameter which controls the fraction of epochs
#' (between 0 and 1) where the density-augmented objective is used in densMAP.
#' The first (1 - dens_frac) fraction of epochs optimize the original UMAP
#' objective before introducing the density correlation term. Default is 0.3.
#' @param dens.var.shift Specific parameter which specifies a small constant
#' added to the variance of local radii in the embedding when calculating
#' the density correlation objective to prevent numerical instability from
#' dividing by a small number. Default is 0.1.
#' @param reduction.name Name to store dimensional reduction under in the Seurat object
#' @param reduction.key dimensional reduction key, specifies the string before
#' the number for the dimension names. UMAP by default
#' @param return.model Whether UMAP will return the uwot model
#' @param seed.use Set a random seed. By default, sets the seed to 42.
Setting #' NULL will not set a seed #' @param verbose Controls verbosity #' #' @rdname RunUMAP #' @concept dimensional_reduction #' @export #' @method RunUMAP Seurat #' RunUMAP.Seurat <- function( object, dims = NULL, reduction = 'pca', features = NULL, graph = NULL, assay = DefaultAssay(object = object), nn.name = NULL, slot = 'data', umap.method = 'uwot', reduction.model = NULL, return.model = FALSE, n.neighbors = 30L, n.components = 2L, metric = 'cosine', n.epochs = NULL, learning.rate = 1, min.dist = 0.3, spread = 1, set.op.mix.ratio = 1, local.connectivity = 1L, repulsion.strength = 1, negative.sample.rate = 5L, a = NULL, b = NULL, uwot.sgd = FALSE, seed.use = 42L, metric.kwds = NULL, angular.rp.forest = FALSE, densmap = FALSE, dens.lambda = 2, dens.frac = 0.3, dens.var.shift = 0.1, verbose = TRUE, reduction.name = 'umap', reduction.key = NULL, ... ) { CheckDots(...) if (sum(c(is.null(x = dims), is.null(x = features), is.null(x = graph))) < 2) { stop("Please specify only one of the following arguments: dims, features, or graph") } if (sum(!is.null(x = dims), !is.null(x = nn.name), !is.null(x = graph), !is.null(x = features)) != 1) { stop("Only one parameter among 'dims', 'nn.name', 'graph', or 'features' ", "should be used at a time to run UMAP") } if (!is.null(x = features)) { data.use <- as.matrix(x = t(x = GetAssayData(object = object, slot = slot, assay = assay)[features, , drop = FALSE])) if (ncol(x = data.use) < n.components) { stop( "Please provide as many or more features than n.components: ", length(x = features), " features provided, ", n.components, " UMAP components requested", call. = FALSE ) } } else if (!is.null(x = dims)) { data.use <- Embeddings(object[[reduction]])[, dims] assay <- DefaultAssay(object = object[[reduction]]) if (length(x = dims) < n.components) { stop( "Please provide as many or more dims than n.components: ", length(x = dims), " dims provided, ", n.components, " UMAP components requested", call. = FALSE ) } } else if (!is.null(x = nn.name)) { if (!inherits(x = object[[nn.name]], what = "Neighbor")) { stop( "Please specify a Neighbor object name, ", "instead of the name of a ", class(object[[nn.name]]), " object", call. = FALSE ) } data.use <- object[[nn.name]] } else if (!is.null(x = graph)) { if (!inherits(x = object[[graph]], what = "Graph")) { stop( "Please specify a Graph object name, ", "instead of the name of a ", class(object[[graph]]), " object", call. 
= FALSE ) } data.use <- object[[graph]] } else { stop("Please specify one of dims, features, or graph") } object[[reduction.name]] <- RunUMAP( object = data.use, reduction.model = reduction.model, return.model = return.model, assay = assay, umap.method = umap.method, n.neighbors = n.neighbors, n.components = n.components, metric = metric, n.epochs = n.epochs, learning.rate = learning.rate, min.dist = min.dist, spread = spread, set.op.mix.ratio = set.op.mix.ratio, local.connectivity = local.connectivity, repulsion.strength = repulsion.strength, negative.sample.rate = negative.sample.rate, a = a, b = b, uwot.sgd = uwot.sgd, seed.use = seed.use, metric.kwds = metric.kwds, angular.rp.forest = angular.rp.forest, densmap = densmap, dens.lambda = dens.lambda, dens.frac = dens.frac, dens.var.shift = dens.var.shift, reduction.key = reduction.key %||% Key(object = reduction.name, quiet = TRUE), verbose = verbose ) object <- LogSeuratCommand(object = object) return(object) } #' @param dims Which dimensions to examine #' @param score.thresh Threshold to use for the proportion test of PC #' significance (see Details) #' #' @importFrom stats prop.test #' #' @rdname ScoreJackStraw #' @concept dimensional_reduction #' @export #' @method ScoreJackStraw JackStrawData #' ScoreJackStraw.JackStrawData <- function( object, dims = 1:5, score.thresh = 1e-5, ... ) { CheckDots(...) pAll <- JS(object = object, slot = "empirical.p.values") pAll <- pAll[, dims, drop = FALSE] pAll <- as.data.frame(pAll) pAll$Contig <- rownames(x = pAll) score.df <- NULL for (i in dims) { pc.score <- suppressWarnings(prop.test( x = c( length(x = which(x = pAll[, i] <= score.thresh)), floor(x = nrow(x = pAll) * score.thresh) ), n = c(nrow(pAll), nrow(pAll)) )$p.val) if (length(x = which(x = pAll[, i] <= score.thresh)) == 0) { pc.score <- 1 } if (is.null(x = score.df)) { score.df <- data.frame(PC = paste0("PC", i), Score = pc.score) } else { score.df <- rbind(score.df, data.frame(PC = paste0("PC", i), Score = pc.score)) } } score.df$PC <- dims score.df <- as.matrix(score.df) JS(object = object, slot = 'overall') <- score.df return(object) } #' @rdname ScoreJackStraw #' @concept dimensional_reduction #' @export #' @method ScoreJackStraw DimReduc #' ScoreJackStraw.DimReduc <- function(object, dims = 1:5, score.thresh = 1e-5, ...) { JS(object = object) <- ScoreJackStraw( object = JS(object = object), dims = dims, score.thresh = score.thresh, ... ) return(object) } #' @param reduction Reduction associated with JackStraw to score #' @param do.plot Show plot. To return ggplot object, use \code{JackStrawPlot} after #' running ScoreJackStraw. #' #' @seealso \code{\link{JackStrawPlot}} #' #' @rdname ScoreJackStraw #' @concept dimensional_reduction #' @export #' @method ScoreJackStraw Seurat #' ScoreJackStraw.Seurat <- function( object, reduction = "pca", dims = 1:5, score.thresh = 1e-5, do.plot = FALSE, ... ) { object[[reduction]] <- ScoreJackStraw( object = object[[reduction]], dims = dims, score.thresh = score.thresh, ... ) if (do.plot) { CheckDots(..., fxns = 'JackStrawPlot') suppressWarnings(expr = print(JackStrawPlot( object = object, reduction = reduction, dims = dims, ... 
))) } object <- LogSeuratCommand(object = object) return(object) } #%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% # Methods for R-defined generics #%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% #%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% # Internal #%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% # Check that features are present and have non-zero variance # # @param data.use Feature matrix (features are rows) # @param features Features to check # @param object.name Name of object for message printing # @param verbose Print warnings # # @return Returns a vector of features that is the subset of features # that have non-zero variance # CheckFeatures <- function( data.use, features, object.name, verbose = TRUE ) { if (any(!features %in% rownames(x = data.use))) { missing.features <- features[!features %in% rownames(x = data.use)] features <- setdiff(x = features, y = missing.features) if (verbose){ warning( paste0( "The following ", length(x = missing.features), " features are not scaled in ", object.name, ": ", paste0(missing.features, collapse = ", ") )) } } if (inherits(x = data.use, what = 'dgCMatrix')) { features.var <- SparseRowVar(mat = data.use[features, ], display_progress = F) } else if (inherits(x = data.use, what = "IterableMatrix")) { bp.stats <- BPCells::matrix_stats(matrix = data.use, row_stats = "variance") features.var <- bp.stats$row_stats["variance",][features] } else { features.var <- RowVar(x = data.use[features, ]) } no.var.features <- features[features.var == 0] if (length(x = no.var.features) > 0 && verbose) { warning( paste0( "The following features have zero variance in ", object.name, ": ", paste0(no.var.features, collapse = ", ") )) } features <- setdiff(x = features, y = no.var.features) features <- features[!is.na(x = features)] return(features) } #internal EmpiricalP <- function(x, nullval) { return(sum(nullval > x) / length(x = nullval)) } # FIt-SNE helper function for calling fast_tsne from R # # Based on Kluger Lab FIt-SNE v1.2.1 code on https://github.com/KlugerLab/FIt-SNE/blob/master/fast_tsne.R # commit 601608ed42e4be2765970910927da20f0b0bf9b9 on June 25, 2020 # #' @importFrom utils file_test # fftRtsne <- function(X, dims = 2, perplexity = 30, theta = 0.5, max_iter = 750, fft_not_bh = TRUE, ann_not_vptree = TRUE, stop_early_exag_iter = 250, exaggeration_factor = 12.0, no_momentum_during_exag = FALSE, start_late_exag_iter = -1, late_exag_coeff = 1.0, mom_switch_iter = 250, momentum = 0.5, final_momentum = 0.8, learning_rate = 'auto', n_trees = 50, search_k = -1, rand_seed = -1, nterms = 3, intervals_per_integer = 1, min_num_intervals = 50, K = -1, sigma = -30, initialization = 'pca', max_step_norm = 5, data_path = NULL, result_path = NULL, load_affinities = NULL, fast_tsne_path = NULL, nthreads = getOption('mc.cores', default = 1), perplexity_list = NULL, get_costs = FALSE, df = 1.0, ... ) { CheckDots(...) 
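# Overview of this wrapper: the input matrix and all tuning parameters are
# serialized to a temporary binary data file, the fast_tsne / FItSNE.exe
# executable is located (unless fast_tsne_path is supplied) and invoked via
# system2(), and the resulting embedding (plus per-iteration costs when
# get_costs = TRUE) is read back from the binary result file; the temporary
# files are deleted before returning.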
if (is.null(x = data_path)) { data_path <- tempfile(pattern = 'fftRtsne_data_', fileext = '.dat') } if (is.null(x = result_path)) { result_path <- tempfile(pattern = 'fftRtsne_result_', fileext = '.dat') } if (is.null(x = fast_tsne_path)) { # suppressWarnings(expr = fast_tsne_path <- system2(command = 'which', args = 'fast_tsne', stdout = TRUE)) fast_tsne_path <- SysExec(progs = ifelse( test = .Platform$OS.type == 'windows', yes = 'FItSNE.exe', no = 'fast_tsne' )) if (length(x = fast_tsne_path) == 0) { stop("no fast_tsne_path specified and fast_tsne binary is not in the search path") } } fast_tsne_path <- normalizePath(path = fast_tsne_path) if (!file_test(op = '-x', x = fast_tsne_path)) { stop("fast_tsne_path '", fast_tsne_path, "' does not exist or is not executable") } # check fast_tsne version ft.out <- suppressWarnings(expr = system2(command = fast_tsne_path, stdout = TRUE)) version_number <- regmatches(ft.out[1], regexpr('= t-SNE v[0-9.]+', ft.out[1])) if (is.null(version_number)){ message("First line of fast_tsne output is") message(ft.out[1]) stop("Our FIt-SNE wrapper requires FIt-SNE v1.0+, please install the appropriate version from github.com/KlugerLab/FIt-SNE and have fast_tsne_path point to it if it's not in your path") } else { version_number <- gsub('= t-SNE v', '', version_number) } is.wholenumber <- function(x, tol = .Machine$double.eps ^ 0.5) { return(abs(x = x - round(x = x)) < tol) } if (version_number == '1.0.0' && df != 1.0) { stop("This version of FIt-SNE does not support df!=1. Please install the appropriate version from github.com/KlugerLab/FIt-SNE") } if (!is.numeric(x = theta) || (theta < 0.0) || (theta > 1.0) ) { stop("Incorrect theta.") } if (nrow(x = X) - 1 < 3 * perplexity) { stop("Perplexity is too large.") } if (!is.matrix(x = X)) { stop("Input X is not a matrix") } if (!(max_iter > 0)) { stop("Incorrect number of iterations.") } if (!is.wholenumber(x = stop_early_exag_iter) || stop_early_exag_iter < 0) { stop("stop_early_exag_iter should be a positive integer") } if (!is.numeric(x = exaggeration_factor)) { stop("exaggeration_factor should be numeric") } if (!is.numeric(df)) { stop("df should be numeric") } if (!is.wholenumber(x = dims) || dims <= 0) { stop("Incorrect dimensionality.") } if (search_k == -1) { if (perplexity > 0) { search_k <- n_trees * perplexity * 3 } else if (perplexity == 0) { search_k <- n_trees * max(perplexity_list) * 3 } else { search_k <- n_trees * K } } if (is.character(learning_rate) && learning_rate =='auto') { learning_rate = max(200, nrow(X)/exaggeration_factor) } if (is.character(start_late_exag_iter) && start_late_exag_iter =='auto') { if (late_exag_coeff > 0) { start_late_exag_iter = stop_early_exag_iter } else { start_late_exag_iter = -1 } } if (is.character(initialization) && initialization == 'pca') { if (rand_seed != -1) { set.seed(rand_seed) } if (requireNamespace("rsvd", quietly = TRUE)) { message('Using rsvd() to compute the top PCs for initialization.') X_c <- scale(x = X, center = TRUE, scale = FALSE) rsvd_out <- rsvd::rsvd(A = X_c, k = dims) X_top_pcs <- rsvd_out$u %*% diag(x = rsvd_out$d, nrow = dims) } else if (requireNamespace("irlba", quietly = TRUE)) { message('Using irlba() to compute the top PCs for initialization.') X_colmeans <- colMeans(x = X) irlba_out <- irlba::irlba(A = X, nv = dims, center = X_colmeans) X_top_pcs <- irlba_out$u %*% diag(x = irlba_out$d, nrow = dims) } else { stop( "By default, FIt-SNE initializes the embedding with the top PCs. We use either rsvd or irlba for fast computation. 
To use this functionality, please install the rsvd package with install.packages('rsvd') or the irlba package with install.packages('ilrba'). Otherwise, set initialization to NULL for random initialization, or any N by dims matrix for custom initialization." ) } initialization <- 0.0001*(X_top_pcs/sd(X_top_pcs[,1])) } else if (is.character(x = initialization) && initialization == 'random') { message('Random initialization') initialization = NULL } nbody_algo <- ifelse(test = fft_not_bh, yes = 2, no = 1) if (is.null(load_affinities)) { load_affinities <- 0 } else { if (load_affinities == 'load') { load_affinities <- 1 } else if (load_affinities == 'save') { load_affinities <- 2 } else { load_affinities <- 0 } } knn_algo <- ifelse(test = ann_not_vptree, yes = 1, no = 2) tX <- as.numeric(t(X)) f <- file(description = data_path, open = "wb") n = nrow(x = X) D = ncol(x = X) writeBin(object = as.integer(x = n), con = f, size = 4) writeBin(object = as.integer(x = D), con = f, size = 4) writeBin(object = as.numeric(x = theta), con = f, size = 8) writeBin(object = as.numeric(x = perplexity), con = f, size = 8) if (perplexity == 0) { writeBin(object = as.integer(x = length(x = perplexity_list)), con = f, size = 4) writeBin(object = perplexity_list, con = f) } writeBin(object = as.integer(x = dims), con = f, size = 4) #theta writeBin(object = as.integer(x = max_iter), con = f, size = 4) writeBin(object = as.integer(x = stop_early_exag_iter), con = f, size = 4) writeBin(object = as.integer(x = mom_switch_iter), con = f, size = 4) writeBin(object = as.numeric(x = momentum), con = f, size = 8) writeBin(object = as.numeric(x = final_momentum), con = f, size = 8) writeBin(object = as.numeric(x = learning_rate), con = f, size = 8) if (!(version_number %in% c('1.1.0', '1.0.0'))) { writeBin(object = as.numeric(x = max_step_norm), f, size = 8) } writeBin(object = as.integer(x = K), con = f, size = 4) #K writeBin(object = as.numeric(x = sigma), con = f, size = 8) #sigma writeBin(object = as.integer(x = nbody_algo), con = f, size = 4) #not barnes hut writeBin(object = as.integer(x = knn_algo), con = f, size = 4) writeBin(object = as.numeric(x = exaggeration_factor), con = f, size = 8) #compexag writeBin(object = as.integer(x = no_momentum_during_exag), con = f, size = 4) writeBin(object = as.integer(x = n_trees), con = f, size = 4) writeBin(object = as.integer(x = search_k), con = f, size = 4) writeBin(object = as.integer(x = start_late_exag_iter), con = f, size = 4) writeBin(object = as.numeric(x = late_exag_coeff), con = f, size = 8) writeBin(object = as.integer(x = nterms), con = f, size = 4) writeBin(object = as.numeric(x = intervals_per_integer), con = f, size = 8) writeBin(object = as.integer(x = min_num_intervals), con = f, size = 4) writeBin(object = tX, con = f) writeBin(object = as.integer(x = rand_seed), con = f, size = 4) if (version_number != "1.0.0") { writeBin(object = as.numeric(x = df), con = f, size = 8) } writeBin(object = as.integer(x = load_affinities), con = f, size = 4) if (!is.null(x = initialization)) { writeBin(object = c(t(x = initialization)), con = f) } close(con = f) if (version_number == "1.0.0") { flag <- system2( command = fast_tsne_path, args = c(data_path, result_path, nthreads) ) } else { flag <- system2( command = fast_tsne_path, args = c(version_number, data_path, result_path, nthreads) ) } if (flag != 0) { stop('tsne call failed') } f <- file(description = result_path, open = "rb") n <- readBin(con = f, what = integer(), n = 1, size = 4) d <- readBin(con = f, what = 
integer(), n = 1, size = 4) Y <- readBin(con = f, what = numeric(), n = n * d) Y <- t(x = matrix(Y, nrow = d)) if (get_costs) { tmp <- readBin(con = f, what = integer(), n = 1, size = 4) costs <- readBin(con = f, what = numeric(), n = max_iter, size = 8) Yout <- list(Y = Y, costs = costs) } else { Yout <- Y } close(con = f) file.remove(data_path) file.remove(result_path) return(Yout) } #internal # JackRandom <- function( scaled.data, prop.use = 0.01, r1.use = 1, r2.use = 5, seed.use = 1, rev.pca = FALSE, weight.by.var = weight.by.var, maxit = 1000 ) { if (!is.null(x = seed.use)) { set.seed(seed = seed.use) } rand.genes <- sample( x = rownames(x = scaled.data), size = nrow(x = scaled.data) * prop.use ) # make sure that rand.genes is at least 3 if (length(x = rand.genes) < 3) { rand.genes <- sample(x = rownames(x = scaled.data), size = 3) } data.mod <- scaled.data data.mod[rand.genes, ] <- MatrixRowShuffle(x = scaled.data[rand.genes, ]) temp.object <- RunPCA( object = data.mod, assay = "temp", npcs = r2.use, features = rownames(x = data.mod), rev.pca = rev.pca, weight.by.var = weight.by.var, verbose = FALSE, maxit = maxit ) return(Loadings(temp.object)[rand.genes, r1.use:r2.use]) } # Calculates the l2-norm of a vector # # Modified from PMA package # @references Witten, Tibshirani, and Hastie, Biostatistics 2009 # @references \url{https://github.com/cran/PMA/blob/master/R/PMD.R} # # @param vec numeric vector # # @return returns the l2-norm. # L2Norm <- function(vec) { a <- sqrt(x = sum(vec ^ 2)) if (a == 0) { a <- .05 } return(a) } # Prep data for dimensional reduction # # Common checks and preparatory steps before running certain dimensional # reduction techniques # # @param object Assay object # @param features Features to use as input for the dimensional reduction technique. # Default is variable features # @ param verbose Print messages and warnings # # PrepDR <- function( object, features = NULL, slot = 'scale.data', verbose = TRUE ) { if (length(x = VariableFeatures(object = object)) == 0 && is.null(x = features)) { stop("Variable features haven't been set. Run FindVariableFeatures() or provide a vector of feature names.") } data.use <- GetAssayData(object = object, slot = slot) if (nrow(x = data.use ) == 0 && slot == "scale.data") { stop("Data has not been scaled. 
Please run ScaleData and retry") } features <- features %||% VariableFeatures(object = object) features.keep <- unique(x = features[features %in% rownames(x = data.use)]) if (length(x = features.keep) < length(x = features)) { features.exclude <- setdiff(x = features, y = features.keep) if (verbose) { warning(paste0("The following ", length(x = features.exclude), " features requested have not been scaled (running reduction without them): ", paste0(features.exclude, collapse = ", "))) } } features <- features.keep if (inherits(x = data.use, what = 'dgCMatrix')) { features.var <- RowVarSparse(mat = data.use[features, ]) } else { features.var <- RowVar(x = data.use[features, ]) } features.keep <- features[features.var > 0] if (length(x = features.keep) < length(x = features)) { features.exclude <- setdiff(x = features, y = features.keep) if (verbose) { warning(paste0("The following ", length(x = features.exclude), " features requested have zero variance (running reduction without them): ", paste0(features.exclude, collapse = ", "))) } } features <- features.keep features <- features[!is.na(x = features)] data.use <- data.use[features, ] return(data.use) } PrepDR5 <- function(object, features = NULL, layer = 'scale.data', verbose = TRUE) { layer <- layer[1L] olayer <- layer layer <- Layers(object = object, search = layer) if (is.null(layer)) { abort(paste0("No layer matching pattern '", olayer, "' not found. Please run ScaleData and retry")) } data.use <- LayerData(object = object, layer = layer) features <- features %||% VariableFeatures(object = object) if (!length(x = features)) { stop("No variable features, run FindVariableFeatures() or provide a vector of features", call. = FALSE) } features.var <- apply(X = data.use, MARGIN = 1L, FUN = var) features.keep <- features[features.var > 0] if (!length(x = features.keep)) { stop("None of the requested features have any variance", call. = FALSE) } else if (length(x = features.keep) < length(x = features)) { exclude <- setdiff(x = features, y = features.keep) if (isTRUE(x = verbose)) { warning( "The following ", length(x = exclude), " features requested have zero variance; running reduction without them: ", paste(exclude, collapse = ', '), call. = FALSE, immediate. = TRUE ) } } features <- features.keep features <- features[!is.na(x = features)] features.use <- features[features %in% rownames(data.use)] if(!isTRUE(all.equal(features, features.use))) { missing_features <- setdiff(features, features.use) if(length(missing_features) > 0) { warning_message <- paste("The following features were not available: ", paste(missing_features, collapse = ", "), ".", sep = "") warning(warning_message, immediate. = TRUE) } } data.use <- data.use[features.use, ] return(data.use) } #' @param assay Name of Assay SPCA is being run on #' @param npcs Total Number of SPCs to compute and store (50 by default) #' @param verbose Print the top genes associated with high/low loadings for #' the SPCs #' @param reduction.key dimensional reduction key, specifies the string before #' the number for the dimension names. SPC by default #' @param graph Graph used supervised by SPCA #' @param seed.use Set a random seed. By default, sets the seed to 42. Setting #' NULL will not set a seed. #' #' @importFrom irlba irlba #' #' @concept dimensional_reduction #' @rdname RunSPCA #' @export RunSPCA.default <- function( object, assay = NULL, npcs = 50, reduction.key = "SPC_", graph = NULL, verbose = FALSE, seed.use = 42, ... 
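# Implementation note for the body below: supervised PCA forms the
# HSIC-style matrix X %*% G %*% t(X) from the scaled data X and the
# supervision graph G, takes its top npcs singular vectors from irlba() as
# the feature loadings, and embeds cells by projecting t(X) onto those
# loadings; standard deviations are derived from the singular values.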
) { if (!is.null(x = seed.use)) { set.seed(seed = seed.use) } npcs <- min(npcs, nrow(x = object) - 1) if (verbose) { message("Computing sPCA transformation") } HSIC <- object %*% graph %*% t(x = object) pca.results <- irlba(A = HSIC, nv = npcs) feature.loadings <- pca.results$u rownames(x = feature.loadings) <- rownames(x = object) cell.embeddings <- t(object) %*% feature.loadings colnames(x = cell.embeddings) <- colnames(x = feature.loadings) <- paste0(reduction.key, 1:ncol(x = cell.embeddings)) sdev <- pca.results$d / sqrt(max(1, nrow(x = HSIC) - 1)) reduction.data <- CreateDimReducObject( embeddings = cell.embeddings, loadings = feature.loadings, assay = assay, stdev = sdev, key = reduction.key ) return(reduction.data) } #' @param features Features to compute SPCA on. If features=NULL, SPCA will be run #' using the variable features for the Assay. #' #' @rdname RunSPCA #' @concept dimensional_reduction #' @export #' @method RunSPCA Assay #' RunSPCA.Assay <- function( object, assay = NULL, features = NULL, npcs = 50, reduction.key = "SPC_", graph = NULL, verbose = TRUE, seed.use = 42, ... ) { data.use <- PrepDR( object = object, features = features, verbose = verbose ) reduction.data <- RunSPCA( object = data.use, assay = assay, npcs = npcs, reduction.key = reduction.key, graph = graph, verbose = verbose, seed.use = seed.use, ... ) return(reduction.data) } #' @param features Features to compute SPCA on. If features=NULL, SPCA will be run #' using the variable features for the Assay. #' @param layer Layer to run SPCA on #' #' @rdname RunSPCA #' @concept dimensional_reduction #' @export #' @method RunSPCA Assay5 #' RunSPCA.Assay5 <- function( object, assay = NULL, features = NULL, npcs = 50, reduction.key = "SPC_", graph = NULL, verbose = TRUE, seed.use = 42, layer = 'scale.data', ... ) { data.use <- PrepDR5( object = object, features = features, layer = layer, verbose = verbose ) reduction.data <- RunSPCA( object = data.use, assay = assay, npcs = npcs, reduction.key = reduction.key, graph = graph, verbose = verbose, seed.use = seed.use, ... ) return(reduction.data) } #' @param reduction.name dimensional reduction name, spca by default #' @rdname RunSPCA #' @concept dimensional_reduction #' @export #' @method RunSPCA Seurat #' RunSPCA.Seurat <- function( object, assay = NULL, features = NULL, npcs = 50, reduction.name = "spca", reduction.key = "SPC_", graph = NULL, verbose = TRUE, seed.use = 42, ... ) { assay <- assay %||% DefaultAssay(object = object) if (is.null(x = graph)) { stop("Graph is not provided") } else if (is.character(x = graph)) { graph <- object[[graph]] } reduction.data <- RunSPCA( object = object[[assay]], assay = assay, features = features, npcs = npcs, reduction.name = reduction.name, reduction.key = reduction.key, graph = graph, verbose = verbose, seed.use = seed.use, ... ) object[[reduction.name]] <- reduction.data object <- LogSeuratCommand(object = object) return(object) } #' @param assay Name of Assay SLSI is being run on #' @param n Total Number of SLSI components to compute and store #' @param verbose Display messages #' @param reduction.key dimensional reduction key, specifies the string before #' the number for the dimension names #' @param graph Graph used supervised by SLSI #' @param seed.use Set a random seed. Setting NULL will not set a seed. 
#' #' @importFrom irlba irlba #' @importMethodsFrom Matrix t #' #' @concept dimensional_reduction #' @rdname RunSLSI #' @export RunSLSI.default <- function( object, assay = NULL, n = 50, reduction.key = "SLSI_", graph = NULL, verbose = TRUE, seed.use = 42, ... ) { if (!is.null(x = seed.use)) { set.seed(seed = seed.use) } n <- min(n, nrow(x = object) - 1) if (verbose) { message("Smoothing peaks matrix") } object.smooth <- t(x = graph) %*% (t(x = object) %*% object) %*% graph if (verbose) { message("Performing eigendecomposition") } svd.V <- irlba(A = object.smooth, nv = n, nu = n, ...) sigma <- sqrt(x = svd.V$d) feature.loadings <- object %*% (graph %*% svd.V$u) %*% diag(x = 1/sigma) feature.loadings <- as.matrix(x = feature.loadings) cell.embeddings <- t(x = object) %*% feature.loadings %*% diag(x = 1/sigma) cell.embeddings <- as.matrix(x = cell.embeddings) # construct svd list stored in misc for LSI projection svd.lsi <- svd.V svd.lsi$d <- sigma svd.lsi$u <- feature.loadings svd.lsi$v <- cell.embeddings colnames(x = cell.embeddings) <- paste0(reduction.key, 1:ncol(cell.embeddings)) reduction.data <- CreateDimReducObject( embeddings = cell.embeddings, loadings = feature.loadings, key = reduction.key, assay = assay, misc = svd.lsi ) return(reduction.data) } #' @param features Features to compute SLSI on. If NULL, SLSI will be run #' using the variable features for the Assay. #' #' @rdname RunSLSI #' @concept dimensional_reduction #' @export #' @method RunSLSI Assay #' RunSLSI.Assay <- function( object, assay = NULL, features = NULL, n = 50, reduction.key = "SLSI_", graph = NULL, verbose = TRUE, seed.use = 42, ... ) { data.use <- PrepDR( object = object, features = features, slot = "data", verbose = verbose ) reduction.data <- RunSLSI( object = data.use, assay = assay, n = n, reduction.key = reduction.key, graph = graph, verbose = verbose, seed.use = seed.use, ... ) return(reduction.data) } #' @param reduction.name dimensional reduction name #' @rdname RunSLSI #' @concept dimensional_reduction #' @export #' @method RunSLSI Seurat #' RunSLSI.Seurat <- function( object, assay = NULL, features = NULL, n = 50, reduction.name = "slsi", reduction.key = "SLSI_", graph = NULL, verbose = TRUE, seed.use = 42, ... ) { assay <- assay %||% DefaultAssay(object = object) assay.data <- GetAssay(object = object, assay = assay) if (is.null(x = graph)) { stop("Graph is not provided") } else if (is.character(x = graph)) { graph <- object[[graph]] } reduction.data <- RunSLSI( object = assay.data, assay = assay, features = features, n = n, reduction.name = reduction.name, reduction.key = reduction.key, graph = graph, verbose = verbose, seed.use = seed.use, ... ) object[[reduction.name]] <- reduction.data object <- LogSeuratCommand(object = object) return(object) } Seurat/R/clustering.R0000644000176200001440000015667114525500037014241 0ustar liggesusers#' @include generics.R #' NULL #%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% # Functions #%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% #' Construct weighted nearest neighbor graph #' #' This function will construct a weighted nearest neighbor (WNN) graph. For #' each cell, we identify the nearest neighbors based on a weighted combination #' of two modalities. Takes as input two dimensional reductions, one computed #' for each modality.Other parameters are listed for debugging, but can be left #' as default values. 
#' #' @param object A Seurat object #' @param reduction.list A list of two dimensional reductions, one for each of #' the modalities to be integrated #' @param dims.list A list containing the dimensions for each reduction to use #' @param k.nn the number of multimodal neighbors to compute. 20 by default #' @param l2.norm Perform L2 normalization on the cell embeddings after #' dimensional reduction. TRUE by default. #' @param knn.graph.name Multimodal knn graph name #' @param snn.graph.name Multimodal snn graph name #' @param weighted.nn.name Multimodal neighbor object name #' @param modality.weight.name Variable name to store modality weight in object #' meta data #' @param knn.range The number of approximate neighbors to compute #' @param prune.SNN Cutoff not to discard edge in SNN graph #' @param sd.scale The scaling factor for kernel width. 1 by default #' @param cross.contant.list Constant used to avoid divide-by-zero errors. 1e-4 #' by default #' @param smooth Smoothing modality score across each individual modality #' neighbors. FALSE by default #' @param return.intermediate Store intermediate results in misc #' @param modality.weight A \code{\link{ModalityWeights}} object generated by #' \code{FindModalityWeights} #' @param verbose Print progress bars and output #' #' @return Seurat object containing a nearest-neighbor object, KNN graph, and #' SNN graph - each based on a weighted combination of modalities. #' @concept clustering #' @export #' FindMultiModalNeighbors <- function( object, reduction.list, dims.list, k.nn = 20, l2.norm = TRUE, knn.graph.name = "wknn", snn.graph.name = "wsnn", weighted.nn.name = "weighted.nn", modality.weight.name = NULL, knn.range = 200, prune.SNN = 1/15, sd.scale = 1, cross.contant.list = NULL, smooth = FALSE, return.intermediate = FALSE, modality.weight = NULL, verbose = TRUE ) { cross.contant.list <- cross.contant.list %||% as.list(x = rep(x = 1e-4, times = length(x = reduction.list))) if (is.null(x = modality.weight)) { if (verbose) { message("Calculating cell-specific modality weights") } modality.weight <- FindModalityWeights( object = object, reduction.list = reduction.list, dims.list = dims.list, k.nn = k.nn, sd.scale = sd.scale, l2.norm = l2.norm, cross.contant.list = cross.contant.list, smooth = smooth, verbose = verbose ) } modality.weight.name <- modality.weight.name %||% paste0(modality.weight@modality.assay, ".weight") modality.assay <- slot(object = modality.weight, name = "modality.assay") if (length(modality.weight.name) != length(reduction.list)) { warning("The number of provided modality.weight.name is not equal to the number of modalities. 
", paste(paste0(modality.assay, ".weight"), collapse = " "), " are used to store the modality weights" ) modality.weight.name <- paste0(modality.assay, ".weight") } first.assay <- modality.assay[1] weighted.nn <- MultiModalNN( object = object, k.nn = k.nn, modality.weight = modality.weight, knn.range = knn.range, verbose = verbose ) select_nn <- Indices(object = weighted.nn) select_nn_dist <- Distances(object = weighted.nn) # compute KNN graph if (verbose) { message("Constructing multimodal KNN graph") } j <- as.numeric(x = t(x = select_nn )) i <- ((1:length(x = j)) - 1) %/% k.nn + 1 nn.matrix <- sparseMatrix( i = i, j = j, x = 1, dims = c(ncol(x = object), ncol(x = object)) ) diag(x = nn.matrix) <- 1 rownames(x = nn.matrix) <- colnames(x = nn.matrix) <- colnames(x = object) nn.matrix <- nn.matrix + t(x = nn.matrix) - t(x = nn.matrix) * nn.matrix nn.matrix <- as.Graph(x = nn.matrix) slot(object = nn.matrix, name = "assay.used") <- first.assay object[[knn.graph.name]] <- nn.matrix # compute SNN graph if (verbose) { message("Constructing multimodal SNN graph") } snn.matrix <- ComputeSNN(nn_ranked = select_nn, prune = prune.SNN) rownames(x = snn.matrix) <- colnames(x = snn.matrix) <- Cells(x = object) snn.matrix <- as.Graph(x = snn.matrix ) slot(object = snn.matrix, name = "assay.used") <- first.assay object[[snn.graph.name]] <- snn.matrix # add neighbors and modality weights object[[weighted.nn.name]] <- weighted.nn for (m in 1:length(x = modality.weight.name)) { object[[modality.weight.name[[m]]]] <- slot( object = modality.weight, name = "modality.weight.list" )[[m]] } # add command log modality.weight.command <- slot(object = modality.weight, name = "command") slot(object = modality.weight.command, name = "assay.used") <- first.assay modality.weight.command.name <- slot(object = modality.weight.command, name = "name") object[[modality.weight.command.name]] <- modality.weight.command command <- LogSeuratCommand(object = object, return.command = TRUE) slot(object = command, name = "params")$modality.weight <- NULL slot(object = command, name = "assay.used") <- first.assay command.name <- slot(object = command, name = "name") object[[command.name]] <- command if (return.intermediate) { Misc(object = object, slot = "modality.weight") <- modality.weight } return (object) } #' Find subclusters under one cluster #' #' @inheritParams FindClusters #' @param cluster the cluster to be sub-clustered #' @param subcluster.name the name of sub cluster added in the meta.data #' #' @return return a object with sub cluster labels in the sub-cluster.name variable #' @concept clustering #' @export #' FindSubCluster <- function( object, cluster, graph.name, subcluster.name = "sub.cluster", resolution = 0.5, algorithm = 1 ) { sub.cell <- WhichCells(object = object, idents = cluster) sub.graph <- as.Graph(x = object[[graph.name]][sub.cell, sub.cell]) sub.clusters <- FindClusters( object = sub.graph, resolution = resolution, algorithm = algorithm ) sub.clusters[, 1] <- paste(cluster, sub.clusters[, 1], sep = "_") object[[subcluster.name]] <- as.character(x = Idents(object = object)) object[[subcluster.name]][sub.cell, ] <- sub.clusters[, 1] return(object) } #' Predict value from nearest neighbors #' #' This function will predict expression or cell embeddings from its k nearest #' neighbors index. For each cell, it will average its k neighbors value to get #' its new imputed value. It can average expression value in assays and cell #' embeddings from dimensional reductions. 
#'
#' @param object The object used to calculate knn
#' @param nn.idx k-nearest neighbor indices. A cells x k matrix.
#' @param assay Assay used for prediction
#' @param reduction Name of the dimensional reduction whose cell embeddings are
#' used for prediction
#' @param dims Dimensions of the cell embedding to use
#' @param return.assay If TRUE, return an assay; otherwise return the predicted
#' matrix
#' @param slot Slot used for prediction
#' @param features Features used for prediction
#' @param mean.function The function used to calculate the row means
#' @param seed Random seed used when checking whether each cell's nearest
#' neighbor is the query cell itself
#' @param verbose Print progress
#'
#' @return Returns an assay containing the predicted expression values in the
#' data slot
#' @concept integration
#' @export
#'
PredictAssay <- function(
  object,
  nn.idx,
  assay,
  reduction = NULL,
  dims = NULL,
  return.assay = TRUE,
  slot = "scale.data",
  features = NULL,
  mean.function = rowMeans,
  seed = 4273,
  verbose = TRUE
) {
  if (!inherits(x = mean.function, what = 'function')) {
    stop("'mean.function' must be a function")
  }
  if (is.null(x = reduction)) {
    reference.data <- GetAssayData(
      object = object,
      assay = assay,
      slot = slot
    )
    features <- features %||% VariableFeatures(object = object[[assay]])
    if (length(x = features) == 0) {
      features <- rownames(x = reference.data)
      if (verbose) {
        message("VariableFeatures are empty in the ", assay,
                " assay, features in the ", slot, " slot will be used"
        )
      }
    }
    reference.data <- reference.data[features, , drop = FALSE]
  } else {
    if (is.null(x = dims)) {
      stop("dims is empty")
    }
    reference.data <- t(x = Embeddings(object = object, reduction = reduction)[, dims])
  }
  set.seed(seed = seed)
  nn.check <- sample(x = 1:nrow(x = nn.idx), size = min(50, nrow(x = nn.idx)))
  if (all(nn.idx[nn.check, 1] == nn.check)) {
    if (verbose) {
      message("The nearest neighbor is the query cell itself, and it will not be used for prediction")
    }
    nn.idx <- nn.idx[, -1]
  }
  predicted <- apply(
    X = nn.idx,
    MARGIN = 1,
    FUN = function(x) mean.function(reference.data[, x])
  )
  colnames(x = predicted) <- Cells(x = object)
  if (return.assay) {
    predicted.assay <- CreateAssayObject(data = predicted, check.matrix = FALSE)
    return(predicted.assay)
  } else {
    return(predicted)
  }
}

#%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
# Methods for Seurat-defined generics
#%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%

#' @importFrom pbapply pblapply
#' @importFrom future.apply future_lapply
#' @importFrom future nbrOfWorkers
#'
#' @param modularity.fxn Modularity function (1 = standard; 2 = alternative).
#' @param initial.membership,node.sizes Parameters to pass to the Python leidenalg function.
#' @param resolution Value of the resolution parameter, use a value above
#' (below) 1.0 if you want to obtain a larger (smaller) number of communities.
#' @param algorithm Algorithm for modularity optimization (1 = original Louvain
#' algorithm; 2 = Louvain algorithm with multilevel refinement; 3 = SLM
#' algorithm; 4 = Leiden algorithm). Leiden requires the leidenalg python module.
#' @param method Method for running Leiden (defaults to matrix which is fast for small datasets).
#' Enable method = "igraph" to avoid casting large data to a dense matrix.
#' @param n.start Number of random starts.
#' @param n.iter Maximal number of iterations per random start.
#' @param random.seed Seed of the random number generator.
#' @param group.singletons Group singletons into nearest cluster.
If FALSE, assign all singletons to #' a "singleton" group #' @param temp.file.location Directory where intermediate files will be written. #' Specify the ABSOLUTE path. #' @param edge.file.name Edge file to use as input for modularity optimizer jar. #' @param verbose Print output #' #' @rdname FindClusters #' @concept clustering #' @export #' FindClusters.default <- function( object, modularity.fxn = 1, initial.membership = NULL, node.sizes = NULL, resolution = 0.8, method = "matrix", algorithm = 1, n.start = 10, n.iter = 10, random.seed = 0, group.singletons = TRUE, temp.file.location = NULL, edge.file.name = NULL, verbose = TRUE, ... ) { CheckDots(...) if (is.null(x = object)) { stop("Please provide an SNN graph") } if (tolower(x = algorithm) == "louvain") { algorithm <- 1 } if (tolower(x = algorithm) == "leiden") { algorithm <- 4 } if (nbrOfWorkers() > 1) { clustering.results <- future_lapply( X = resolution, FUN = function(r) { if (algorithm %in% c(1:3)) { ids <- RunModularityClustering( SNN = object, modularity = modularity.fxn, resolution = r, algorithm = algorithm, n.start = n.start, n.iter = n.iter, random.seed = random.seed, print.output = verbose, temp.file.location = temp.file.location, edge.file.name = edge.file.name ) } else if (algorithm == 4) { ids <- RunLeiden( object = object, method = method, partition.type = "RBConfigurationVertexPartition", initial.membership = initial.membership, node.sizes = node.sizes, resolution.parameter = r, random.seed = random.seed, n.iter = n.iter ) } else { stop("algorithm not recognised, please specify as an integer or string") } names(x = ids) <- colnames(x = object) ids <- GroupSingletons(ids = ids, SNN = object, verbose = verbose) results <- list(factor(x = ids)) names(x = results) <- paste0('res.', r) return(results) } ) clustering.results <- as.data.frame(x = clustering.results) } else { clustering.results <- data.frame(row.names = colnames(x = object)) for (r in resolution) { if (algorithm %in% c(1:3)) { ids <- RunModularityClustering( SNN = object, modularity = modularity.fxn, resolution = r, algorithm = algorithm, n.start = n.start, n.iter = n.iter, random.seed = random.seed, print.output = verbose, temp.file.location = temp.file.location, edge.file.name = edge.file.name) } else if (algorithm == 4) { ids <- RunLeiden( object = object, method = method, partition.type = "RBConfigurationVertexPartition", initial.membership = initial.membership, node.sizes = node.sizes, resolution.parameter = r, random.seed = random.seed, n.iter = n.iter ) } else { stop("algorithm not recognised, please specify as an integer or string") } names(x = ids) <- colnames(x = object) ids <- GroupSingletons(ids = ids, SNN = object, group.singletons = group.singletons, verbose = verbose) clustering.results[, paste0("res.", r)] <- factor(x = ids) } } return(clustering.results) } #' @importFrom methods is #' #' @param graph.name Name of graph to use for the clustering algorithm #' @param cluster.name Name of output clusters #' #' @rdname FindClusters #' @export #' @concept clustering #' @method FindClusters Seurat #' FindClusters.Seurat <- function( object, graph.name = NULL, cluster.name = NULL, modularity.fxn = 1, initial.membership = NULL, node.sizes = NULL, resolution = 0.8, method = "matrix", algorithm = 1, n.start = 10, n.iter = 10, random.seed = 0, group.singletons = TRUE, temp.file.location = NULL, edge.file.name = NULL, verbose = TRUE, ... ) { CheckDots(...) 
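  # If no graph name is supplied, fall back to the SNN graph that
  # FindNeighbors() stores for the default assay (e.g. "RNA_snn")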
graph.name <- graph.name %||% paste0(DefaultAssay(object = object), "_snn") if (!graph.name %in% names(x = object)) { stop("Provided graph.name not present in Seurat object") } if (!is(object = object[[graph.name]], class2 = "Graph")) { stop("Provided graph.name does not correspond to a graph object.") } clustering.results <- FindClusters( object = object[[graph.name]], modularity.fxn = modularity.fxn, initial.membership = initial.membership, node.sizes = node.sizes, resolution = resolution, method = method, algorithm = algorithm, n.start = n.start, n.iter = n.iter, random.seed = random.seed, group.singletons = group.singletons, temp.file.location = temp.file.location, edge.file.name = edge.file.name, verbose = verbose, ... ) cluster.name <- cluster.name %||% paste( graph.name, names(x = clustering.results), sep = '_' ) names(x = clustering.results) <- cluster.name # object <- AddMetaData(object = object, metadata = clustering.results) # Idents(object = object) <- colnames(x = clustering.results)[ncol(x = clustering.results)] idents.use <- names(x = clustering.results)[ncol(x = clustering.results)] object[[]] <- clustering.results Idents(object = object, replace = TRUE) <- object[[idents.use, drop = TRUE]] levels <- levels(x = object) levels <- tryCatch( expr = as.numeric(x = levels), warning = function(...) { return(levels) }, error = function(...) { return(levels) } ) Idents(object = object) <- factor(x = Idents(object = object), levels = sort(x = levels)) object[['seurat_clusters']] <- Idents(object = object) cmd <- LogSeuratCommand(object = object, return.command = TRUE) slot(object = cmd, name = 'assay.used') <- DefaultAssay(object = object[[graph.name]]) object[[slot(object = cmd, name = 'name')]] <- cmd return(object) } #' @param query Matrix of data to query against object. If missing, defaults to #' object. #' @param distance.matrix Boolean value of whether the provided matrix is a #' distance matrix; note, for objects of class \code{dist}, this parameter will #' be set automatically #' @param k.param Defines k for the k-nearest neighbor algorithm #' @param return.neighbor Return result as \code{\link{Neighbor}} object. Not #' used with distance matrix input. #' @param compute.SNN also compute the shared nearest neighbor graph #' @param prune.SNN Sets the cutoff for acceptable Jaccard index when #' computing the neighborhood overlap for the SNN construction. Any edges with #' values less than or equal to this will be set to 0 and removed from the SNN #' graph. Essentially sets the stringency of pruning (0 --- no pruning, 1 --- #' prune everything). #' @param nn.method Method for nearest neighbor finding. Options include: rann, #' annoy #' @param annoy.metric Distance metric for annoy. Options include: euclidean, #' cosine, manhattan, and hamming #' @param n.trees More trees gives higher precision when using annoy approximate #' nearest neighbor search #' @param nn.eps Error bound when performing nearest neighbor seach using RANN; #' default of 0.0 implies exact nearest neighbor search #' @param verbose Whether or not to print output to the console #' @param l2.norm Take L2Norm of the data #' @param cache.index Include cached index in returned Neighbor object #' (only relevant if return.neighbor = TRUE) #' @param index Precomputed index. Useful if querying new data against existing #' index to avoid recomputing. 
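#'
#' The example below is a usage sketch (not run); it assumes the small
#' \code{pbmc_small} demo object that ships with \pkg{SeuratObject} and an
#' existing "pca" reduction.
#'
#' @examples
#' \dontrun{
#' # Build NN and SNN graphs from the first 10 PCs; the graphs are stored in
#' # the object as "RNA_nn" and "RNA_snn"
#' pbmc_small <- FindNeighbors(pbmc_small, reduction = "pca", dims = 1:10)
#' Graphs(pbmc_small)
#' }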
#' #' @importFrom RANN nn2 #' @importFrom methods as #' #' @rdname FindNeighbors #' @export #' @concept clustering #' @method FindNeighbors default #' FindNeighbors.default <- function( object, query = NULL, distance.matrix = FALSE, k.param = 20, return.neighbor = FALSE, compute.SNN = !return.neighbor, prune.SNN = 1/15, nn.method = "annoy", n.trees = 50, annoy.metric = "euclidean", nn.eps = 0, verbose = TRUE, l2.norm = FALSE, cache.index = FALSE, index = NULL, ... ) { CheckDots(...) if (is.null(x = dim(x = object))) { warning( "Object should have two dimensions, attempting to coerce to matrix", call. = FALSE ) object <- as.matrix(x = object) } if (is.null(rownames(x = object))) { stop("Please provide rownames (cell names) with the input object") } n.cells <- nrow(x = object) if (n.cells < k.param) { warning( "k.param set larger than number of cells. Setting k.param to number of cells - 1.", call. = FALSE ) k.param <- n.cells - 1 } if (l2.norm) { object <- L2Norm(mat = object) query <- query %iff% L2Norm(mat = query) } query <- query %||% object # find the k-nearest neighbors for each single cell if (!distance.matrix) { if (verbose) { if (return.neighbor) { message("Computing nearest neighbors") } else { message("Computing nearest neighbor graph") } } nn.ranked <- NNHelper( data = object, query = query, k = k.param, method = nn.method, n.trees = n.trees, searchtype = "standard", eps = nn.eps, metric = annoy.metric, cache.index = cache.index, index = index ) if (return.neighbor) { if (compute.SNN) { warning("The SNN graph is not computed if return.neighbor is TRUE.", call. = FALSE) } return(nn.ranked) } nn.ranked <- Indices(object = nn.ranked) } else { if (verbose) { message("Building SNN based on a provided distance matrix") } knn.mat <- matrix(data = 0, ncol = k.param, nrow = n.cells) knd.mat <- knn.mat for (i in 1:n.cells) { knn.mat[i, ] <- order(object[i, ])[1:k.param] knd.mat[i, ] <- object[i, knn.mat[i, ]] } nn.ranked <- knn.mat[, 1:k.param] } # convert nn.ranked into a Graph j <- as.numeric(x = t(x = nn.ranked)) i <- ((1:length(x = j)) - 1) %/% k.param + 1 nn.matrix <- as(object = sparseMatrix(i = i, j = j, x = 1, dims = c(nrow(x = object), nrow(x = object))), Class = "Graph") rownames(x = nn.matrix) <- rownames(x = object) colnames(x = nn.matrix) <- rownames(x = object) neighbor.graphs <- list(nn = nn.matrix) if (compute.SNN) { if (verbose) { message("Computing SNN") } snn.matrix <- ComputeSNN( nn_ranked = nn.ranked, prune = prune.SNN ) rownames(x = snn.matrix) <- rownames(x = object) colnames(x = snn.matrix) <- rownames(x = object) snn.matrix <- as.Graph(x = snn.matrix) neighbor.graphs[["snn"]] <- snn.matrix } return(neighbor.graphs) } #' @rdname FindNeighbors #' @export #' @concept clustering #' @method FindNeighbors Assay #' FindNeighbors.Assay <- function( object, features = NULL, k.param = 20, return.neighbor = FALSE, compute.SNN = !return.neighbor, prune.SNN = 1/15, nn.method = "annoy", n.trees = 50, annoy.metric = "euclidean", nn.eps = 0, verbose = TRUE, l2.norm = FALSE, cache.index = FALSE, ... ) { CheckDots(...) features <- features %||% VariableFeatures(object = object) data.use <- t(x = GetAssayData(object = object, slot = "data")[features, ]) neighbor.graphs <- FindNeighbors( object = data.use, k.param = k.param, compute.SNN = compute.SNN, prune.SNN = prune.SNN, nn.method = nn.method, n.trees = n.trees, annoy.metric = annoy.metric, nn.eps = nn.eps, verbose = verbose, l2.norm = l2.norm, return.neighbor = return.neighbor, cache.index = cache.index, ... 
) return(neighbor.graphs) } #' @rdname FindNeighbors #' @export #' @concept clustering #' @method FindNeighbors dist #' FindNeighbors.dist <- function( object, k.param = 20, return.neighbor = FALSE, compute.SNN = !return.neighbor, prune.SNN = 1/15, nn.method = "annoy", n.trees = 50, annoy.metric = "euclidean", nn.eps = 0, verbose = TRUE, l2.norm = FALSE, cache.index = FALSE, ... ) { CheckDots(...) return(FindNeighbors( object = as.matrix(x = object), distance.matrix = TRUE, k.param = k.param, compute.SNN = compute.SNN, prune.SNN = prune.SNN, nn.eps = nn.eps, nn.method = nn.method, n.trees = n.trees, annoy.metric = annoy.metric, verbose = verbose, l2.norm = l2.norm, return.neighbor = return.neighbor, cache.index = cache.index, ... )) } #' @param assay Assay to use in construction of (S)NN; used only when \code{dims} #' is \code{NULL} #' @param features Features to use as input for building the (S)NN; used only when #' \code{dims} is \code{NULL} #' @param reduction Reduction to use as input for building the (S)NN #' @param dims Dimensions of reduction to use as input #' @param do.plot Plot SNN graph on tSNE coordinates #' @param graph.name Optional naming parameter for stored (S)NN graph #' (or Neighbor object, if return.neighbor = TRUE). Default is assay.name_(s)nn. #' To store both the neighbor graph and the shared nearest neighbor (SNN) graph, #' you must supply a vector containing two names to the \code{graph.name} #' parameter. The first element in the vector will be used to store the nearest #' neighbor (NN) graph, and the second element used to store the SNN graph. If #' only one name is supplied, only the NN graph is stored. #' #' @importFrom igraph graph.adjacency plot.igraph E #' #' @rdname FindNeighbors #' @export #' @concept clustering #' @method FindNeighbors Seurat #' FindNeighbors.Seurat <- function( object, reduction = "pca", dims = 1:10, assay = NULL, features = NULL, k.param = 20, return.neighbor = FALSE, compute.SNN = !return.neighbor, prune.SNN = 1/15, nn.method = "annoy", n.trees = 50, annoy.metric = "euclidean", nn.eps = 0, verbose = TRUE, do.plot = FALSE, graph.name = NULL, l2.norm = FALSE, cache.index = FALSE, ... ) { CheckDots(...) if (!is.null(x = dims)) { assay <- DefaultAssay(object = object[[reduction]]) data.use <- Embeddings(object = object[[reduction]]) if (max(dims) > ncol(x = data.use)) { stop("More dimensions specified in dims than have been computed") } data.use <- data.use[, dims] neighbor.graphs <- FindNeighbors( object = data.use, k.param = k.param, compute.SNN = compute.SNN, prune.SNN = prune.SNN, nn.method = nn.method, n.trees = n.trees, annoy.metric = annoy.metric, nn.eps = nn.eps, verbose = verbose, l2.norm = l2.norm, return.neighbor = return.neighbor, cache.index = cache.index, ... ) } else { assay <- assay %||% DefaultAssay(object = object) neighbor.graphs <- FindNeighbors( object = object[[assay]], features = features, k.param = k.param, compute.SNN = compute.SNN, prune.SNN = prune.SNN, nn.method = nn.method, n.trees = n.trees, annoy.metric = annoy.metric, nn.eps = nn.eps, verbose = verbose, l2.norm = l2.norm, return.neighbor = return.neighbor, cache.index = cache.index, ... 
) } if (length(x = neighbor.graphs) == 1) { neighbor.graphs <- list(nn = neighbor.graphs) } graph.name <- graph.name %||% if (return.neighbor) { paste0(assay, ".", names(x = neighbor.graphs)) } else { paste0(assay, "_", names(x = neighbor.graphs)) } if (length(x = graph.name) == 1) { message("Only one graph name supplied, storing nearest-neighbor graph only") } for (ii in 1:length(x = graph.name)) { if (inherits(x = neighbor.graphs[[ii]], what = "Graph")) { DefaultAssay(object = neighbor.graphs[[ii]]) <- assay } object[[graph.name[[ii]]]] <- neighbor.graphs[[ii]] } if (do.plot) { if (!"tsne" %in% names(x = object@reductions)) { warning("Please compute a tSNE for SNN visualization. See RunTSNE().") } else { if (nrow(x = Embeddings(object = object[["tsne"]])) != ncol(x = object)) { warning("Please compute a tSNE for SNN visualization. See RunTSNE().") } else { net <- graph.adjacency( adjmatrix = as.matrix(x = neighbor.graphs[[2]]), mode = "undirected", weighted = TRUE, diag = FALSE ) plot.igraph( x = net, layout = as.matrix(x = Embeddings(object = object[["tsne"]])), edge.width = E(graph = net)$weight, vertex.label = NA, vertex.size = 0 ) } } } object <- LogSeuratCommand(object = object) return(object) } #%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% # Internal #%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% # Run annoy # # @param data Data to build the index with # @param query A set of data to be queried against data # @param metric Distance metric; can be one of "euclidean", "cosine", "manhattan", # "hamming" # @param n.trees More trees gives higher precision when querying # @param k Number of neighbors # @param search.k During the query it will inspect up to search_k nodes which # gives you a run-time tradeoff between better accuracy and speed. # @param include.distance Include the corresponding distances # @param index optional index object, will be recomputed if not provided # AnnoyNN <- function(data, query = data, metric = "euclidean", n.trees = 50, k, search.k = -1, include.distance = TRUE, index = NULL ) { idx <- index %||% AnnoyBuildIndex( data = data, metric = metric, n.trees = n.trees) nn <- AnnoySearch( index = idx, query = query, k = k, search.k = search.k, include.distance = include.distance) nn$idx <- idx nn$alg.info <- list(metric = metric, ndim = ncol(x = data)) return(nn) } # Build the annoy index # # @param data Data to build the index with # @param metric Distance metric; can be one of "euclidean", "cosine", "manhattan", # "hamming" # @param n.trees More trees gives higher precision when querying # #' @importFrom RcppAnnoy AnnoyEuclidean AnnoyAngular AnnoyManhattan AnnoyHamming # AnnoyBuildIndex <- function(data, metric = "euclidean", n.trees = 50) { f <- ncol(x = data) a <- switch( EXPR = metric, "euclidean" = new(Class = RcppAnnoy::AnnoyEuclidean, f), "cosine" = new(Class = RcppAnnoy::AnnoyAngular, f), "manhattan" = new(Class = RcppAnnoy::AnnoyManhattan, f), "hamming" = new(Class = RcppAnnoy::AnnoyHamming, f), stop ("Invalid metric") ) for (ii in seq(nrow(x = data))) { a$addItem(ii - 1, data[ii, ]) } a$build(n.trees) return(a) } # Search an Annoy approximate nearest neighbor index # # @param Annoy index, built with AnnoyBuildIndex # @param query A set of data to be queried against the index # @param k Number of neighbors # @param search.k During the query it will inspect up to search_k nodes which # gives you a run-time tradeoff between better accuracy and speed. 
# @param include.distance Include the corresponding distances in the result
#
# @return A list with 'nn.idx' (for each element in 'query', the index of the
# nearest k elements in the index) and 'nn.dists' (the distances of the nearest
# k elements)
#
#' @importFrom future plan
#' @importFrom future.apply future_lapply
#
AnnoySearch <- function(index, query, k, search.k = -1, include.distance = TRUE) {
  n <- nrow(x = query)
  idx <- matrix(nrow = n, ncol = k)
  dist <- matrix(nrow = n, ncol = k)
  convert <- methods::is(index, "Rcpp_AnnoyAngular")
  if (!inherits(x = plan(), what = "multicore")) {
    oplan <- plan(strategy = "sequential")
    on.exit(plan(oplan), add = TRUE)
  }
  res <- future_lapply(X = 1:n, FUN = function(x) {
    res <- index$getNNsByVectorList(query[x, ], k, search.k, include.distance)
    # Convert from Angular to Cosine distance
    if (convert) {
      res$dist <- 0.5 * (res$dist * res$dist)
    }
    list(res$item + 1, res$distance)
  })
  for (i in 1:n) {
    idx[i, ] <- res[[i]][[1]]
    if (include.distance) {
      dist[i, ] <- res[[i]][[2]]
    }
  }
  return(list(nn.idx = idx, nn.dists = dist))
}

# Calculate mean distance of the farthest neighbors from SNN graph
#
# This function computes the average distance to the farthest k.nn neighbors,
# i.e. those with the lowest nonzero SNN edge weight. First, for each cell it
# finds the k.nn neighbors with the smallest edge weight. If there are multiple
# cells with the same edge weight at the k.nn-th index, all of those cells are
# considered in the next step. Next, it computes the Euclidean distance to all
# k.nn cells in the space defined by the embeddings matrix and returns the
# average distance to the farthest k.nn cells.
#
# @param snn.graph An SNN graph
# @param embeddings The cell embeddings used to calculate neighbor distances
# @param k.nn The number of neighbors to calculate
# @param l2.norm Perform L2 normalization on the cell embeddings
# @param nearest.dist The vector of distances to the nearest neighbors to
# subtract off from distance calculations
#
#
ComputeSNNwidth <- function(
  snn.graph,
  embeddings,
  k.nn,
  l2.norm = TRUE,
  nearest.dist = NULL
) {
  if (l2.norm) {
    embeddings <- L2Norm(mat = embeddings)
  }
  nearest.dist <- nearest.dist %||% rep(x = 0, times = ncol(x = snn.graph))
  if (length(x = nearest.dist) != ncol(x = snn.graph)) {
    stop("Please provide a vector for nearest.dist that has as many elements as",
         " there are columns in the snn.graph (", ncol(x = snn.graph), ").")
  }
  snn.width <- SNN_SmallestNonzero_Dist(
    snn = snn.graph,
    mat = embeddings,
    n = k.nn,
    nearest_dist = nearest.dist
  )
  return(snn.width)
}

# Create an Annoy index
#
# @note Function exists because it's not exported from \pkg{uwot}
#
# @param name Distance metric name
# @param ndim Number of dimensions
#
# @return An nn index object
#
#' @importFrom methods new
#' @importFrom RcppAnnoy AnnoyAngular AnnoyManhattan AnnoyEuclidean AnnoyHamming
#
CreateAnn <- function(name, ndim) {
  return(switch(
    EXPR = name,
    cosine = new(Class = AnnoyAngular, ndim),
    manhattan = new(Class = AnnoyManhattan, ndim),
    euclidean = new(Class = AnnoyEuclidean, ndim),
    hamming = new(Class = AnnoyHamming, ndim),
    stop("BUG: unknown Annoy metric '", name, "'")
  ))
}

# Calculate modality weights
#
# This function calculates cell-specific modality weights, which are used in
# WNN analysis.
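#
# In outline (a summary of the code below, not additional behavior): for each
# modality r, each cell's embedding is compared with (i) the average of its own
# k nearest neighbors in r and (ii) the averages computed from the neighbors
# found in the other modalities; those distances are converted to exponential
# kernel values using a cell-specific bandwidth, the within/cross kernel ratios
# are clamped, and a softmax across modalities yields the per-cell modality
# weights.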
#' @inheritParams FindMultiModalNeighbors # @param object A Seurat object # @param snn.far.nn Use SNN farthest neighbors to calculate the kernel width # @param s.nn How many SNN neighbors to use in kernel width # @param sigma.idx Neighbor index used to calculate kernel width if snn.far.nn = FALSE # @importFrom pbapply pblapply # @return Returns a \code{ModalityWeights} object that can be used as input to # \code{\link{FindMultiModalNeighbors}} # #' @importFrom pbapply pblapply # FindModalityWeights <- function( object, reduction.list, dims.list, k.nn = 20, snn.far.nn = TRUE, s.nn = k.nn, prune.SNN = 0, l2.norm = TRUE, sd.scale = 1, query = NULL, cross.contant.list = NULL, sigma.idx = k.nn, smooth = FALSE, verbose = TRUE ) { my.lapply <- ifelse( test = verbose, yes = pblapply, no = lapply ) cross.contant.list <- cross.contant.list %||% as.list(x = rep(x = 1e-4, times = length(x = reduction.list))) reduction.set <- unlist(x = reduction.list) names(x = reduction.list) <- names(x = dims.list) <- names(x = cross.contant.list) <- reduction.set embeddings.list <- lapply( X = reduction.list, FUN = function(r) Embeddings(object = object, reduction = r)[, dims.list[[r]]] ) if (l2.norm) { embeddings.list.norm <- lapply( X = embeddings.list, FUN = function(embeddings) L2Norm(mat = embeddings) ) } else { embeddings.list.norm <- embeddings.list } if (is.null(x = query)) { query.embeddings.list.norm <- embeddings.list.norm query <- object } else { if (snn.far.nn) { stop("query does not support using snn to find distant neighbors") } query.embeddings.list <- lapply( X = reduction.list, FUN = function(r) { Embeddings(object = query, reduction = r)[, dims.list[[r]]] } ) if (l2.norm) { query.embeddings.list <- lapply( X = query.embeddings.list, FUN = function(embeddings) L2Norm(mat = embeddings) ) } query.embeddings.list.norm <- query.embeddings.list } if (verbose) { message("Finding ", k.nn, " nearest neighbors for each modality.") } nn.list <- my.lapply( X = reduction.list, FUN = function(r) { nn.r <- NNHelper( data = embeddings.list.norm[[r]], query = query.embeddings.list.norm[[r]], k = max(k.nn, sigma.idx, s.nn), method = "annoy", metric = "euclidean" ) return(nn.r) } ) sigma.nn.list <- nn.list if (sigma.idx > k.nn || s.nn > k.nn) { nn.list <- lapply( X = nn.list, FUN = function(nn){ slot(object = nn, name = "nn.idx") <- Indices(object = nn)[, 1:k.nn] slot(object = nn, name = "nn.dists") <- Distances(object = nn)[, 1:k.nn] return(nn) } ) } nearest_dist <- lapply(X = reduction.list, FUN = function(r) Distances(object = nn.list[[r]])[, 2]) within_impute <- list() cross_impute <- list() # Calculating within and cross modality distance for (r in reduction.set) { reduction.norm <- paste0(r, ".norm") object[[ reduction.norm ]] <- CreateDimReducObject( embeddings = embeddings.list.norm[[r]], key = paste0("norm", Key(object = object[[r]])), assay = DefaultAssay(object = object[[r]]) ) within_impute[[r]] <- PredictAssay( object = object, nn.idx = Indices(object = nn.list[[r]]), reduction = reduction.norm, dims = 1:ncol(x = embeddings.list.norm[[r]]), verbose = FALSE, return.assay = FALSE ) cross.modality <- setdiff(x = reduction.set, y = r) cross_impute[[r]] <- lapply(X = cross.modality, FUN = function(r2) { PredictAssay( object = object, nn.idx = Indices(object = nn.list[[r2]]), reduction = reduction.norm, dims = 1:ncol(x = embeddings.list.norm[[r]]), verbose = FALSE, return.assay = FALSE ) } ) names(x = cross_impute[[r]]) <- cross.modality } within_impute_dist <- lapply( X = reduction.list, FUN = function(r) { 
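      # Within-modality imputation error: distance from each cell's normalized
      # embedding to the average of its own k nearest neighbors in modality r,
      # with the distance to its nearest neighbor subtracted as an offset
      # (see impute_dist)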
r_dist <- impute_dist( x = query.embeddings.list.norm[[r]], y = t(x = within_impute[[r]]), nearest.dist = nearest_dist[[r]] ) return(r_dist) } ) cross_impute_dist <- lapply( X = reduction.list, FUN = function(r) { r_dist <- sapply(setdiff(x = reduction.set, y = r), FUN = function(r2) { r2_dist <- impute_dist( x = query.embeddings.list.norm[[r]], y = t(x = cross_impute[[r]][[r2]]), nearest.dist = nearest_dist[[r]] ) return( r2_dist) }) return(r_dist) } ) # calculate kernel width if (snn.far.nn) { if (verbose) { message("Calculating kernel bandwidths") } snn.graph.list <- lapply( X = sigma.nn.list, FUN = function(nn) { snn.matrix <- ComputeSNN( nn_ranked = Indices(object = nn)[, 1:s.nn], prune = prune.SNN ) colnames(x = snn.matrix) <- rownames(x = snn.matrix) <- Cells(x = object) return (snn.matrix) } ) farthest_nn_dist <- my.lapply( X = 1:length(x = snn.graph.list), FUN = function(s) { distant_nn <- ComputeSNNwidth( snn.graph = snn.graph.list[[s]], k.nn = k.nn, l2.norm = FALSE, embeddings = embeddings.list.norm[[s]], nearest.dist = nearest_dist[[s]] ) return (distant_nn) } ) names(x = farthest_nn_dist) <- unlist(x = reduction.list) modality_sd.list <- lapply( X = farthest_nn_dist, FUN = function(sd) sd * sd.scale ) } else { if (verbose) { message("Calculating sigma by ", sigma.idx, "th neighbor") } modality_sd.list <- lapply( X = reduction.list , FUN = function(r) { rdist <- Distances(object = sigma.nn.list[[r]])[, sigma.idx] - nearest_dist[[r]] rdist <- rdist * sd.scale return (rdist) } ) } # Calculating within and cross modality kernel, and modality weights within_impute_kernel <- lapply( X = reduction.list, FUN = function(r) { exp(-1 * (within_impute_dist[[r]] / modality_sd.list[[r]]) ) } ) cross_impute_kernel <- lapply( X = reduction.list, FUN = function(r) { exp(-1 * (cross_impute_dist[[r]] / modality_sd.list[[r]]) ) } ) params <- list( "reduction.list" = reduction.list, "dims.list" = dims.list, "l2.norm" = l2.norm, "k.nn" = k.nn, "sigma.idx" = sigma.idx, "snn.far.nn" = snn.far.nn , "sigma.list" = modality_sd.list, "nearest.dist" = nearest_dist ) modality_score <- lapply( X = reduction.list, FUN = function(r) { score.r <- sapply( X = setdiff(x = reduction.set, y = r), FUN = function(r2) { score <- within_impute_kernel[[r]] / (cross_impute_kernel[[r]][, r2] + cross.contant.list[[r]]) score <- MinMax(data = score, min = 0, max = 200) return(score) } ) return(score.r) } ) if (smooth) { modality_score <- lapply( X = reduction.list, FUN = function(r) { apply( X = Indices(object = nn.list[[r]]), MARGIN = 1, FUN = function(nn) mean(x = modality_score[[r]][nn[-1]]) ) } ) } all_modality_score <- rowSums(x = exp(x = Reduce(f = cbind, x = modality_score))) modality.weight <- lapply( X = modality_score, FUN = function(score_m) { rowSums(x = exp(x = score_m))/all_modality_score } ) score.mat <- list( within_impute_dist = within_impute_dist, cross_impute_dist = cross_impute_dist, within_impute_kernel = within_impute_kernel, cross_impute_kernel = cross_impute_kernel, modality_score = modality_score ) # unlist the input parameters command <- LogSeuratCommand(object = object, return.command = TRUE) command@params <- lapply(X = command@params, FUN = function(l) unlist(x = l)) modality.assay <- sapply( X = reduction.list , FUN = function (r) slot(object[[r]], name = "assay.used") ) modality.weights.obj <- new( Class = "ModalityWeights", modality.weight.list = modality.weight, modality.assay = modality.assay, params = params, score.matrix = score.mat, command = command ) return(modality.weights.obj) } # 
Group single cells that make up their own cluster in with the cluster they are
# most connected to.
#
# @param ids Named vector of cluster ids
# @param SNN SNN graph used in clustering
# @param group.singletons Group singletons into nearest cluster. If FALSE, assign all singletons to
# a "singleton" group
#
# @return Returns the updated cluster ids, with all singletons merged into the
# cluster they are most connected to
#
GroupSingletons <- function(ids, SNN, group.singletons = TRUE, verbose = TRUE) {
  # identify singletons
  singletons <- c()
  singletons <- names(x = which(x = table(ids) == 1))
  singletons <- intersect(x = unique(x = ids), singletons)
  if (!group.singletons) {
    ids[which(ids %in% singletons)] <- "singleton"
    return(ids)
  }
  # calculate connectivity of singletons to other clusters, add singleton
  # to cluster it is most connected to
  cluster_names <- as.character(x = unique(x = ids))
  cluster_names <- setdiff(x = cluster_names, y = singletons)
  connectivity <- vector(mode = "numeric", length = length(x = cluster_names))
  names(x = connectivity) <- cluster_names
  new.ids <- ids
  for (i in singletons) {
    i.cells <- names(which(ids == i))
    for (j in cluster_names) {
      j.cells <- names(which(ids == j))
      subSNN <- SNN[i.cells, j.cells]
      set.seed(1) # to match previous behavior, where the random seed was set in WhichCells
      if (is.object(x = subSNN)) {
        connectivity[j] <- sum(subSNN) / (nrow(x = subSNN) * ncol(x = subSNN))
      } else {
        connectivity[j] <- mean(x = subSNN)
      }
    }
    m <- max(connectivity, na.rm = TRUE)
    mi <- which(x = connectivity == m, arr.ind = TRUE)
    closest_cluster <- sample(x = names(x = connectivity[mi]), 1)
    ids[i.cells] <- closest_cluster
  }
  if (length(x = singletons) > 0 && verbose) {
    message(paste(
      length(x = singletons),
      "singletons identified.",
      length(x = unique(x = ids)),
      "final clusters."
)) } return(ids) } # Find multimodal neighbors # # @param object The object used to calculate knn # @param query The query object when query and reference are different # @param modality.weight A \code{\link{ModalityWeights}} object generated by # \code{\link{FindModalityWeights}} # @param modality.weight.list A list of modality weight value # @param k.nn Number of nearest multimodal neighbors to compute # @param reduction.list A list of reduction name # @param dims.list A list of dimensions used for the reduction # @param knn.range The number of approximate neighbors to compute # @param kernel.power The power for the exponential kernel # @param nearest.dist The list of distance to the nearest neighbors # @param sigma.list The list of kernel width # @param l2.norm Perform L2 normalization on the cell embeddings after # dimensional reduction # @param verbose Print output to the console # @importFrom pbapply pblapply # @return return a list containing nn index and nn multimodal distance # #' @importFrom methods new #' @importClassesFrom SeuratObject Neighbor # MultiModalNN <- function( object, query = NULL, modality.weight = NULL, modality.weight.list = NULL, k.nn = NULL, reduction.list = NULL, dims.list = NULL, knn.range = 200, kernel.power = 1, nearest.dist = NULL, sigma.list = NULL, l2.norm = NULL, verbose = TRUE ){ my.lapply <- ifelse( test = verbose, yes = pblapply, no = lapply ) k.nn <- k.nn %||% slot(object = modality.weight, name = "params")$k.nn reduction.list <- reduction.list %||% slot(object = modality.weight, name = "params")$reduction.list dims.list = dims.list %||% slot(object = modality.weight, name = "params")$dims.list nearest.dist = nearest.dist %||% slot(object = modality.weight, name = "params")$nearest.dist sigma.list =sigma.list %||% slot(object = modality.weight, name = "params")$sigma.list l2.norm = l2.norm %||% slot(object = modality.weight, name = "params")$l2.norm modality.weight.value <- modality.weight.list %||% slot(object = modality.weight, name = "modality.weight.list") names(x = modality.weight.value) <- unlist(x = reduction.list) if (inherits(x = object, what = "Seurat")) { reduction_embedding <- lapply( X = 1:length(x = reduction.list), FUN = function(x) { Embeddings(object = object, reduction = reduction.list[[x]])[, dims.list[[x]]] } ) } else { reduction_embedding <- object } if (is.null(x = query)) { query.reduction_embedding <- reduction_embedding query <- object } else { if (inherits(x = object, what = "Seurat")) { query.reduction_embedding <- lapply( X = 1:length(x = reduction.list), FUN = function(x) { Embeddings(object = query, reduction = reduction.list[[x]] )[, dims.list[[x]]] } ) } else { query.reduction_embedding <- query } } if (l2.norm) { query.reduction_embedding <- lapply( X = query.reduction_embedding, FUN = function(x) L2Norm(mat = x) ) reduction_embedding <- lapply( X = reduction_embedding, FUN = function(x) L2Norm(mat = x) ) } query.cell.num <- nrow(x = query.reduction_embedding[[1]]) reduction.num <- length(x = query.reduction_embedding) if (verbose) { message("Finding multimodal neighbors") } reduction_nn <- my.lapply( X = 1:reduction.num, FUN = function(x) { nn_x <- NNHelper( data = reduction_embedding[[x]], query = query.reduction_embedding[[x]], k = knn.range, method = 'annoy', metric = "euclidean" ) return (nn_x) } ) # union of rna and adt nn, remove itself from neighobors reduction_nn <- lapply( X = reduction_nn, FUN = function(x) Indices(object = x)[, -1] ) nn_idx <- lapply( X = 1:query.cell.num , FUN = function(x) { Reduce( f = 
union, x = lapply( X = reduction_nn, FUN = function(y) y[x, ] ) ) } ) # calculate euclidean distance of all neighbors nn_dist <- my.lapply( X = 1:reduction.num, FUN = function(r) { nndist <- NNdist( nn.idx = nn_idx, embeddings = reduction_embedding[[r]], query.embeddings = query.reduction_embedding[[r]], nearest.dist = nearest.dist[[r]] ) return(nndist) } ) # modality weighted distance if (length(x = sigma.list[[1]]) == 1) { sigma.list <- lapply(X = sigma.list, FUN = function(x) rep(x = x, ncol(x = object))) } nn_weighted_dist <- lapply( X = 1:reduction.num, FUN = function(r) { lapply( X = 1:query.cell.num, FUN = function(x) { exp(-1*(nn_dist[[r]][[x]] / sigma.list[[r]][x] ) ** kernel.power) * modality.weight.value[[r]][x] } ) } ) nn_weighted_dist <- sapply( X = 1:query.cell.num, FUN = function(x) { Reduce( f = "+", x = lapply( X = 1:reduction.num, FUN = function(r) nn_weighted_dist[[r]][[x]] ) ) } ) # select k nearest joint neighbors select_order <- lapply( X = nn_weighted_dist, FUN = function(dist) { order(dist, decreasing = TRUE) }) select_nn <- t(x = sapply( X = 1:query.cell.num, FUN = function(x) nn_idx[[x]][select_order[[x]]][1:k.nn] ) ) select_dist <- t(x = sapply( X = 1:query.cell.num, FUN = function(x) nn_weighted_dist[[x]][select_order[[x]]][1:k.nn]) ) select_dist <- sqrt(x = MinMax(data = (1 - select_dist) / 2, min = 0, max = 1)) weighted.nn <- new( Class = 'Neighbor', nn.idx = select_nn, nn.dist = select_dist, alg.info = list(), cell.names = Cells(x = query) ) return(weighted.nn) } # Calculate NN distance for the given nn.idx # @param nn.idx The nearest neighbors position index # @param embeddings cell embeddings # @param metric distance metric # @param query.embeddings query cell embeddings # @param nearest.dist The list of distance to the nearest neighbors # NNdist <- function( nn.idx, embeddings, metric = "euclidean", query.embeddings = NULL, nearest.dist = NULL ) { if (!is.list(x = nn.idx)) { nn.idx <- lapply(X = 1:nrow(x = nn.idx), FUN = function(x) nn.idx[x, ]) } query.embeddings <- query.embeddings %||% embeddings nn.dist <- fast_dist( x = query.embeddings, y = embeddings, n = nn.idx ) if (!is.null(x = nearest.dist)) { nn.dist <- lapply( X = 1:nrow(x = query.embeddings), FUN = function(x) { r_dist = nn.dist[[x]] - nearest.dist[x] r_dist[r_dist < 0] <- 0 return(r_dist) } ) } return(nn.dist) } # Internal helper function to dispatch to various neighbor finding methods # # @param data Input data # @param query Data to query against data # @param k Number of nearest neighbors to compute # @param method Nearest neighbor method to use: "rann", "annoy" # @param cache.index Store algorithm index with results for reuse # @param ... additional parameters to specific neighbor finding method # #' @importFrom methods new #' @importClassesFrom SeuratObject Neighbor # NNHelper <- function(data, query = data, k, method, cache.index = FALSE, ...) { args <- as.list(x = sys.frame(which = sys.nframe())) args <- c(args, list(...)) results <- ( switch( EXPR = method, "rann" = { args <- args[intersect(x = names(x = args), y = names(x = formals(fun = nn2)))] do.call(what = 'nn2', args = args) }, "annoy" = { args <- args[intersect(x = names(x = args), y = names(x = formals(fun = AnnoyNN)))] do.call(what = 'AnnoyNN', args = args) }, "hnsw" = { args <- args[intersect(x = names(x = args), y = names(x = formals(fun = HnswNN)))] do.call(what = 'HnswNN', args = args) }, stop("Invalid method. 
Please choose one of 'rann', 'annoy'") ) ) n.ob <- new( Class = 'Neighbor', nn.idx = results$nn.idx, nn.dist = results$nn.dists, alg.info = results$alg.info %||% list(), cell.names = rownames(x = query) ) if (isTRUE(x = cache.index) && !is.null(x = results$idx)) { slot(object = n.ob, name = "alg.idx") <- results$idx } return(n.ob) } # Run Leiden clustering algorithm # # Implements the Leiden clustering algorithm in R using reticulate # to run the Python version. Requires the python "leidenalg" and "igraph" modules # to be installed. Returns a vector of partition indices. # # @param adj_mat An adjacency matrix or SNN matrix # @param partition.type Type of partition to use for Leiden algorithm. # Defaults to RBConfigurationVertexPartition. Options include: ModularityVertexPartition, # RBERVertexPartition, CPMVertexPartition, MutableVertexPartition, # SignificanceVertexPartition, SurpriseVertexPartition (see the Leiden python # module documentation for more details) # @param initial.membership,node.sizes Parameters to pass to the Python leidenalg function. # @param resolution.parameter A parameter controlling the coarseness of the clusters # for Leiden algorithm. Higher values lead to more clusters. (defaults to 1.0 for # partition types that accept a resolution parameter) # @param random.seed Seed of the random number generator # @param n.iter Maximal number of iterations per random start # # @keywords graph network igraph mvtnorm simulation # #' @importFrom leiden leiden #' @importFrom reticulate py_module_available #' @importFrom igraph graph_from_adjacency_matrix graph_from_adj_list # # @author Tom Kelly # # @export # RunLeiden <- function( object, method = c("matrix", "igraph"), partition.type = c( 'RBConfigurationVertexPartition', 'ModularityVertexPartition', 'RBERVertexPartition', 'CPMVertexPartition', 'MutableVertexPartition', 'SignificanceVertexPartition', 'SurpriseVertexPartition' ), initial.membership = NULL, node.sizes = NULL, resolution.parameter = 1, random.seed = 0, n.iter = 10 ) { if (!py_module_available(module = 'leidenalg')) { stop( "Cannot find Leiden algorithm, please install through pip (e.g. pip install leidenalg).", call. = FALSE ) } switch( EXPR = method, "matrix" = { input <- as(object = object, Class = "matrix") }, "igraph" = { input <- if (inherits(x = object, what = 'list')) { graph_from_adj_list(adjlist = object) } else if (inherits(x = object, what = c('dgCMatrix', 'matrix', 'Matrix'))) { if (inherits(x = object, what = 'Graph')) { object <- as.sparse(x = object) } graph_from_adjacency_matrix(adjmatrix = object, weighted = TRUE) } else if (inherits(x = object, what = 'igraph')) { object } else { stop( "Method for Leiden not found for class", class(x = object), call. 
= FALSE ) } }, stop("Method for Leiden must be either 'matrix' or igraph'") ) #run leiden from CRAN package (calls python with reticulate) partition <- leiden( object = input, partition_type = partition.type, initial_membership = initial.membership, weights = NULL, node_sizes = node.sizes, resolution_parameter = resolution.parameter, seed = random.seed, n_iterations = n.iter ) return(partition) } # Runs the modularity optimizer (C++ port of java program ModularityOptimizer.jar) # # @param SNN SNN matrix to use as input for the clustering algorithms # @param modularity Modularity function to use in clustering (1 = standard; 2 = alternative) # @param resolution Value of the resolution parameter, use a value above (below) 1.0 if you want to obtain a larger (smaller) number of communities # @param algorithm Algorithm for modularity optimization (1 = original Louvain algorithm; 2 = Louvain algorithm with multilevel refinement; 3 = SLM algorithm; 4 = Leiden algorithm). Leiden requires the leidenalg python module. # @param n.start Number of random starts # @param n.iter Maximal number of iterations per random start # @param random.seed Seed of the random number generator # @param print.output Whether or not to print output to the console # @param temp.file.location Deprecated and no longer used # @param edge.file.name Path to edge file to use # # @return Seurat object with identities set to the results of the clustering procedure # #' @importFrom utils read.table write.table # RunModularityClustering <- function( SNN = matrix(), modularity = 1, resolution = 0.8, algorithm = 1, n.start = 10, n.iter = 10, random.seed = 0, print.output = TRUE, temp.file.location = NULL, edge.file.name = NULL ) { edge_file <- edge.file.name %||% '' clusters <- RunModularityClusteringCpp( SNN, modularity, resolution, algorithm, n.start, n.iter, random.seed, print.output, edge_file ) return(clusters) } Seurat/R/sketching.R0000644000176200001440000005466714525500037014043 0ustar liggesusers#' @include zzz.R #' @include generics.R #' @importFrom rlang enquo is_quosure quo_get_env quo_get_expr #' NULL #%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% # Generics #%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% #%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% # Functions #%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% #' Sketch Data #' #' This function uses sketching methods to downsample high-dimensional single-cell RNA expression data, #' which can help with scalability for large datasets. #' #' @param object A Seurat object. #' @param assay Assay name. Default is NULL, in which case the default assay of the object is used. #' @param ncells A positive integer indicating the number of cells to sample for the sketching. Default is 5000. #' @param sketched.assay Sketched assay name. A sketch assay is created or overwrite with the sketch data. Default is 'sketch'. #' @param method Sketching method to use. Can be 'LeverageScore' or 'Uniform'. #' Default is 'LeverageScore'. #' @param var.name A metadata column name to store the leverage scores. Default is 'leverage.score'. #' @param over.write whether to overwrite existing column in the metadata. Default is FALSE. #' @param seed A positive integer for the seed of the random number generator. Default is 123. #' @param cast The type to cast the resulting assay to. Default is 'dgCMatrix'. 
#' @param verbose Print progress and diagnostic messages #' @param ... Arguments passed to other methods #' #' @return A Seurat object with the sketched data added as a new assay. #' #' @importFrom SeuratObject CastAssay Key Key<- Layers #' #' @export #' #' #' SketchData <- function( object, assay = NULL, ncells = 5000L, sketched.assay = 'sketch', method = c('LeverageScore', 'Uniform'), var.name = "leverage.score", over.write = FALSE, seed = 123L, cast = 'dgCMatrix', verbose = TRUE, ... ) { assay <- assay[1L] %||% DefaultAssay(object = object) assay <- match.arg(arg = assay, choices = Assays(object = object)) method <- match.arg(arg = method) if (sketched.assay == assay) { abort(message = "Cannot overwrite existing assays") } if (sketched.assay %in% Assays(object = object)) { if (sketched.assay == DefaultAssay(object = object)) { DefaultAssay(object = object) <- assay } object[[sketched.assay]] <- NULL } if (!over.write) { var.name <- CheckMetaVarName(object = object, var.name = var.name) } if (method == 'LeverageScore') { if (verbose) { message("Calcuating Leverage Score") } object <- LeverageScore( object = object, assay = assay, var.name = var.name, over.write = over.write, seed = seed, verbose = FALSE, ... ) } else if (method == 'Uniform') { if (verbose) { message("Uniformly sampling") } object[[var.name]] <- 1 } leverage.score <- object[[var.name]] layers.data <- Layers(object = object[[assay]], search = 'data') cells <- lapply( X = seq_along(along.with = layers.data), FUN = function(i, seed) { set.seed(seed = seed) lcells <- Cells(x = object[[assay]], layer = layers.data[i]) if (length(x = lcells) < ncells) { return(lcells) } return(sample( x = lcells, size = ncells, prob = leverage.score[lcells,] )) }, seed = seed ) sketched <- suppressWarnings(expr = subset( x = object[[assay]], cells = unlist(cells), layers = Layers(object = object[[assay]], search = c('counts', 'data')) )) for (lyr in layers.data) { try( expr = VariableFeatures(object = sketched, method = "sketch", layer = lyr) <- VariableFeatures(object = object[[assay]], layer = lyr), silent = TRUE ) } if (!is.null(x = cast) && inherits(x = sketched, what = 'Assay5')) { sketched <- CastAssay(object = sketched, to = cast, ...) } Key(object = sketched) <- Key(object = sketched.assay, quiet = TRUE) object[[sketched.assay]] <- sketched DefaultAssay(object = object) <- sketched.assay return(object) } #' Project full data to the sketch assay #' #' #' This function allows projection of high-dimensional single-cell RNA expression data from a full dataset #' onto the lower-dimensional embedding of the sketch of the dataset. #' #' @param object A Seurat object. #' @param assay Assay name for the full data. Default is 'RNA'. #' @param sketched.assay Sketched assay name to project onto. Default is 'sketch'. #' @param sketched.reduction Dimensional reduction results of the sketched assay to project onto. #' @param full.reduction Dimensional reduction name for the projected full dataset. #' @param dims Dimensions to include in the projection. #' @param normalization.method Normalization method to use. Can be 'LogNormalize' or 'SCT'. #' Default is 'LogNormalize'. #' @param refdata An optional list for label transfer from sketch to full data. Default is NULL. #' Similar to refdata in `MapQuery` #' @param k.weight Number of neighbors to consider when weighting labels for transfer. Default is 50. #' @param umap.model An optional pre-computed UMAP model. Default is NULL. 
#' @param recompute.neighbors Whether to recompute the neighbors for label transfer. Default is FALSE. #' @param recompute.weights Whether to recompute the weights for label transfer. Default is FALSE. #' @param verbose Print progress and diagnostic messages. #' #' @return A Seurat object with the full data projected onto the sketched dimensional reduction results. #' The projected data are stored in the specified full reduction. #' #' @export #' ProjectData <- function( object, assay = 'RNA', sketched.assay = 'sketch', sketched.reduction, full.reduction, dims, normalization.method = c("LogNormalize", "SCT"), refdata = NULL, k.weight = 50, umap.model = NULL, recompute.neighbors = FALSE, recompute.weights = FALSE, verbose = TRUE ) { if (!full.reduction %in% Reductions(object)) { if (verbose) { message(full.reduction, ' is not in the object.' ,' Data from all cells will be projected to ', sketched.reduction) } proj.emb <- ProjectCellEmbeddings( query = object, reference = object, query.assay = assay, dims = dims, normalization.method = normalization.method, reference.assay = sketched.assay, reduction = sketched.reduction, verbose = verbose) object[[full.reduction]] <- CreateDimReducObject( embeddings = proj.emb, assay = assay, key = Key(object = full.reduction, quiet = TRUE) ) } object <- TransferSketchLabels( object = object, sketched.assay = sketched.assay, reduction = full.reduction, dims = dims, k = k.weight, refdata = refdata, reduction.model = umap.model, recompute.neighbors = recompute.neighbors, recompute.weights = recompute.weights, verbose = verbose) return(object) } #' Transfer data from sketch data to full data #' #' This function transfers cell type labels from a sketched dataset to a full dataset #' based on the similarities in the lower dimensional space. #' #' @param object A Seurat object. #' @param sketched.assay Sketched assay name. Default is 'sketch'. #' @param reduction Dimensional reduction name to use for label transfer. #' @param dims An integer vector indicating which dimensions to use for label transfer. #' @param refdata A list of character strings indicating the metadata columns containing labels to transfer. Default is NULL. #' Similar to refdata in `MapQuery` #' @param k Number of neighbors to use for label transfer. Default is 50. #' @param reduction.model Dimensional reduction model to use for label transfer. Default is NULL. #' @param neighbors An object storing the neighbors found during the sketching process. Default is NULL. #' @param recompute.neighbors Whether to recompute the neighbors for label transfer. Default is FALSE. #' @param recompute.weights Whether to recompute the weights for label transfer. Default is FALSE. #' @param verbose Print progress and diagnostic messages #' #' @return A Seurat object with transferred labels stored in the metadata. 
If a UMAP model is provided, #' the full data are also projected onto the UMAP space, with the results stored in a new reduction, full.`reduction.model` #' #' #' @export #' TransferSketchLabels <- function( object, sketched.assay = 'sketch', reduction, dims, refdata = NULL, k = 50, reduction.model = NULL, neighbors = NULL, recompute.neighbors = FALSE, recompute.weights = FALSE, verbose = TRUE ){ full_sketch.nn <- neighbors %||% Tool( object = object, slot = 'TransferSketchLabels' )$full_sketch.nn full_sketch.weight <- Tool( object = object, slot = 'TransferSketchLabels' )$full_sketch.weight compute.neighbors <- is.null(x = full_sketch.nn) || !all(Cells(full_sketch.nn) == Cells(object[[reduction]])) || max(Indices(full_sketch.nn)) > ncol(object[[sketched.assay]]) || !identical(x = full_sketch.nn@alg.info$dims, y = dims) || !identical(x = full_sketch.nn@alg.info$reduction, y = reduction) || recompute.neighbors compute.weights <- is.null(x = full_sketch.weight) || !all(colnames(full_sketch.weight) == Cells(object[[reduction]])) || !all(rownames(full_sketch.weight) == colnames(object[[sketched.assay]])) || recompute.weights || recompute.neighbors if (compute.neighbors) { if (verbose) { message("Finding sketch neighbors") } full_sketch.nn <- NNHelper( query = Embeddings(object[[reduction]])[, dims], data = Embeddings(object[[reduction]])[colnames(object[[sketched.assay]]), dims], k = k, method = "annoy" ) slot(object = full_sketch.nn, name = 'alg.info')$dims <- dims slot(object = full_sketch.nn, name = 'alg.info')$reduction <- reduction } if (compute.weights) { if (verbose) { message("Finding sketch weight matrix") } full_sketch.weight <- FindWeightsNN( nn.obj = full_sketch.nn, query.cells = Cells(object[[reduction]]), reference.cells = colnames(object[[sketched.assay]]), verbose = verbose) rownames(full_sketch.weight) <- colnames(object[[sketched.assay]]) colnames(full_sketch.weight) <- Cells(object[[reduction]]) } slot( object = object, name = 'tools' )$TransferSketchLabels$full_sketch.nn <- full_sketch.nn slot( object = object, name = 'tools' )$TransferSketchLabels$full_sketch.weight <- full_sketch.weight if (!is.null(refdata)) { if (length(refdata) == 1 & is.character(refdata)) { refdata <- list(refdata) names(refdata) <- unlist(refdata) } if (verbose) { message("Transfering refdata from sketch") } for (rd in 1:length(x = refdata)) { if (isFALSE(x = refdata[[rd]])) { transfer.results[[rd]] <- NULL next } rd.name <- names(x = refdata)[rd] label.rd <- refdata[[rd]] ## FetchData not work if (!label.rd %in% colnames( object[[]])) { stop(label.rd, ' is not in the meta.data') } reference.labels <- object[[]][colnames(object[[sketched.assay]]), label.rd] predicted.labels.list <- TransferLablesNN( reference.labels = reference.labels, weight.matrix = full_sketch.weight) object[[paste0(rd.name)]] <- predicted.labels.list$labels object[[paste0(rd.name, '.score')]] <- predicted.labels.list$scores } } if (!is.null(reduction.model)) { umap.model <- Misc(object = object[[reduction.model]], slot = 'model') if (is.null(umap.model)) { warning(reduction.model, ' does not have a stored umap model') return(object) } if (verbose) { message("Projection to sketch umap") } if (ncol(full_sketch.nn) > umap.model$n_neighbors) { full_sketch.nn@nn.idx <- full_sketch.nn@nn.idx[, 1:umap.model$n_neighbors] full_sketch.nn@nn.dist <- full_sketch.nn@nn.dist[, 1:umap.model$n_neighbors] } proj.umap <- RunUMAP( object = full_sketch.nn, reduction.model = object[[reduction.model]], verbose = verbose, assay = slot(object = 
object[[reduction]], name = 'assay.used')
    )
    full.umap.reduction <- rev(
      x = make.unique(
        names = c(
          Reductions(object = object),
          paste0('full.', reduction.model)
        )
      )
    )[1]
    Key(object = proj.umap) <- Key(object = full.umap.reduction)
    object[[full.umap.reduction]] <- proj.umap
  }
  return(object)
}

#%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
# Methods for Seurat-defined generics
#%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%

#' @param nsketch A positive integer. The number of sketches to be used in the approximation.
#' Default is 5000.
#' @param ndims A positive integer or NULL. The number of dimensions to use. If NULL, the number
#' of dimensions will default to the number of columns in the object.
#' @param method The sketching method to use, defaults to CountSketch.
#' @param eps A numeric. The error tolerance for the approximation in Johnson–Lindenstrauss embeddings,
#' defaults to 0.5.
#' @param seed A positive integer. The seed for the random number generator, defaults to 123.
#' @param verbose Print progress and diagnostic messages
#' @importFrom Matrix qrR t
#' @importFrom irlba irlba
#'
#' @rdname LeverageScore
#' @method LeverageScore default
#' @export
#'
LeverageScore.default <- function(
  object,
  nsketch = 5000L,
  ndims = NULL,
  method = CountSketch,
  eps = 0.5,
  seed = 123L,
  verbose = TRUE,
  ...
) {
  # Check the dimensions of the object, nsketch, and ndims
  ncells <- ncol(x = object)
  if (ncells < nsketch * 1.5) {
    # For small datasets, compute scores directly from the right singular vectors
    nv <- ifelse(nrow(x = object) < 50, nrow(x = object) - 1, 50)
    Z <- irlba(A = object, nv = nv, nu = 0, verbose = FALSE)$v
    return(rowSums(x = Z ^ 2))
  }
  if (nrow(x = object) > 5000L) {
    abort(message = "too slow")
  } else if (nrow(x = object) > (ncells / 1.1)) {
    abort(message = "too square")
  }
  ndims <- ndims %||% ncells
  if (nsketch < (1.1 * nrow(x = object))) {
    nsketch <- 1.1 * nrow(x = object)
    warning(
      "'nsketch' is too close to the number of features, setting to ",
      round(x = nsketch, digits = 2L),
      call. = FALSE,
      immediate. = TRUE
    )
  }
  nsketch <- min(nsketch, ndims)
  # Check the method
  if (is_quosure(x = method)) {
    method <- eval(
      expr = quo_get_expr(quo = method),
      envir = quo_get_env(quo = method)
    )
  }
  if (is.character(x = method)) {
    method <- match.fun(FUN = method)
  }
  stopifnot(is.function(x = method))
  # Run the sketching
  if (isTRUE(x = verbose)) {
    message("sampling ", nsketch, " cells for random sketching")
  }
  S <- method(nsketch = nsketch, ncells = ncells, seed = seed, ...)
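  # S is an nsketch x ncells random sketching matrix (CountSketch by default);
  # after transposing the data below, S %*% object gives an nsketch x nfeatures
  # sketch whose QR factor is used to approximate the per-cell leverage scores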
object <- t(x = object) if (isTRUE(x = verbose)) { message("Performing QR decomposition") } if (inherits(x = object, what = 'IterableMatrix')) { temp <- tempdir() object.gene_index <- BPCells::transpose_storage_order(matrix = object, tmpdir = temp) sa <- as(object = S %*% object.gene_index, Class = 'dgCMatrix') rm(object.gene_index) unlink(list.files(path = temp, full.names = TRUE)) } else { sa <- S %*% object } if (!inherits(x = sa, what = 'dgCMatrix')) { sa <- as(object = sa, Class = 'dgCMatrix') } qr.sa <- base::qr(x = sa) R <- if (inherits(x = qr.sa, what = 'sparseQR')) { qrR(qr = qr.sa) } else { base::qr.R(qr = qr.sa) } R.inv <- as.sparse(x = backsolve(r = R, x = diag(x = ncol(x = R)))) if (isTRUE(x = verbose)) { message("Performing random projection") } JL <- as.sparse(x = JLEmbed( nrow = ncol(x = R.inv), ncol = ndims, eps = eps, seed = seed )) Z <- object %*% (R.inv %*% JL) if (inherits(x = Z, what = 'IterableMatrix')) { Z.score <- BPCells::matrix_stats(matrix = Z ^ 2, row_stats = 'mean' )$row_stats['mean',]*ncol(x = Z) } else { Z.score <- rowSums(x = Z ^ 2) } return(Z.score) } #' @param nsketch A positive integer. The number of sketches to be used in the approximation. #' Default is 5000. #' @param ndims A positive integer or NULL. The number of dimensions to use. If NULL, the number #' of dimensions will default to the number of columns in the object. #' @param method The sketching method to use, defaults to CountSketch. #' @param vf.method VariableFeatures method #' @param layer layer to use #' @param eps A numeric. The error tolerance for the approximation in Johnson–Lindenstrauss embeddings, #' defaults to 0.5. #' @param seed A positive integer. The seed for the random number generator, defaults to 123. #' @param verbose Print progress and diagnostic messages #' #' @importFrom SeuratObject EmptyDF #' #' @rdname LeverageScore #' @method LeverageScore StdAssay #' #' @export #' LeverageScore.StdAssay <- function( object, nsketch = 5000L, ndims = NULL, method = CountSketch, vf.method = NULL, layer = 'data', eps = 0.5, seed = 123L, verbose = TRUE, ... ) { layer <- unique(x = layer) %||% DefaultLayer(object = object) layer <- Layers(object = object, search = layer) if (!is_quosure(x = method)) { method <- enquo(arg = method) } scores <- EmptyDF(n = ncol(x = object)) row.names(x = scores) <- colnames(x = object) scores[, 1] <- NA_real_ for (i in seq_along(along.with = layer)) { l <- layer[i] if (isTRUE(x = verbose)) { message("Running LeverageScore for layer ", l) } scores[Cells(x = object, layer = l), 1] <- LeverageScore( object = LayerData( object = object, layer = l, features = VariableFeatures( object = object, method = vf.method, layer = l ), fast = TRUE ), nsketch = nsketch, ndims = ndims %||% ncol(x = object), method = method, eps = eps, seed = seed, verbose = verbose, ... ) } return(scores) } #' @rdname LeverageScore #' @method LeverageScore Assay #' @export #' LeverageScore.Assay <- LeverageScore.StdAssay #' @param assay assay to use #' @param nsketch A positive integer. The number of sketches to be used in the approximation. #' Default is 5000. #' @param ndims A positive integer or NULL. The number of dimensions to use. If NULL, the number #' of dimensions will default to the number of columns in the object. #' @param method The sketching method to use, defaults to CountSketch. #' @param var.name name of slot to store leverage scores #' @param over.write whether to overwrite slot that currently stores leverage scores. 
Defaults #' to FALSE, in which case the 'var.name' is modified if it already exists in the object #' #' @rdname LeverageScore #' @method LeverageScore Seurat #' @export #' LeverageScore.Seurat <- function( object, assay = NULL, nsketch = 5000L, ndims = NULL, var.name = 'leverage.score', over.write = FALSE, method = CountSketch, vf.method = NULL, layer = 'data', eps = 0.5, seed = 123L, verbose = TRUE, ... ) { if (!over.write) { var.name <- CheckMetaVarName(object = object, var.name = var.name) } assay <- assay[1L] %||% DefaultAssay(object = object) assay <- match.arg(arg = assay, choices = Assays(object = object)) method <- enquo(arg = method) scores <- LeverageScore( object = object[[assay]], nsketch = nsketch, ndims = ndims, method = method, vf.method = vf.method, layer = layer, eps = eps, seed = seed, verbose = verbose, ... ) names(x = scores) <- var.name object[[]] <- scores return(object) } #%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% # Methods for R-defined generics #%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% #%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% # Internal #%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% #' Generate CountSketch random matrix #' #' @inheritParams base::set.seed #' @param nsketch Number of sketching random cells #' @param ncells Number of cells in the original data #' @param ... Ignored #' #' @return ... #' #' @importFrom Matrix sparseMatrix #' #' @export #' #' @keywords internal #' #' @references Clarkson, KL. & Woodruff, DP. #' Low-rank approximation and regression in input sparsity time. #' Journal of the ACM (JACM). 2017 Jan 30;63(6):1-45. #' \url{https://dl.acm.org/doi/abs/10.1145/3019134}; CountSketch <- function(nsketch, ncells, seed = NA_integer_, ...) { if (!is.na(x = seed)) { set.seed(seed = seed) } iv <- xv <- vector(mode = "numeric", length = ncells) jv <- seq_len(length.out = ncells) for (i in jv) { iv[i] <- sample(x = seq_len(length.out = nsketch), size = 1L) xv[i] <- sample(x = c(-1L, 1L), size = 1L) } return(sparseMatrix( i = iv, j = jv, x = xv, dims = c(nsketch, ncells) )) } #' Gaussian sketching #' #' @inheritParams CountSketch #' #' @return ... #' #' @export #' #' @keywords internal #' GaussianSketch <- function(nsketch, ncells, seed = NA_integer_, ...) { if (!is.na(x = seed)) { set.seed(seed = seed) } return(matrix( data = rnorm(n = nsketch * ncells, mean = 0L, sd = 1 / (ncells ^ 2)), nrow = nsketch, ncol = ncells )) } #' Generate JL random projection embeddings #' #' @keywords internal #' #' @references Aghila G and Siddharth R (2020). #' RandPro: Random Projection with Classification. R package version 0.2.2. 
#' \url{https://CRAN.R-project.org/package=RandPro} #' #' @noRd # JLEmbed <- function(nrow, ncol, eps = 0.1, seed = NA_integer_, method = "li") { if (!is.na(x = seed)) { set.seed(seed = seed) } method <- method[1L] method <- match.arg(arg = method) if (!is.null(x = eps)) { if (eps > 1 || eps <= 0) { stop("'eps' must be 0 < eps <= 1") } ncol <- floor(x = 4 * log(x = ncol) / ((eps ^ 2) / 2 - (eps ^ 3 / 3))) } m <- switch( EXPR = method, "li" = { s <- ceiling(x = sqrt(x = ncol)) prob <- c( 1 / (2 * s), 1 - (1 / s), 1 / (2 * s) ) matrix( data = sample( x = seq.int(from = -1L, to = 1L), size = nrow * ncol, replace = TRUE, prob = prob ), nrow = nrow ) } ) return(m) } #%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% # S4 Methods #%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% Seurat/R/visualization.R0000644000176200001440000106437514525500056014764 0ustar liggesusers#' @importFrom utils globalVariables #' @importFrom ggplot2 fortify GeomViolin ggproto #' @importFrom SeuratObject DefaultDimReduc #' NULL #%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% # Generics #%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% #' @importFrom methods setGeneric #' setGeneric( name = '.PrepImageData', def = function(data, cells, ...) { standardGeneric(f = '.PrepImageData') } ) #%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% # Heatmaps #%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% #' Dimensional reduction heatmap #' #' Draws a heatmap focusing on a principal component. Both cells and genes are sorted by their #' principal component scores. Allows for nice visualization of sources of heterogeneity in the dataset. #' #' @inheritParams DoHeatmap #' @param dims Dimensions to plot #' @param nfeatures Number of genes to plot #' @param cells A list of cells to plot. If numeric, just plots the top cells. #' @param reduction Which dimensional reduction to use #' @param balanced Plot an equal number of genes with both + and - scores. #' @param projected Use the full projected dimensional reduction #' @param ncol Number of columns to plot #' @param fast If true, use \code{image} to generate plots; faster than using ggplot2, but not customizable #' @param assays A vector of assays to pull data from #' @param combine Combine plots into a single \code{\link[patchwork]{patchwork}ed} #' ggplot object. If \code{FALSE}, return a list of ggplot objects #' #' @return No return value by default. 
If using fast = FALSE, will return a #' \code{\link[patchwork]{patchwork}ed} ggplot object if combine = TRUE, otherwise #' returns a list of ggplot objects #' #' @importFrom patchwork wrap_plots #' @export #' @concept visualization #' #' @seealso \code{\link[graphics]{image}} \code{\link[ggplot2]{geom_raster}} #' #' @examples #' data("pbmc_small") #' DimHeatmap(object = pbmc_small) #' DimHeatmap <- function( object, dims = 1, nfeatures = 30, cells = NULL, reduction = 'pca', disp.min = -2.5, disp.max = NULL, balanced = TRUE, projected = FALSE, ncol = NULL, fast = TRUE, raster = TRUE, slot = 'scale.data', assays = NULL, combine = TRUE ) { ncol <- ncol %||% ifelse(test = length(x = dims) > 2, yes = 3, no = length(x = dims)) plots <- vector(mode = 'list', length = length(x = dims)) assays <- assays %||% DefaultAssay(object = object) disp.max <- disp.max %||% ifelse( test = slot == 'scale.data', yes = 2.5, no = 6 ) if (!DefaultAssay(object = object[[reduction]]) %in% assays) { warning("The original assay that the reduction was computed on is different than the assay specified") } cells <- cells %||% ncol(x = object) if (is.numeric(x = cells)) { cells <- lapply( X = dims, FUN = function(x) { cells <- TopCells( object = object[[reduction]], dim = x, ncells = cells, balanced = balanced ) if (balanced) { cells$negative <- rev(x = cells$negative) } cells <- unlist(x = unname(obj = cells)) return(cells) } ) } if (!is.list(x = cells)) { cells <- lapply(X = 1:length(x = dims), FUN = function(x) {return(cells)}) } features <- lapply( X = dims, FUN = TopFeatures, object = object[[reduction]], nfeatures = nfeatures, balanced = balanced, projected = projected ) features.all <- unique(x = unlist(x = features)) if (length(x = assays) > 1) { features.keyed <- lapply( X = assays, FUN = function(assay) { features <- features.all[features.all %in% rownames(x = object[[assay]])] if (length(x = features) > 0) { return(paste0(Key(object = object[[assay]]), features)) } } ) features.keyed <- Filter(f = Negate(f = is.null), x = features.keyed) features.keyed <- unlist(x = features.keyed) } else { features.keyed <- features.all DefaultAssay(object = object) <- assays } data.all <- FetchData( object = object, vars = features.keyed, cells = unique(x = unlist(x = cells)), slot = slot ) data.all <- MinMax(data = data.all, min = disp.min, max = disp.max) data.limits <- c(min(data.all), max(data.all)) # if (check.plot && any(c(length(x = features.keyed), length(x = cells[[1]])) > 700)) { # choice <- menu(c("Continue with plotting", "Quit"), title = "Plot(s) requested will likely take a while to plot.") # if (choice != 1) { # return(invisible(x = NULL)) # } # } if (fast) { nrow <- floor(x = length(x = dims) / 3.01) + 1 orig.par <- par()$mfrow par(mfrow = c(nrow, ncol)) } for (i in 1:length(x = dims)) { dim.features <- c(features[[i]][[2]], rev(x = features[[i]][[1]])) dim.features <- rev(x = unlist(x = lapply( X = dim.features, FUN = function(feat) { return(grep(pattern = paste0(feat, '$'), x = features.keyed, value = TRUE)) } ))) dim.cells <- cells[[i]] data.plot <- data.all[dim.cells, dim.features] if (fast) { SingleImageMap( data = data.plot, title = paste0(Key(object = object[[reduction]]), dims[i]), order = dim.cells ) } else { plots[[i]] <- SingleRasterMap( data = data.plot, raster = raster, limits = data.limits, cell.order = dim.cells, feature.order = dim.features ) } } if (fast) { par(mfrow = orig.par) return(invisible(x = NULL)) } if (combine) { plots <- wrap_plots(plots, ncol = ncol, guides = "collect") } 
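  # The fast = TRUE path has already drawn the heatmaps with image() and returned NULL
  # invisibly above; from here, return either the patchwork-combined plots or, when
  # combine = FALSE, the list of individual ggplot objects.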
  return(plots)
}

#' Feature expression heatmap
#'
#' Draws a heatmap of single cell feature expression.
#'
#' @param object Seurat object
#' @param features A vector of features to plot, defaults to \code{VariableFeatures(object = object)}
#' @param cells A vector of cells to plot
#' @param disp.min Minimum display value (all values below are clipped)
#' @param disp.max Maximum display value (all values above are clipped); defaults to 2.5
#' if \code{slot} is 'scale.data', 6 otherwise
#' @param group.by A vector of variables to group cells by; pass 'ident' to group by cell identity classes
#' @param group.bar Add a color bar showing group status for cells
#' @param group.colors Colors to use for the color bar
#' @param slot Data slot to use, choose from 'raw.data', 'data', or 'scale.data'
#' @param assay Assay to pull from
# @param check.plot Check that plotting will finish in a reasonable amount of time
#' @param label Label the cell identities above the color bar
#' @param size Size of text above color bar
#' @param hjust Horizontal justification of text above color bar
#' @param vjust Vertical justification of text above color bar
#' @param angle Angle of text above color bar
#' @param raster If TRUE, plot with geom_raster, else use geom_tile. geom_raster may look blurry on
#' some viewing applications such as Preview due to how the raster is interpolated. Set this to FALSE
#' if you are encountering that issue (note that plots may take longer to produce/render).
#' @param draw.lines Include white lines to separate the groups
#' @param lines.width Integer number to adjust the width of the separating white lines.
#' Corresponds to the number of "cells" between each group.
#' @param group.bar.height Scale the height of the color bar
#' @param combine Combine plots into a single \code{\link[patchwork]{patchwork}ed}
#' ggplot object.
If \code{FALSE}, return a list of ggplot objects #' #' @return A \code{\link[patchwork]{patchwork}ed} ggplot object if #' \code{combine = TRUE}; otherwise, a list of ggplot objects #' #' @importFrom stats median #' @importFrom scales hue_pal #' @importFrom ggplot2 annotation_raster coord_cartesian scale_color_manual #' ggplot_build aes_string geom_text #' @importFrom patchwork wrap_plots #' @export #' @concept visualization #' #' @examples #' data("pbmc_small") #' DoHeatmap(object = pbmc_small) #' DoHeatmap <- function( object, features = NULL, cells = NULL, group.by = 'ident', group.bar = TRUE, group.colors = NULL, disp.min = -2.5, disp.max = NULL, slot = 'scale.data', assay = NULL, label = TRUE, size = 5.5, hjust = 0, vjust = 0, angle = 45, raster = TRUE, draw.lines = TRUE, lines.width = NULL, group.bar.height = 0.02, combine = TRUE ) { assay <- assay %||% DefaultAssay(object = object) DefaultAssay(object = object) <- assay cells <- cells %||% colnames(x = object[[assay]]) if (is.numeric(x = cells)) { cells <- colnames(x = object)[cells] } features <- features %||% VariableFeatures(object = object) features <- rev(x = unique(x = features)) disp.max <- disp.max %||% ifelse( test = slot == 'scale.data', yes = 2.5, no = 6 ) # make sure features are present possible.features <- rownames(x = GetAssayData(object = object, slot = slot)) if (any(!features %in% possible.features)) { bad.features <- features[!features %in% possible.features] features <- features[features %in% possible.features] if(length(x = features) == 0) { stop("No requested features found in the ", slot, " slot for the ", assay, " assay.") } warning("The following features were omitted as they were not found in the ", slot, " slot for the ", assay, " assay: ", paste(bad.features, collapse = ", ")) } data <- as.data.frame(x = as.matrix(x = t(x = GetAssayData( object = object, slot = slot)[features, cells, drop = FALSE]))) object <- suppressMessages(expr = StashIdent(object = object, save.name = 'ident')) group.by <- group.by %||% 'ident' groups.use <- object[[group.by]][cells, , drop = FALSE] # group.use <- switch( # EXPR = group.by, # 'ident' = Idents(object = object), # object[[group.by, drop = TRUE]] # ) # group.use <- factor(x = group.use[cells]) plots <- vector(mode = 'list', length = ncol(x = groups.use)) for (i in 1:ncol(x = groups.use)) { data.group <- data group.use <- groups.use[, i, drop = TRUE] if (!is.factor(x = group.use)) { group.use <- factor(x = group.use) } names(x = group.use) <- cells if (draw.lines) { # create fake cells to serve as the white lines, fill with NAs lines.width <- lines.width %||% ceiling(x = nrow(x = data.group) * 0.0025) placeholder.cells <- sapply( X = 1:(length(x = levels(x = group.use)) * lines.width), FUN = function(x) { return(RandomName(length = 20)) } ) placeholder.groups <- rep(x = levels(x = group.use), times = lines.width) group.levels <- levels(x = group.use) names(x = placeholder.groups) <- placeholder.cells group.use <- as.vector(x = group.use) names(x = group.use) <- cells group.use <- factor(x = c(group.use, placeholder.groups), levels = group.levels) na.data.group <- matrix( data = NA, nrow = length(x = placeholder.cells), ncol = ncol(x = data.group), dimnames = list(placeholder.cells, colnames(x = data.group)) ) data.group <- rbind(data.group, na.data.group) } lgroup <- length(levels(group.use)) plot <- SingleRasterMap( data = data.group, raster = raster, disp.min = disp.min, disp.max = disp.max, feature.order = features, cell.order = names(x = sort(x = group.use)), group.by 
= group.use ) if (group.bar) { # TODO: Change group.bar to annotation.bar default.colors <- c(hue_pal()(length(x = levels(x = group.use)))) if (!is.null(x = names(x = group.colors))) { cols <- unname(obj = group.colors[levels(x = group.use)]) } else { cols <- group.colors[1:length(x = levels(x = group.use))] %||% default.colors } if (any(is.na(x = cols))) { cols[is.na(x = cols)] <- default.colors[is.na(x = cols)] cols <- Col2Hex(cols) col.dups <- sort(x = unique(x = which(x = duplicated(x = substr( x = cols, start = 1, stop = 7 ))))) through <- length(x = default.colors) while (length(x = col.dups) > 0) { pal.max <- length(x = col.dups) + through cols.extra <- hue_pal()(pal.max)[(through + 1):pal.max] cols[col.dups] <- cols.extra col.dups <- sort(x = unique(x = which(x = duplicated(x = substr( x = cols, start = 1, stop = 7 ))))) } } group.use2 <- sort(x = group.use) if (draw.lines) { na.group <- RandomName(length = 20) levels(x = group.use2) <- c(levels(x = group.use2), na.group) group.use2[placeholder.cells] <- na.group cols <- c(cols, "#FFFFFF") } pbuild <- ggplot_build(plot = plot) names(x = cols) <- levels(x = group.use2) # scale the height of the bar y.range <- diff(x = pbuild$layout$panel_params[[1]]$y.range) y.pos <- max(pbuild$layout$panel_params[[1]]$y.range) + y.range * 0.015 y.max <- y.pos + group.bar.height * y.range x.min <- min(pbuild$layout$panel_params[[1]]$x.range) + 0.1 x.max <- max(pbuild$layout$panel_params[[1]]$x.range) - 0.1 plot <- plot + annotation_raster( raster = t(x = cols[group.use2]), xmin = x.min, xmax = x.max, ymin = y.pos, ymax = y.max ) + coord_cartesian(ylim = c(0, y.max), clip = 'off') + scale_color_manual( values = cols[-length(x = cols)], name = "Identity", na.translate = FALSE ) if (label) { x.max <- max(pbuild$layout$panel_params[[1]]$x.range) # Attempt to pull xdivs from x.major in ggplot2 < 3.3.0; if NULL, pull from the >= 3.3.0 slot x.divs <- pbuild$layout$panel_params[[1]]$x.major %||% attr(x = pbuild$layout$panel_params[[1]]$x$get_breaks(), which = "pos") x <- data.frame(group = sort(x = group.use), x = x.divs) label.x.pos <- tapply(X = x$x, INDEX = x$group, FUN = function(y) { if (isTRUE(x = draw.lines)) { mean(x = y[-length(x = y)]) } else { mean(x = y) } }) label.x.pos <- data.frame(group = names(x = label.x.pos), label.x.pos) plot <- plot + geom_text( stat = "identity", data = label.x.pos, aes_string(label = 'group', x = 'label.x.pos'), y = y.max + y.max * 0.03 * 0.5 + vjust, angle = angle, hjust = hjust, size = size ) plot <- suppressMessages(plot + coord_cartesian( ylim = c(0, y.max + y.max * 0.002 * max(nchar(x = levels(x = group.use))) * size), clip = 'off') ) } } plot <- plot + theme(line = element_blank()) plots[[i]] <- plot } if (combine) { plots <- wrap_plots(plots) } return(plots) } #' Hashtag oligo heatmap #' #' Draws a heatmap of hashtag oligo signals across singlets/doublets/negative cells. Allows for the visualization of HTO demultiplexing results. #' #' @param object Seurat object. Assumes that the hash tag oligo (HTO) data has been added and normalized, and demultiplexing has been run with HTODemux(). #' @param classification The naming for metadata column with classification result from HTODemux(). #' @param global.classification The slot for metadata column specifying a cell as singlet/doublet/negative. #' @param assay Hashtag assay name. #' @param ncells Number of cells to plot. Default is to choose 5000 cells by random subsampling, to avoid having to draw exceptionally large heatmaps. 
#' @param singlet.names Names for the singlets. Default is to use the same names as the HTOs.
#' @param raster If TRUE, plot with geom_raster, else use geom_tile. geom_raster may look blurry on
#' some viewing applications such as Preview due to how the raster is interpolated. Set this to FALSE
#' if you are encountering that issue (note that plots may take longer to produce/render).
#' @return Returns a ggplot2 plot object.
#'
#' @importFrom ggplot2 guides
#' @export
#' @concept visualization
#'
#' @seealso \code{\link{HTODemux}}
#'
#' @examples
#' \dontrun{
#' object <- HTODemux(object)
#' HTOHeatmap(object)
#' }
#'
HTOHeatmap <- function(
  object,
  assay = 'HTO',
  classification = paste0(assay, '_classification'),
  global.classification = paste0(assay, '_classification.global'),
  ncells = 5000,
  singlet.names = NULL,
  raster = TRUE
) {
  DefaultAssay(object = object) <- assay
  Idents(object = object) <- object[[classification, drop = TRUE]]
  if (ncells > ncol(x = object)) {
    warning(
      "ncells (", ncells,
      ") is larger than the number of cells present in the provided object (",
      ncol(x = object), "). Plotting heatmap for all cells."
    )
  } else {
    object <- subset(
      x = object,
      cells = sample(x = colnames(x = object), size = ncells)
    )
  }
  classification <- object[[classification]]
  singlets <- which(x = object[[global.classification]] == 'Singlet')
  singlet.ids <- sort(x = unique(x = as.character(x = classification[singlets, ])))
  doublets <- which(object[[global.classification]] == 'Doublet')
  doublet.ids <- sort(x = unique(x = as.character(x = classification[doublets, ])))
  heatmap.levels <- c(singlet.ids, doublet.ids, 'Negative')
  object <- ScaleData(object = object, assay = assay, verbose = FALSE)
  data <- FetchData(object = object, vars = singlet.ids)
  Idents(object = object) <- factor(x = classification[, 1], levels = heatmap.levels)
  plot <- SingleRasterMap(
    data = data,
    raster = raster,
    feature.order = rev(x = singlet.ids),
    cell.order = names(x = sort(x = Idents(object = object))),
    group.by = Idents(object = object)
  ) + guides(color = FALSE)
  return(plot)
}

#%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
# Expression by identity plots
#%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%

#' Single cell ridge plot
#'
#' Draws a ridge plot of single cell data (gene expression, metrics, PC
#' scores, etc.)
#'
#' @param object Seurat object
#' @param features Features to plot (gene expression, metrics, PC scores,
#' anything that can be retrieved by FetchData)
#' @param cols Colors to use for plotting
#' @param idents Which classes to include in the plot (default is all)
#' @param sort Sort identity classes (on the x-axis) by the average
#' expression of the attribute being plotted, can also pass 'increasing' or 'decreasing' to change sort direction
#' @param assay Name of assay to use, defaults to the active assay
#' @param group.by Group (color) cells in different ways (for example, orig.ident)
#' @param y.max Maximum y axis value
#' @param same.y.lims Set all the y-axis limits to the same values
#' @param log plot the feature axis on log scale
#' @param ncol Number of columns if multiple plots are displayed
#' @param slot Slot to pull expression data from (e.g. "counts" or "data")
#' @param layer Layer to pull expression data from (e.g. "counts" or "data")
#' @param stack Horizontally stack plots for each feature
#' @param combine Combine plots into a single \code{\link[patchwork]{patchwork}ed}
#' ggplot object.
If \code{FALSE}, return a list of ggplot #' @param fill.by Color violins/ridges based on either 'feature' or 'ident' #' #' @return A \code{\link[patchwork]{patchwork}ed} ggplot object if #' \code{combine = TRUE}; otherwise, a list of ggplot objects #' #' @export #' @concept visualization #' #' @examples #' data("pbmc_small") #' RidgePlot(object = pbmc_small, features = 'PC_1') #' RidgePlot <- function( object, features, cols = NULL, idents = NULL, sort = FALSE, assay = NULL, group.by = NULL, y.max = NULL, same.y.lims = FALSE, log = FALSE, ncol = NULL, slot = deprecated(), layer = 'data', stack = FALSE, combine = TRUE, fill.by = 'feature' ) { if (is_present(arg = slot)) { deprecate_soft( when = '5.0.0', what = 'RidgePlot(slot = )', with = 'RidgePlot(layer = )' ) layer <- slot %||% layer } return(ExIPlot( object = object, type = 'ridge', features = features, idents = idents, ncol = ncol, sort = sort, assay = assay, y.max = y.max, same.y.lims = same.y.lims, cols = cols, group.by = group.by, log = log, layer = layer, stack = stack, combine = combine, fill.by = fill.by )) } #' Single cell violin plot #' #' Draws a violin plot of single cell data (gene expression, metrics, PC #' scores, etc.) #' #' @inheritParams RidgePlot #' @param pt.size Point size for points #' @param alpha Alpha value for points #' @param split.by A factor in object metadata to split the plot by, pass 'ident' #' to split by cell identity' #' @param split.plot plot each group of the split violin plots by multiple or #' single violin shapes. #' @param adjust Adjust parameter for geom_violin #' @param flip flip plot orientation (identities on x-axis) #' @param add.noise determine if adding a small noise for plotting #' @param raster Convert points to raster format. Requires 'ggrastr' to be installed. # default is \code{NULL} which automatically rasterizes if ggrastr is installed and # number of points exceed 100,000. #' #' @return A \code{\link[patchwork]{patchwork}ed} ggplot object if #' \code{combine = TRUE}; otherwise, a list of ggplot objects #' #' @export #' @concept visualization #' #' @seealso \code{\link{FetchData}} #' #' @examples #' data("pbmc_small") #' VlnPlot(object = pbmc_small, features = 'PC_1') #' VlnPlot(object = pbmc_small, features = 'LYZ', split.by = 'groups') #' VlnPlot <- function( object, features, cols = NULL, pt.size = NULL, alpha = 1, idents = NULL, sort = FALSE, assay = NULL, group.by = NULL, split.by = NULL, adjust = 1, y.max = NULL, same.y.lims = FALSE, log = FALSE, ncol = NULL, slot = deprecated(), layer = NULL, split.plot = FALSE, stack = FALSE, combine = TRUE, fill.by = 'feature', flip = FALSE, add.noise = TRUE, raster = NULL ) { if (is_present(arg = slot)) { deprecate_soft( when = '5.0.0', what = 'VlnPlot(slot = )', with = 'VlnPlot(layer = )' ) layer <- slot %||% layer } layer.set <- suppressWarnings( Layers( object = object, search = layer %||% 'data' ) ) if (is.null(layer) && length(layer.set) == 1 && layer.set == 'scale.data'){ warning('Default search for "data" layer yielded no results; utilizing "scale.data" layer instead.') } assay.name <- DefaultAssay(object) if (is.null(layer.set) & is.null(layer) ) { warning('Default search for "data" layer in "', assay.name, '" assay yielded no results; utilizing "counts" layer instead.', call. = FALSE, immediate. 
= TRUE) layer.set <- Layers( object = object, search = 'counts' ) } if (is.null(layer.set)) { stop('layer "', layer,'" is not found in assay: "', assay.name, '"') } else { layer <- layer.set } if ( !is.null(x = split.by) & getOption(x = 'Seurat.warn.vlnplot.split', default = TRUE) ) { message( "The default behaviour of split.by has changed.\n", "Separate violin plots are now plotted side-by-side.\n", "To restore the old behaviour of a single split violin,\n", "set split.plot = TRUE. \nThis message will be shown once per session." ) options(Seurat.warn.vlnplot.split = FALSE) } return(ExIPlot( object = object, type = ifelse(test = split.plot, yes = 'splitViolin', no = 'violin'), features = features, idents = idents, ncol = ncol, sort = sort, assay = assay, y.max = y.max, same.y.lims = same.y.lims, adjust = adjust, pt.size = pt.size, alpha = alpha, cols = cols, group.by = group.by, split.by = split.by, log = log, layer = layer, stack = stack, combine = combine, fill.by = fill.by, flip = flip, add.noise = add.noise, raster = raster )) } #%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% # Dimensional reduction plots #%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% #' Color dimensional reduction plot by tree split #' #' Returns a DimPlot colored based on whether the cells fall in clusters #' to the left or to the right of a node split in the cluster tree. #' #' @param object Seurat object #' @param node Node in cluster tree on which to base the split #' @param left.color Color for the left side of the split #' @param right.color Color for the right side of the split #' @param other.color Color for all other cells #' @inheritDotParams DimPlot -object #' #' @return Returns a DimPlot #' #' @export #' @concept visualization #' #' @seealso \code{\link{DimPlot}} #' #' @examples #' \dontrun{ #' if (requireNamespace("ape", quietly = TRUE)) { #' data("pbmc_small") #' pbmc_small <- BuildClusterTree(object = pbmc_small, verbose = FALSE) #' PlotClusterTree(pbmc_small) #' ColorDimSplit(pbmc_small, node = 5) #' } #' } #' ColorDimSplit <- function( object, node, left.color = 'red', right.color = 'blue', other.color = 'grey50', ... 
) { CheckDots(..., fxns = 'DimPlot') tree <- Tool(object = object, slot = "BuildClusterTree") split <- tree$edge[which(x = tree$edge[, 1] == node), ][, 2] all.children <- sort(x = tree$edge[, 2][!tree$edge[, 2] %in% tree$edge[, 1]]) left.group <- DFT(tree = tree, node = split[1], only.children = TRUE) right.group <- DFT(tree = tree, node = split[2], only.children = TRUE) if (any(is.na(x = left.group))) { left.group <- split[1] } if (any(is.na(x = right.group))) { right.group <- split[2] } left.group <- MapVals(v = left.group, from = all.children, to = tree$tip.label) right.group <- MapVals(v = right.group, from = all.children, to = tree$tip.label) remaining.group <- setdiff(x = tree$tip.label, y = c(left.group, right.group)) left.cells <- WhichCells(object = object, ident = left.group) right.cells <- WhichCells(object = object, ident = right.group) remaining.cells <- WhichCells(object = object, ident = remaining.group) object <- SetIdent( object = object, cells = left.cells, value = "Left Split" ) object <- SetIdent( object = object, cells = right.cells, value = "Right Split" ) object <- SetIdent( object = object, cells = remaining.cells, value = "Not in Split" ) levels(x = object) <- c("Left Split", "Right Split", "Not in Split") colors.use = c(left.color, right.color, other.color) return(DimPlot(object = object, cols = colors.use, ...)) } #' Dimensional reduction plot #' #' Graphs the output of a dimensional reduction technique on a 2D scatter plot where each point is a #' cell and it's positioned based on the cell embeddings determined by the reduction technique. By #' default, cells are colored by their identity class (can be changed with the group.by parameter). #' #' @param object Seurat object #' @param dims Dimensions to plot, must be a two-length numeric vector specifying x- and y-dimensions #' @param cells Vector of cells to plot (default is all cells) #' @param cols Vector of colors, each color corresponds to an identity class. This may also be a single character #' or numeric value corresponding to a palette as specified by \code{\link[RColorBrewer]{brewer.pal.info}}. #' By default, ggplot2 assigns colors. We also include a number of palettes from the pals package. #' See \code{\link{DiscretePalette}} for details. #' @param pt.size Adjust point size for plotting #' @param reduction Which dimensionality reduction to use. If not specified, first searches for umap, then tsne, then pca #' @param group.by Name of one or more metadata columns to group (color) cells by #' (for example, orig.ident); pass 'ident' to group by identity class #' @param split.by A factor in object metadata to split the plot by, pass 'ident' #' to split by cell identity' #' @param shape.by If NULL, all points are circles (default). You can specify any #' cell attribute (that can be pulled with FetchData) allowing for both #' different colors and different shapes on cells. Only applicable if \code{raster = FALSE}. #' @param order Specify the order of plotting for the idents. This can be #' useful for crowded plots if points of interest are being buried. Provide #' either a full list of valid idents or a subset to be plotted last (on top) #' @param shuffle Whether to randomly shuffle the order of points. This can be #' useful for crowded plots if points of interest are being buried. (default is FALSE) #' @param seed Sets the seed if randomly shuffling the order of points. 
#' @param label Whether to label the clusters #' @param label.size Sets size of labels #' @param label.color Sets the color of the label text #' @param label.box Whether to put a box around the label text (geom_text vs #' geom_label) #' @param alpha Alpha value for plotting (default is 1) #' @param repel Repel labels #' @param cells.highlight A list of character or numeric vectors of cells to #' highlight. If only one group of cells desired, can simply #' pass a vector instead of a list. If set, colors selected cells to the color(s) #' in \code{cols.highlight} and other cells black (white if dark.theme = TRUE); #' will also resize to the size(s) passed to \code{sizes.highlight} #' @param cols.highlight A vector of colors to highlight the cells as; will #' repeat to the length groups in cells.highlight #' @param sizes.highlight Size of highlighted cells; will repeat to the length #' groups in cells.highlight. If \code{sizes.highlight = TRUE} size of all #' points will be this value. #' @param na.value Color value for NA points when using custom scale #' @param ncol Number of columns for display when combining plots #' @param combine Combine plots into a single \code{\link[patchwork]{patchwork}ed} #' ggplot object. If \code{FALSE}, return a list of ggplot objects #' @param raster Convert points to raster format, default is \code{NULL} which #' automatically rasterizes if plotting more than 100,000 cells #' @param raster.dpi Pixel resolution for rasterized plots, passed to geom_scattermore(). #' Default is c(512, 512). #' #' @return A \code{\link[patchwork]{patchwork}ed} ggplot object if #' \code{combine = TRUE}; otherwise, a list of ggplot objects #' #' @importFrom rlang !! #' @importFrom ggplot2 facet_wrap vars sym labs #' @importFrom patchwork wrap_plots #' #' @export #' @concept visualization #' #' @note For the old \code{do.hover} and \code{do.identify} functionality, please see #' \code{HoverLocator} and \code{CellSelector}, respectively. #' #' @aliases TSNEPlot PCAPlot ICAPlot #' @seealso \code{\link{FeaturePlot}} \code{\link{HoverLocator}} #' \code{\link{CellSelector}} \code{\link{FetchData}} #' #' @examples #' data("pbmc_small") #' DimPlot(object = pbmc_small) #' DimPlot(object = pbmc_small, split.by = 'letter.idents') #' DimPlot <- function( object, dims = c(1, 2), cells = NULL, cols = NULL, pt.size = NULL, reduction = NULL, group.by = NULL, split.by = NULL, shape.by = NULL, order = NULL, shuffle = FALSE, seed = 1, label = FALSE, label.size = 4, label.color = 'black', label.box = FALSE, repel = FALSE, alpha = 1, cells.highlight = NULL, cols.highlight = '#DE2D26', sizes.highlight = 1, na.value = 'grey50', ncol = NULL, combine = TRUE, raster = NULL, raster.dpi = c(512, 512) ) { if (!is_integerish(x = dims, n = 2L, finite = TRUE) || !all(dims > 0L)) { abort(message = "'dims' must be a two-length integer vector") } reduction <- reduction %||% DefaultDimReduc(object = object) # cells <- cells %||% colnames(x = object) ##### Cells for all cells in the assay. 
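  # Default to all cells associated with the reduction's source assay, not just the cells
  # in that assay's default layer.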
#### Cells function should not only get default layer cells <- cells %||% Cells( x = object, assay = DefaultAssay(object = object[[reduction]]) ) # data <- Embeddings(object = object[[reduction]])[cells, dims] # data <- as.data.frame(x = data) dims <- paste0(Key(object = object[[reduction]]), dims) orig.groups <- group.by group.by <- group.by %||% 'ident' data <- FetchData( object = object, vars = c(dims, group.by), cells = cells, clean = 'project' ) # cells <- rownames(x = object) # object[['ident']] <- Idents(object = object) # orig.groups <- group.by # group.by <- group.by %||% 'ident' # data <- cbind(data, object[[group.by]][cells, , drop = FALSE]) group.by <- colnames(x = data)[3:ncol(x = data)] for (group in group.by) { if (!is.factor(x = data[, group])) { data[, group] <- factor(x = data[, group]) } } if (!is.null(x = shape.by)) { data[, shape.by] <- object[[shape.by, drop = TRUE]] } if (!is.null(x = split.by)) { split <- FetchData(object = object, vars = split.by, clean=TRUE)[split.by] data <- data[rownames(split),] data[, split.by] <- split } if (isTRUE(x = shuffle)) { set.seed(seed = seed) data <- data[sample(x = 1:nrow(x = data)), ] } plots <- lapply( X = group.by, FUN = function(x) { plot <- SingleDimPlot( data = data[, c(dims, x, split.by, shape.by)], dims = dims, col.by = x, cols = cols, pt.size = pt.size, shape.by = shape.by, order = order, alpha = alpha, label = FALSE, cells.highlight = cells.highlight, cols.highlight = cols.highlight, sizes.highlight = sizes.highlight, na.value = na.value, raster = raster, raster.dpi = raster.dpi ) if (label) { plot <- LabelClusters( plot = plot, id = x, repel = repel, size = label.size, split.by = split.by, box = label.box, color = label.color ) } if (!is.null(x = split.by)) { plot <- plot + FacetTheme() + facet_wrap( facets = vars(!!sym(x = split.by)), ncol = if (length(x = group.by) > 1 || is.null(x = ncol)) { length(x = unique(x = data[, split.by])) } else { ncol } ) } plot <- if (is.null(x = orig.groups)) { plot + labs(title = NULL) } else { plot + CenterTitle() } } ) if (!is.null(x = split.by)) { ncol <- 1 } if (combine) { plots <- wrap_plots(plots, ncol = orig.groups %iff% ncol) } return(plots) } #' Visualize 'features' on a dimensional reduction plot #' #' Colors single cells on a dimensional reduction plot according to a 'feature' #' (i.e. gene expression, PC scores, number of genes detected, etc.) #' #' @inheritParams DimPlot #' @param order Boolean determining whether to plot cells in order of expression. Can be useful if #' cells expressing given feature are getting buried. #' @param features Vector of features to plot. Features can come from: #' \itemize{ #' \item An \code{Assay} feature (e.g. a gene name - "MS4A1") #' \item A column name from meta.data (e.g. mitochondrial percentage - #' "percent.mito") #' \item A column name from a \code{DimReduc} object corresponding to the #' cell embedding values (e.g. the PC 1 scores - "PC_1") #' } #' @param cols The two colors to form the gradient over. Provide as string vector with #' the first color corresponding to low values, the second to high. Also accepts a Brewer #' color scale or vector of colors. Note: this will bin the data into number of colors provided. 
#' When blend is \code{TRUE}, takes anywhere from 1-3 colors:
#' \describe{
#'   \item{1 color:}{Treated as the color for double-negatives, will use default colors 2 and 3 for per-feature expression}
#'   \item{2 colors:}{Treated as colors for per-feature expression, will use default color 1 for double-negatives}
#'   \item{3+ colors:}{First color used for double-negatives, colors 2 and 3 used for per-feature expression, all others ignored}
#' }
#' @param min.cutoff,max.cutoff Vector of minimum and maximum cutoff values for each feature,
#' may specify quantile in the form of 'q##' where '##' is the quantile (e.g., 'q1', 'q10')
#' @param split.by A factor in object metadata to split the plot by, pass 'ident'
#' to split by cell identity
#' @param keep.scale How to handle the color scale across multiple plots. Options are:
#' \itemize{
#'   \item \dQuote{feature} (default; by row/feature scaling): The plots for
#'   each individual feature are scaled to the maximum expression of the
#'   feature across the conditions provided to \code{split.by}
#'   \item \dQuote{all} (universal scaling): The plots for all features and
#'   conditions are scaled to the maximum expression value for the feature
#'   with the highest overall expression
#'   \item \code{NULL} (no scaling): Each individual plot is scaled to the
#'   maximum expression value of the feature in the condition provided to
#'   \code{split.by}. Be aware setting \code{NULL} will result in color
#'   scales that are not comparable between plots
#' }
#' @param slot Which slot to pull expression data from
#' @param blend Scale and blend expression values to visualize coexpression of two features
#' @param blend.threshold The color cutoff from weak signal to strong signal; ranges from 0 to 1.
#' @param ncol Number of columns to combine multiple feature plots into, ignored if \code{split.by} is not \code{NULL}
#' @param coord.fixed Plot Cartesian coordinates with fixed aspect ratio
#' @param by.col If splitting by a factor, plot the splits per column with the features as rows; ignored if \code{blend = TRUE}
#' @param sort.cell Redundant with \code{order}. This argument is being
#' deprecated. Please use \code{order} instead.
#' @param interactive Launch an interactive \code{\link[Seurat:IFeaturePlot]{FeaturePlot}}
#' @param combine Combine plots into a single \code{\link[patchwork]{patchwork}ed}
#' ggplot object. If \code{FALSE}, return a list of ggplot objects
#'
#' @return A \code{\link[patchwork]{patchwork}ed} ggplot object if
#' \code{combine = TRUE}; otherwise, a list of ggplot objects
#'
#' @importFrom grDevices rgb
#' @importFrom patchwork wrap_plots
#' @importFrom cowplot theme_cowplot
#' @importFrom RColorBrewer brewer.pal.info
#' @importFrom ggplot2 labs scale_x_continuous scale_y_continuous theme element_rect
#' dup_axis guides element_blank element_text margin scale_color_brewer scale_color_gradientn
#' scale_color_manual coord_fixed ggtitle
#'
#' @export
#' @concept visualization
#'
#' @note For the old \code{do.hover} and \code{do.identify} functionality, please see
#' \code{HoverLocator} and \code{CellSelector}, respectively.
#' #' @aliases FeatureHeatmap #' @seealso \code{\link{DimPlot}} \code{\link{HoverLocator}} #' \code{\link{CellSelector}} #' #' @examples #' data("pbmc_small") #' FeaturePlot(object = pbmc_small, features = 'PC_1') #' FeaturePlot <- function( object, features, dims = c(1, 2), cells = NULL, cols = if (blend) { c('lightgrey', '#ff0000', '#00ff00') } else { c('lightgrey', 'blue') }, pt.size = NULL, alpha = 1, order = FALSE, min.cutoff = NA, max.cutoff = NA, reduction = NULL, split.by = NULL, keep.scale = "feature", shape.by = NULL, slot = 'data', blend = FALSE, blend.threshold = 0.5, label = FALSE, label.size = 4, label.color = "black", repel = FALSE, ncol = NULL, coord.fixed = FALSE, by.col = TRUE, sort.cell = deprecated(), interactive = FALSE, combine = TRUE, raster = NULL, raster.dpi = c(512, 512) ) { # TODO: deprecate fully on 3.2.0 if (is_present(arg = sort.cell)) { deprecate_stop( when = '4.9.0', what = 'FeaturePlot(sort.cell = )', with = 'FeaturePlot(order = )' ) } if (isTRUE(x = interactive)) { return(IFeaturePlot( object = object, feature = features[1], dims = dims, reduction = reduction, slot = slot )) } # Check keep.scale param for valid entries if (!is.null(x = keep.scale)) { keep.scale <- arg_match0(arg = keep.scale, values = c('feature', 'all')) } # Set a theme to remove right-hand Y axis lines # Also sets right-hand Y axis text label formatting no.right <- theme( axis.line.y.right = element_blank(), axis.ticks.y.right = element_blank(), axis.text.y.right = element_blank(), axis.title.y.right = element_text( face = "bold", size = 14, margin = margin(r = 7) ) ) # Get the DimReduc to use reduction <- reduction %||% DefaultDimReduc(object = object) if (!is_integerish(x = dims, n = 2L, finite = TRUE) && !all(dims > 0L)) { abort(message = "'dims' must be a two-length integer vector") } # Figure out blending stuff if (isTRUE(x = blend) && length(x = features) != 2) { abort(message = "Blending feature plots only works with two features") } # Set color scheme for blended FeaturePlots if (isTRUE(x = blend)) { default.colors <- eval(expr = formals(fun = FeaturePlot)$cols) cols <- switch( EXPR = as.character(x = length(x = cols)), '0' = { warn(message = "No colors provided, using default colors") default.colors }, '1' = { warn(message = paste( "Only one color provided, assuming", sQuote(x = cols), "is double-negative and augmenting with default colors" )) c(cols, default.colors[2:3]) }, '2' = { warn(message = paste( "Only two colors provided, assuming specified are for features and agumenting with", sQuote(default.colors[1]), "for double-negatives", )) c(default.colors[1], cols) }, '3' = cols, { warn(message = "More than three colors provided, using only first three") cols[1:3] } ) } if (isTRUE(x = blend) && length(x = cols) != 3) { abort("Blending feature plots only works with three colors; first one for negative cells") } # Name the reductions dims <- paste0(Key(object = object[[reduction]]), dims) cells <- cells %||% Cells(x = object[[reduction]]) # Get plotting data data <- FetchData( object = object, vars = c(dims, 'ident', features), cells = cells, slot = slot ) # Check presence of features/dimensions if (ncol(x = data) < 4) { abort(message = paste( "None of the requested features were found:", paste(features, collapse = ', '), "in slot ", slot )) } else if (!all(dims %in% colnames(x = data))) { abort(message = "The dimensions requested were not found") } features <- setdiff(x = names(x = data), y = c(dims, 'ident')) # Determine cutoffs min.cutoff <- mapply( FUN = function(cutoff, 
feature) { return(ifelse( test = is.na(x = cutoff), yes = min(data[, feature]), no = cutoff )) }, cutoff = min.cutoff, feature = features ) max.cutoff <- mapply( FUN = function(cutoff, feature) { return(ifelse( test = is.na(x = cutoff), yes = max(data[, feature]), no = cutoff )) }, cutoff = max.cutoff, feature = features ) check.lengths <- unique(x = vapply( X = list(features, min.cutoff, max.cutoff), FUN = length, FUN.VALUE = numeric(length = 1) )) if (length(x = check.lengths) != 1) { abort( message = "There must be the same number of minimum and maximum cuttoffs as there are features" ) } names(x = min.cutoff) <- names(x = max.cutoff) <- features brewer.gran <- ifelse( test = length(x = cols) == 1, yes = brewer.pal.info[cols, ]$maxcolors, no = length(x = cols) ) # Apply cutoffs for (i in seq_along(along.with = features)) { f <- features[i] data.feature <- data[[f]] min.use <- SetQuantile(cutoff = min.cutoff[f], data = data.feature) max.use <- SetQuantile(cutoff = max.cutoff[f], data = data.feature) data.feature[data.feature < min.use] <- min.use data.feature[data.feature > max.use] <- max.use if (brewer.gran != 2) { data.feature <- if (all(data.feature == 0)) { rep_len(x = 0, length.out = length(x = data.feature)) } else { as.numeric(x = as.factor(x = cut( x = as.numeric(x = data.feature), breaks = 2 ))) } } data[[f]] <- data.feature } # Figure out splits (FeatureHeatmap) data$split <- if (is.null(x = split.by)) { RandomName() } else { switch( EXPR = split.by, ident = Idents(object = object)[cells, drop = TRUE], object[[split.by, drop = TRUE]][cells, drop = TRUE] ) } if (!is.factor(x = data$split)) { data$split <- factor(x = data$split) } # Set shaping variable if (!is.null(x = shape.by)) { data[, shape.by] <- object[[shape.by, drop = TRUE]] } # Make list of plots plots <- vector( mode = "list", length = ifelse( test = blend, yes = 4, no = length(x = features) * length(x = levels(x = data$split)) ) ) # Apply common limits xlims <- c(floor(x = min(data[, dims[1]])), ceiling(x = max(data[, dims[1]]))) ylims <- c(floor(min(data[, dims[2]])), ceiling(x = max(data[, dims[2]]))) # Set blended colors if (blend) { ncol <- 4 color.matrix <- BlendMatrix( two.colors = cols[2:3], col.threshold = blend.threshold, negative.color = cols[1] ) cols <- cols[2:3] colors <- list( color.matrix[, 1], color.matrix[1, ], as.vector(x = color.matrix) ) } # Make the plots for (i in 1:length(x = levels(x = data$split))) { # Figure out which split we're working with ident <- levels(x = data$split)[i] data.plot <- data[as.character(x = data$split) == ident, , drop = FALSE] # Blend expression values if (isTRUE(x = blend)) { features <- features[1:2] no.expression <- features[colMeans(x = data.plot[, features]) == 0] if (length(x = no.expression) != 0) { abort(message = paste( "The following features have no value:", paste(no.expression, collapse = ', ') )) } data.plot <- cbind(data.plot[, c(dims, 'ident')], BlendExpression(data = data.plot[, features[1:2]])) features <- colnames(x = data.plot)[4:ncol(x = data.plot)] } # Make per-feature plots for (j in 1:length(x = features)) { feature <- features[j] # Get blended colors if (isTRUE(x = blend)) { cols.use <- as.numeric(x = as.character(x = data.plot[, feature])) + 1 cols.use <- colors[[j]][sort(x = unique(x = cols.use))] } else { cols.use <- NULL } data.single <- data.plot[, c(dims, 'ident', feature, shape.by)] # Make the plot plot <- SingleDimPlot( data = data.single, dims = dims, col.by = feature, order = order, pt.size = pt.size, alpha = alpha, cols = cols.use, 
shape.by = shape.by, label = FALSE, raster = raster, raster.dpi = raster.dpi ) + scale_x_continuous(limits = xlims) + scale_y_continuous(limits = ylims) + theme_cowplot() + CenterTitle() # theme(plot.title = element_text(hjust = 0.5)) # Add labels if (isTRUE(x = label)) { plot <- LabelClusters( plot = plot, id = 'ident', repel = repel, size = label.size, color = label.color ) } # Make FeatureHeatmaps look nice(ish) if (length(x = levels(x = data$split)) > 1) { plot <- plot + theme(panel.border = element_rect(fill = NA, colour = 'black')) # Add title plot <- plot + if (i == 1) { labs(title = feature) } else { labs(title = NULL) } # Add second axis if (j == length(x = features) && !blend) { suppressMessages( expr = plot <- plot + scale_y_continuous( sec.axis = dup_axis(name = ident), limits = ylims ) + no.right ) } # Remove left Y axis if (j != 1) { plot <- plot + theme( axis.line.y = element_blank(), axis.ticks.y = element_blank(), axis.text.y = element_blank(), axis.title.y.left = element_blank() ) } # Remove bottom X axis if (i != length(x = levels(x = data$split))) { plot <- plot + theme( axis.line.x = element_blank(), axis.ticks.x = element_blank(), axis.text.x = element_blank(), axis.title.x = element_blank() ) } } else { plot <- plot + labs(title = feature) } # Add colors scale for normal FeaturePlots if (!blend) { plot <- plot + guides(color = NULL) cols.grad <- cols if (length(x = cols) == 1) { plot <- plot + scale_color_brewer(palette = cols) } else if (length(x = cols) > 1) { unique.feature.exp <- unique(data.plot[, feature]) if (length(unique.feature.exp) == 1) { warn(message = paste0( "All cells have the same value (", unique.feature.exp, ") of ", dQuote(x = feature) )) if (unique.feature.exp == 0) { cols.grad <- cols[1] } else{ cols.grad <- cols } } plot <- suppressMessages( expr = plot + scale_color_gradientn( colors = cols.grad, guide = "colorbar" ) ) } } if (!(is.null(x = keep.scale)) && keep.scale == "feature" && !blend) { max.feature.value <- max(data[, feature]) min.feature.value <- min(data[, feature]) plot <- suppressMessages(plot & scale_color_gradientn(colors = cols, limits = c(min.feature.value, max.feature.value))) } # Add coord_fixed if (coord.fixed) { plot <- plot + coord_fixed() } # I'm not sure why, but sometimes the damn thing fails without this # Thanks ggplot2 plot <- plot # Place the plot plots[[(length(x = features) * (i - 1)) + j]] <- plot } } # Add blended color key if (isTRUE(x = blend)) { blend.legend <- BlendMap(color.matrix = color.matrix) for (ii in 1:length(x = levels(x = data$split))) { suppressMessages(expr = plots <- append( x = plots, values = list( blend.legend + scale_y_continuous( sec.axis = dup_axis(name = ifelse( test = length(x = levels(x = data$split)) > 1, yes = levels(x = data$split)[ii], no = '' )), expand = c(0, 0) ) + labs( x = features[1], y = features[2], title = if (ii == 1) { paste('Color threshold:', blend.threshold) } else { NULL } ) + no.right ), after = 4 * ii - 1 )) } } # Remove NULL plots plots <- Filter(f = Negate(f = is.null), x = plots) # Combine the plots if (is.null(x = ncol)) { ncol <- 2 if (length(x = features) == 1) { ncol <- 1 } if (length(x = features) > 6) { ncol <- 3 } if (length(x = features) > 9) { ncol <- 4 } } ncol <- ifelse( test = is.null(x = split.by) || isTRUE(x = blend), yes = ncol, no = length(x = features) ) legend <- if (isTRUE(x = blend)) { 'none' } else { split.by %iff% 'none' } # Transpose the FeatureHeatmap matrix (not applicable for blended FeaturePlots) if (isTRUE(x = combine)) { if (by.col && 
!is.null(x = split.by) && !blend) { plots <- lapply( X = plots, FUN = function(x) { return(suppressMessages( expr = x + theme_cowplot() + ggtitle("") + scale_y_continuous(sec.axis = dup_axis(name = ""), limits = ylims) + no.right )) } ) nsplits <- length(x = levels(x = data$split)) idx <- 1 for (i in (length(x = features) * (nsplits - 1) + 1):(length(x = features) * nsplits)) { plots[[i]] <- suppressMessages( expr = plots[[i]] + scale_y_continuous( sec.axis = dup_axis(name = features[[idx]]), limits = ylims ) + no.right ) idx <- idx + 1 } idx <- 1 for (i in which(x = 1:length(x = plots) %% length(x = features) == 1)) { plots[[i]] <- plots[[i]] + ggtitle(levels(x = data$split)[[idx]]) + theme(plot.title = element_text(hjust = 0.5)) idx <- idx + 1 } idx <- 1 if (length(x = features) == 1) { for (i in 1:length(x = plots)) { plots[[i]] <- plots[[i]] + ggtitle(levels(x = data$split)[[idx]]) + theme(plot.title = element_text(hjust = 0.5)) idx <- idx + 1 } ncol <- 1 nrow <- nsplits } else { nrow <- split.by %iff% length(x = levels(x = data$split)) } plots <- plots[c(do.call( what = rbind, args = split( x = 1:length(x = plots), f = ceiling(x = seq_along(along.with = 1:length(x = plots)) / length(x = features)) ) ))] # Set ncol to number of splits (nrow) and nrow to number of features (ncol) plots <- wrap_plots(plots, ncol = nrow, nrow = ncol) if (!is.null(x = legend) && legend == 'none') { plots <- plots & NoLegend() } } else { plots <- wrap_plots(plots, ncol = ncol, nrow = split.by %iff% length(x = levels(x = data$split))) } if (!is.null(x = legend) && legend == 'none') { plots <- plots & NoLegend() } if (!(is.null(x = keep.scale)) && keep.scale == "all" && !blend) { max.feature.value <- max(data[, features]) min.feature.value <- min(data[, features]) plots <- suppressMessages(plots & scale_color_gradientn(colors = cols, limits = c(min.feature.value, max.feature.value))) } } return(plots) } #' Visualize features in dimensional reduction space interactively #' #' @inheritParams FeaturePlot #' @param feature Feature to plot #' #' @return Returns the final plot as a ggplot object #' #' @importFrom cowplot theme_cowplot #' @importFrom ggplot2 theme element_text guides scale_color_gradientn #' @importFrom miniUI miniPage miniButtonBlock miniTitleBarButton miniContentPanel #' @importFrom shiny fillRow sidebarPanel selectInput plotOutput reactiveValues #' observeEvent stopApp observe updateSelectInput renderPlot runGadget #' #' @export #' @concept visualization #' IFeaturePlot <- function(object, feature, dims = c(1, 2), reduction = NULL, slot = 'data') { # Set initial data values feature.label <- 'Feature to visualize' assay.keys <- Key(object = object)[Assays(object = object)] keyed <- sapply(X = assay.keys, FUN = grepl, x = feature) assay <- if (any(keyed)) { names(x = which(x = keyed))[1] } else { DefaultAssay(object = object) } features <- sort(x = rownames(x = GetAssayData( object = object, slot = slot, assay = assay ))) assays.use <- vapply( X = Assays(object = object), FUN = function(x) { return(!IsMatrixEmpty(x = GetAssayData( object = object, slot = slot, assay = x ))) }, FUN.VALUE = logical(length = 1L) ) assays.use <- sort(x = Assays(object = object)[assays.use]) reduction <- reduction %||% DefaultDimReduc(object = object) dims.reduc <- gsub( pattern = Key(object = object[[reduction]]), replacement = '', x = colnames(x = object[[reduction]]) ) # Set up the gadget UI ui <- miniPage( miniButtonBlock(miniTitleBarButton( inputId = 'done', label = 'Done', primary = TRUE )), miniContentPanel( 
fillRow( sidebarPanel( selectInput( inputId = 'assay', label = 'Assay', choices = assays.use, selected = assay, selectize = FALSE, width = '100%' ), selectInput( inputId = 'feature', label = feature.label, choices = features, selected = feature, selectize = FALSE, width = '100%' ), selectInput( inputId = 'reduction', label = 'Dimensional reduction', choices = Reductions(object = object), selected = reduction, selectize = FALSE, width = '100%' ), selectInput( inputId = 'xdim', label = 'X dimension', choices = dims.reduc, selected = as.character(x = dims[1]), selectize = FALSE, width = '100%' ), selectInput( inputId = 'ydim', label = 'Y dimension', choices = dims.reduc, selected = as.character(x = dims[2]), selectize = FALSE, width = '100%' ), selectInput( inputId = 'palette', label = 'Color scheme', choices = names(x = FeaturePalettes), selected = 'Seurat', selectize = FALSE, width = '100%' ), width = '100%' ), plotOutput(outputId = 'plot', height = '100%'), flex = c(1, 4) ) ) ) # Prepare plotting data dims <- paste0(Key(object = object[[reduction]]), dims) plot.data <- FetchData(object = object, vars = c(dims, feature), slot = slot) # Shiny server server <- function(input, output, session) { plot.env <- reactiveValues( data = plot.data, dims = paste0(Key(object = object[[reduction]]), dims), feature = feature, palette = 'Seurat' ) # Observe events observeEvent( eventExpr = input$done, handlerExpr = stopApp(returnValue = plot.env$plot) ) observe(x = { assay <- input$assay feature.use <- input$feature features.assay <- sort(x = rownames(x = GetAssayData( object = object, slot = slot, assay = assay ))) feature.use <- ifelse( test = feature.use %in% features.assay, yes = feature.use, no = features.assay[1] ) reduc <- input$reduction dims.reduc <- gsub( pattern = Key(object = object[[reduc]]), replacement = '', x = colnames(x = object[[reduc]]) ) dims <- c(input$xdim, input$ydim) for (i in seq_along(along.with = dims)) { if (!dims[i] %in% dims.reduc) { dims[i] <- dims.reduc[i] } } updateSelectInput( session = session, inputId = 'xdim', label = 'X dimension', choices = dims.reduc, selected = as.character(x = dims[1]) ) updateSelectInput( session = session, inputId = 'ydim', label = 'Y dimension', choices = dims.reduc, selected = as.character(x = dims[2]) ) updateSelectInput( session = session, inputId = 'feature', label = feature.label, choices = features.assay, selected = feature.use ) }) observe(x = { feature.use <- input$feature feature.keyed <- paste0(Key(object = object[[input$assay]]), feature.use) reduc <- input$reduction dims <- c(input$xdim, input$ydim) dims <- paste0(Key(object = object[[reduc]]), dims) plot.data <- tryCatch( expr = FetchData( object = object, vars = c(dims, feature.keyed), slot = slot ), warning = function(...) { return(plot.env$data) }, error = function(...) 
{ return(plot.env$data) } ) dims <- colnames(x = plot.data)[1:2] colnames(x = plot.data) <- c(dims, feature.use) plot.env$data <- plot.data plot.env$feature <- feature.use plot.env$dims <- dims }) observe(x = { plot.env$palette <- input$palette }) # Create the plot output$plot <- renderPlot(expr = { plot.env$plot <- SingleDimPlot( data = plot.env$data, dims = plot.env$dims, col.by = plot.env$feature, label = FALSE ) + theme_cowplot() + theme(plot.title = element_text(hjust = 0.5)) + guides(color = NULL) + scale_color_gradientn( colors = FeaturePalettes[[plot.env$palette]], guide = 'colorbar' ) plot.env$plot }) } runGadget(app = ui, server = server) } #' Highlight Neighbors in DimPlot #' #' It will color the query cells and the neighbors of the query cells in the #' DimPlot #' #' @inheritParams DimPlot #' @param nn.idx the neighbor index of all cells #' @param query.cells cells used to find their neighbors #' @param show.all.cells Show all cells or only query and neighbor cells #' #' @inherit DimPlot return #' #' @export #' @concept visualization #' NNPlot <- function( object, reduction, nn.idx, query.cells, dims = 1:2, label = FALSE, label.size = 4, repel = FALSE, sizes.highlight = 2, pt.size = 1, cols.highlight = c("#377eb8", "#e41a1c"), na.value = "#bdbdbd", order = c("self", "neighbors", "other"), show.all.cells = TRUE, ... ) { if (inherits(x = nn.idx, what = 'Neighbor')) { rownames(x = slot(object = nn.idx, name = 'nn.idx')) <- Cells(x = nn.idx) nn.idx <- Indices(object = nn.idx) } if (length(x = query.cells) > 1) { neighbor.cells <- apply( X = nn.idx[query.cells, -1], MARGIN = 2, FUN = function(x) { return(Cells(x = object)[x]) } ) } else { neighbor.cells <- Cells(x = object)[nn.idx[query.cells , -1]] } neighbor.cells <- as.vector(x = neighbor.cells) neighbor.cells <- neighbor.cells[!is.na(x = neighbor.cells)] object[["nn.col"]] <- "other" object[["nn.col"]][neighbor.cells, ] <- "neighbors" object[["nn.col"]][query.cells, ] <- "self" object$nn.col <- factor( x = object$nn.col, levels = c("self", "neighbors", "other") ) if (!show.all.cells) { object <- subset( x = object, cells = Cells(x = object)[which(x = object[["nn.col"]] != "other")] ) nn.cols <- c(rev(x = cols.highlight)) nn.pt.size <- sizes.highlight } else { highlight.info <- SetHighlight( cells.highlight = c(query.cells, neighbor.cells), cells.all = Cells(x = object), sizes.highlight = sizes.highlight, pt.size = pt.size, cols.highlight = "red" ) nn.cols <- c(na.value, rev(x = cols.highlight)) nn.pt.size <- highlight.info$size } NN.plot <- DimPlot( object = object, reduction = reduction, dims = dims, group.by = "nn.col", cols = nn.cols, label = label, order = order, pt.size = nn.pt.size , label.size = label.size, repel = repel ) return(NN.plot) } #%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% # Scatter plots #%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% #' Cell-cell scatter plot #' #' Creates a plot of scatter plot of features across two single cells. Pearson #' correlation between the two cells is displayed above the plot. 
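#'
#' As a usage sketch (reusing the \code{pbmc_small} cells from the example
#' below; the particular feature set passed to \code{highlight} is an arbitrary
#' illustration), a subset of features can be emphasised on the scatter plot:
#' \preformatted{
#' CellScatter(
#'   object = pbmc_small,
#'   cell1 = 'ATAGGAGAAACAGA',
#'   cell2 = 'CATCAGGATGCACA',
#'   highlight = VariableFeatures(object = pbmc_small)
#' )
#' }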
#' #' @inheritParams FeatureScatter #' @inheritParams DimPlot #' @param cell1 Cell 1 name #' @param cell2 Cell 2 name #' @param features Features to plot (default, all features) #' @param highlight Features to highlight #' @return A ggplot object #' #' @export #' @concept visualization #' #' @aliases CellPlot #' #' @examples #' data("pbmc_small") #' CellScatter(object = pbmc_small, cell1 = 'ATAGGAGAAACAGA', cell2 = 'CATCAGGATGCACA') #' CellScatter <- function( object, cell1, cell2, features = NULL, highlight = NULL, cols = NULL, pt.size = 1, smooth = FALSE, raster = NULL, raster.dpi = c(512, 512) ) { features <- features %||% rownames(x = object) data <- FetchData( object = object, vars = features, cells = c(cell1, cell2) ) data <- as.data.frame(x = t(x = data)) plot <- SingleCorPlot( data = data, cols = cols, pt.size = pt.size, rows.highlight = highlight, smooth = smooth, raster = raster, raster.dpi = raster.dpi ) return(plot) } #' Scatter plot of single cell data #' #' Creates a scatter plot of two features (typically feature expression), across a #' set of single cells. Cells are colored by their identity class. Pearson #' correlation between the two features is displayed above the plot. #' #' @param object Seurat object #' @param feature1 First feature to plot. Typically feature expression but can also #' be metrics, PC scores, etc. - anything that can be retreived with FetchData #' @param feature2 Second feature to plot. #' @param cells Cells to include on the scatter plot. #' @param shuffle Whether to randomly shuffle the order of points. This can be #' useful for crowded plots if points of interest are being buried. (default is FALSE) #' @param seed Sets the seed if randomly shuffling the order of points. #' @param group.by Name of one or more metadata columns to group (color) cells by #' (for example, orig.ident); pass 'ident' to group by identity class #' @param cols Colors to use for identity class plotting. #' @param pt.size Size of the points on the plot #' @param shape.by Ignored for now #' @param split.by A factor in object metadata to split the feature plot by, pass 'ident' #' to split by cell identity' #' @param span Spline span in loess function call, if \code{NULL}, no spline added #' @param smooth Smooth the graph (similar to smoothScatter) #' @param slot Slot to pull data from, should be one of 'counts', 'data', or 'scale.data' #' @param combine Combine plots into a single \code{\link[patchwork]{patchwork}ed} #' @param plot.cor Display correlation in plot title #' @param ncol Number of columns if plotting multiple plots #' @param raster Convert points to raster format, default is \code{NULL} #' which will automatically use raster if the number of points plotted is greater than #' 100,000 #' @param raster.dpi Pixel resolution for rasterized plots, passed to geom_scattermore(). #' Default is c(512, 512). 
#' @param jitter Jitter for easier visualization of crowded points (default is FALSE) #' #' @return A ggplot object #' #' @importFrom ggplot2 geom_smooth aes_string facet_wrap vars sym labs #' @importFrom patchwork wrap_plots #' #' @export #' @concept visualization #' #' @aliases GenePlot #' #' @examples #' data("pbmc_small") #' FeatureScatter(object = pbmc_small, feature1 = 'CD9', feature2 = 'CD3E') #' FeatureScatter <- function( object, feature1, feature2, cells = NULL, shuffle = FALSE, seed = 1, group.by = NULL, split.by = NULL, cols = NULL, pt.size = 1, shape.by = NULL, span = NULL, smooth = FALSE, combine = TRUE, slot = 'data', plot.cor = TRUE, ncol = NULL, raster = NULL, raster.dpi = c(512, 512), jitter = FALSE ) { cells <- cells %||% colnames(x = object) if (isTRUE(x = shuffle)) { set.seed(seed = seed) cells <- sample(x = cells) } group.by <- group.by %||% 'ident' data <- FetchData( object = object, vars = c(feature1, feature2, group.by), cells = cells, slot = slot ) if (!grepl(pattern = feature1, x = names(x = data)[1])) { abort(message = paste("Feature 1", sQuote(x = feature1), "not found")) } if (!grepl(pattern = feature2, x = names(x = data)[2])) { abort(message = paste("Feature 2", sQuote(x = feature2), "not found")) } feature1 <- names(x = data)[1] feature2 <- names(x = data)[2] group.by <- intersect(x = group.by, y = names(x = data)[3:ncol(x = data)]) for (group in group.by) { if (!is.factor(x = data[, group])) { data[, group] <- factor(x = data[, group]) } } if (!is.null(x = split.by)) { split <- FetchData(object = object, vars = split.by, clean=TRUE)[split.by] data <- data[rownames(split),] data[, split.by] <- split } plots <- lapply( X = group.by, FUN = function(x) { plot <- SingleCorPlot( data = data[,c(feature1, feature2, split.by)], col.by = data[, x], cols = cols, pt.size = pt.size, smooth = smooth, legend.title = 'Identity', span = span, plot.cor = plot.cor, raster = raster, raster.dpi = raster.dpi, jitter = jitter ) if (!is.null(x = split.by)) { plot <- plot + FacetTheme() + facet_wrap( facets = vars(!!sym(x = split.by)), ncol = if (length(x = group.by) > 1 || is.null(x = ncol)) { length(x = unique(x = data[, split.by])) } else { ncol } ) } plot } ) if (isTRUE(x = length(x = plots) == 1)) { return(plots[[1]]) } if (isTRUE(x = combine)) { plots <- wrap_plots(plots, ncol = length(x = group.by)) } return(plots) } #' View variable features #' #' @inheritParams FeatureScatter #' @inheritParams SeuratObject::HVFInfo #' @param cols Colors to specify non-variable/variable status #' @param assay Assay to pull variable features from #' @param log Plot the x-axis in log scale #' @param raster Convert points to raster format, default is \code{NULL} #' which will automatically use raster if the number of points plotted is greater than #' 100,000 #' #' @return A ggplot object #' #' @importFrom ggplot2 labs scale_color_manual scale_x_log10 #' @export #' @concept visualization #' #' @aliases VariableGenePlot MeanVarPlot #' #' @seealso \code{\link{FindVariableFeatures}} #' #' @examples #' data("pbmc_small") #' VariableFeaturePlot(object = pbmc_small) #' VariableFeaturePlot <- function( object, cols = c('black', 'red'), pt.size = 1, log = NULL, selection.method = NULL, assay = NULL, raster = NULL, raster.dpi = c(512, 512) ) { if (length(x = cols) != 2) { stop("'cols' must be of length 2") } hvf.info <- HVFInfo( object = object, assay = assay, method = selection.method, status = TRUE ) status.col <- colnames(hvf.info)[grepl("variable", colnames(hvf.info))][[1]] var.status <- c('no', 
'yes')[unlist(hvf.info[[status.col]]) + 1] if (colnames(x = hvf.info)[3] == 'dispersion.scaled') { hvf.info <- hvf.info[, c(1, 2)] } else if (colnames(x = hvf.info)[3] == 'variance.expected') { hvf.info <- hvf.info[, c(1, 4)] } else { hvf.info <- hvf.info[, c(1, 3)] } axis.labels <- switch( EXPR = colnames(x = hvf.info)[2], 'variance.standardized' = c('Average Expression', 'Standardized Variance'), 'dispersion' = c('Average Expression', 'Dispersion'), 'residual_variance' = c('Geometric Mean of Expression', 'Residual Variance') ) log <- log %||% (any(c('variance.standardized', 'residual_variance') %in% colnames(x = hvf.info))) # var.features <- VariableFeatures(object = object, assay = assay) # var.status <- ifelse( # test = rownames(x = hvf.info) %in% var.features, # yes = 'yes', # no = 'no' # ) plot <- SingleCorPlot( data = hvf.info, col.by = var.status, pt.size = pt.size, raster = raster, raster.dpi = raster.dpi ) if (length(x = unique(x = var.status)) == 1) { switch( EXPR = var.status[1], 'yes' = { cols <- cols[2] labels.legend <- 'Variable' }, 'no' = { cols <- cols[1] labels.legend <- 'Non-variable' } ) } else { labels.legend <- c('Non-variable', 'Variable') } plot <- plot + labs(title = NULL, x = axis.labels[1], y = axis.labels[2]) + scale_color_manual( labels = paste(labels.legend, 'count:', table(var.status)), values = cols ) if (log) { plot <- plot + scale_x_log10() } return(plot) } #%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% # Polygon Plots #%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% #' Polygon DimPlot #' #' Plot cells as polygons, rather than single points. Color cells by identity, or a categorical variable #' in metadata #' #' @inheritParams PolyFeaturePlot #' @param group.by A grouping variable present in the metadata. Default is to use the groupings present #' in the current cell identities (\code{Idents(object = object)}) #' #' @return Returns a ggplot object #' #' @export #' @concept visualization #' PolyDimPlot <- function( object, group.by = NULL, cells = NULL, poly.data = 'spatial', flip.coords = FALSE ) { polygons <- Misc(object = object, slot = poly.data) if (is.null(x = polygons)) { stop("Could not find polygon data in misc slot") } group.by <- group.by %||% 'ident' group.data <- FetchData( object = object, vars = group.by, cells = cells ) group.data$cell <- rownames(x = group.data) data <- merge(x = polygons, y = group.data, by = 'cell') if (flip.coords) { coord.x <- data$x data$x <- data$y data$y <- coord.x } plot <- SinglePolyPlot(data = data, group.by = group.by) return(plot) } #' Polygon FeaturePlot #' #' Plot cells as polygons, rather than single points. Color cells by any value #' accessible by \code{\link{FetchData}}. #' #' @inheritParams FeaturePlot #' @param poly.data Name of the polygon dataframe in the misc slot #' @param ncol Number of columns to split the plot into #' @param common.scale ... 
#' @param flip.coords Flip x and y coordinates #' #' @return Returns a ggplot object #' #' @importFrom ggplot2 scale_fill_viridis_c facet_wrap #' #' @export #' @concept visualization #' @concept spatial #' PolyFeaturePlot <- function( object, features, cells = NULL, poly.data = 'spatial', ncol = ceiling(x = length(x = features) / 2), min.cutoff = 0, max.cutoff = NA, common.scale = TRUE, flip.coords = FALSE ) { polygons <- Misc(object = object, slot = poly.data) if (is.null(x = polygons)) { stop("Could not find polygon data in misc slot") } assay.data <- FetchData( object = object, vars = features, cells = cells ) features <- colnames(x = assay.data) cells <- rownames(x = assay.data) min.cutoff <- mapply( FUN = function(cutoff, feature) { return(ifelse( test = is.na(x = cutoff), yes = min(assay.data[, feature]), no = cutoff )) }, cutoff = min.cutoff, feature = features ) max.cutoff <- mapply( FUN = function(cutoff, feature) { return(ifelse( test = is.na(x = cutoff), yes = max(assay.data[, feature]), no = cutoff )) }, cutoff = max.cutoff, feature = features ) check.lengths <- unique(x = vapply( X = list(features, min.cutoff, max.cutoff), FUN = length, FUN.VALUE = numeric(length = 1) )) if (length(x = check.lengths) != 1) { stop("There must be the same number of minimum and maximum cuttoffs as there are features") } assay.data <- mapply( FUN = function(feature, min, max) { return(ScaleColumn(vec = assay.data[, feature], cutoffs = c(min, max))) }, feature = features, min = min.cutoff, max = max.cutoff ) if (common.scale) { assay.data <- apply( X = assay.data, MARGIN = 2, FUN = function(x) { return(x - min(x)) } ) assay.data <- t( x = t(x = assay.data) / apply(X = assay.data, MARGIN = 2, FUN = max) ) } assay.data <- as.data.frame(x = assay.data) assay.data <- data.frame( cell = as.vector(x = replicate(n = length(x = features), expr = cells)), feature = as.vector(x = t(x = replicate(n = length(x = cells), expr = features))), expression = unlist(x = assay.data, use.names = FALSE) ) data <- merge(x = polygons, y = assay.data, by = 'cell') data$feature <- factor(x = data$feature, levels = features) if (flip.coords) { coord.x <- data$x data$x <- data$y data$y <- coord.x } plot <- SinglePolyPlot(data = data, group.by = 'expression', font_size = 8) + scale_fill_viridis_c() + facet_wrap(facets = 'feature', ncol = ncol) return(plot) } #%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% # Spatial Plots #%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% #' Spatial Cluster Plots #' #' Visualize clusters or other categorical groupings in a spatial context #' #' @inheritParams DimPlot #' @inheritParams SingleImagePlot #' @param object A \code{\link[SeuratObject]{Seurat}} object #' @param fov Name of FOV to plot #' @param boundaries A vector of segmentation boundaries per image to plot; #' can be a character vector, a named character vector, or a named list. 
#' Names should be the names of FOVs and values should be the names of #' segmentation boundaries #' @param molecules A vector of molecules to plot #' @param nmols Max number of each molecule specified in `molecules` to plot #' @param dark.background Set plot background to black #' @param crop Crop the plots to area with cells only #' @param overlap Overlay boundaries from a single image to create a single #' plot; if \code{TRUE}, then boundaries are stacked in the order they're #' given (first is lowest) #' @param axes Keep axes and panel background #' @param combine Combine plots into a single #' \code{patchwork} ggplot object.If \code{FALSE}, #' return a list of ggplot objects #' @param coord.fixed Plot cartesian coordinates with fixed aspect ratio #' @param flip_xy Flag to flip X and Y axes. Default is FALSE. #' #' @return If \code{combine = TRUE}, a \code{patchwork} #' ggplot object; otherwise, a list of ggplot objects #' #' @importFrom rlang !! is_na sym #' @importFrom patchwork wrap_plots #' @importFrom ggplot2 element_blank facet_wrap vars #' @importFrom SeuratObject DefaultFOV Cells #' DefaultBoundary FetchData Images Overlay #' #' @export #' ImageDimPlot <- function( object, fov = NULL, boundaries = NULL, group.by = NULL, split.by = NULL, cols = NULL, shuffle.cols = FALSE, size = 0.5, molecules = NULL, mols.size = 0.1, mols.cols = NULL, mols.alpha = 1.0, nmols = 1000, alpha = 1.0, border.color = 'white', border.size = NULL, na.value = 'grey50', dark.background = TRUE, crop = FALSE, cells = NULL, overlap = FALSE, axes = FALSE, combine = TRUE, coord.fixed = TRUE, flip_xy = TRUE ) { cells <- cells %||% Cells(x = object) # Determine FOV to use fov <- fov %||% DefaultFOV(object = object) fov <- Filter( f = function(x) { return( x %in% Images(object = object) && inherits(x = object[[x]], what = 'FOV') ) }, x = fov ) if (!length(x = fov)) { stop("No compatible spatial coordinates present") } # Identify boundaries to use boundaries <- boundaries %||% sapply( X = fov, FUN = function(x) { return(DefaultBoundary(object = object[[x]])) }, simplify = FALSE, USE.NAMES = TRUE ) boundaries <- .BoundariesByImage( object = object, fov = fov, boundaries = boundaries ) fov <- names(x = boundaries) overlap <- rep_len(x = overlap, length.out = length(x = fov)) crop <- rep_len(x = crop, length.out = length(x = fov)) names(x = crop) <- fov # Prepare plotting data group.by <- boundaries %!NA% group.by %||% 'ident' vars <- c(group.by, split.by) md <- if (!is_na(x = vars)) { FetchData( object = object, vars = vars[!is.na(x = vars)], cells = cells ) } else { NULL } pnames <- unlist(x = lapply( X = seq_along(along.with = fov), FUN = function(i) { return(if (isTRUE(x = overlap[i])) { fov[i] } else { paste(fov[i], boundaries[[i]], sep = '_') }) } )) pdata <- vector(mode = 'list', length = length(x = pnames)) names(x = pdata) <- pnames for (i in names(x = pdata)) { ul <- unlist(x = strsplit(x = i, split = '_')) img <- paste(ul[1:length(ul)-1], collapse = '_') # Apply overlap lyr <- ul[length(ul)] if (is.na(x = lyr)) { lyr <- boundaries[[img]] } # TODO: Apply crop pdata[[i]] <- lapply( X = lyr, FUN = function(l) { if (l == 'NA') { return(NA) } df <- fortify(model = object[[img]][[l]]) df <- df[df$cell %in% cells, , drop = FALSE] if (!is.null(x = md)) { df <- merge(x = df, y = md, by.x = 'cell', by.y = 0, all.x = TRUE) } df$cell <- paste(l, df$cell, sep = '_') df$boundary <- l return(df) } ) pdata[[i]] <- if (!is_na(x = pdata[[i]])) { do.call(what = 'rbind', args = pdata[[i]]) } else { unlist(x = pdata[[i]]) } } 
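# At this point 'pdata' is a named list with one entry per plot to be built; each
# non-NA entry is a fortified boundary data.frame restricted to the requested cells,
# merged with any 'group.by'/'split.by' metadata by cell name, and carrying a
# 'boundary' column that records which segmentation each row came from.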
# Fetch molecule information if (!is.null(x = molecules)) { molecules <- .MolsByFOV( object = object, fov = fov, molecules = molecules ) mdata <- vector(mode = 'list', length = length(x = fov)) names(x = mdata) <- fov for (img in names(x = mdata)) { idata <- object[[img]] if (!img %in% names(x = molecules)) { mdata[[img]] <- NULL next } if (isTRUE(x = crop[img])) { idata <- Overlay(x = idata, y = idata) } imols <- gsub( pattern = paste0('^', Key(object = idata)), replacement = '', x = molecules[[img]] ) mdata[[img]] <- FetchData( object = idata, vars = imols, nmols = nmols ) } } else { mdata <- NULL } # Build the plots plots <- vector( mode = 'list', length = length(x = pdata) * ifelse( test = length(x = group.by), yes = length(x = group.by), no = 1L ) ) idx <- 1L for (group in group.by) { for (i in seq_along(along.with = pdata)) { img <- unlist(x = strsplit(x = names(x = pdata)[i], split = '_'))[1L] p <- SingleImagePlot( data = pdata[[i]], col.by = pdata[[i]] %!NA% group, molecules = mdata[[img]], cols = cols, shuffle.cols = shuffle.cols, size = size, alpha = alpha, mols.size = mols.size, mols.cols = mols.cols, mols.alpha = mols.alpha, border.color = border.color, border.size = border.size, na.value = na.value, dark.background = dark.background ) if (!is.null(x = split.by)) { p <- p + facet_wrap( facets = vars(!!sym(x = split.by)) ) } if (!isTRUE(x = axes)) { p <- p + NoAxes(panel.background = element_blank()) } if (!anyDuplicated(x = pdata[[i]]$cell)) { p <- p + guides(fill = guide_legend(override.aes = list(size=4L, alpha=1))) } if (isTRUE(coord.fixed)) { p <- p + coord_fixed() } if(!isTRUE(flip_xy) && isTRUE(coord.fixed)){ xy_ratio = (max(pdata[[i]]$x) - min(pdata[[i]]$x)) / (max(pdata[[i]]$y) - min(pdata[[i]]$y)) p = p + coord_flip() + theme(aspect.ratio = 1/xy_ratio) } plots[[idx]] <- p idx <- idx + 1L } } if (isTRUE(x = combine)) { plots <- wrap_plots(plots) } return(plots) } #' Spatial Feature Plots #' #' Visualize expression in a spatial context #' #' @inheritParams FeaturePlot #' @inheritParams ImageDimPlot #' @param scale Set color scaling across multiple plots; choose from: #' \itemize{ #' \item \dQuote{\code{feature}}: Plots per-feature are scaled across splits #' \item \dQuote{\code{all}}: Plots per-feature are scaled across all features #' \item \dQuote{\code{none}}: Plots are not scaled; \strong{note}: setting #' \code{scale} to \dQuote{\code{none}} will result in color scales that are #' \emph{not} comparable between plots #' } #' Ignored if \code{blend = TRUE} #' #' @inherit ImageDimPlot return #' #' @importFrom patchwork wrap_plots #' @importFrom cowplot theme_cowplot #' @importFrom ggplot2 dup_axis element_blank element_text facet_wrap guides #' labs margin vars scale_y_continuous theme #' @importFrom SeuratObject DefaultFOV Cells DefaultBoundary #' FetchData Images Overlay #' #' @export #' ImageFeaturePlot <- function( object, features, fov = NULL, boundaries = NULL, cols = if (isTRUE(x = blend)) { c("lightgrey", "#ff0000", "#00ff00") } else { c("lightgrey", "firebrick1") }, size = 0.5, min.cutoff = NA, max.cutoff = NA, split.by = NULL, molecules = NULL, mols.size = 0.1, mols.cols = NULL, nmols = 1000, alpha = 1.0, border.color = 'white', border.size = NULL, dark.background = TRUE, blend = FALSE, blend.threshold = 0.5, crop = FALSE, cells = NULL, scale = c('feature', 'all', 'none'), overlap = FALSE, axes = FALSE, combine = TRUE, coord.fixed = TRUE ) { cells <- cells %||% Cells(x = object) scale <- scale[[1L]] scale <- match.arg(arg = scale) # Set a theme to remove 
right-hand Y axis lines # Also sets right-hand Y axis text label formatting no.right <- theme( axis.line.y.right = element_blank(), axis.ticks.y.right = element_blank(), axis.text.y.right = element_blank(), axis.title.y.right = element_text( face = "bold", size = 14, margin = margin(r = 7) ) ) # Determine fov to use fov <- fov %||% DefaultFOV(object = object) fov <- Filter( f = function(x) { return( x %in% Images(object = object) && inherits(x = object[[x]], what = 'FOV') ) }, x = fov ) if (!length(x = fov)) { stop("No compatible spatial coordinates present") } # Identify boundaries to use boundaries <- boundaries %||% sapply( X = fov, FUN = function(x) { return(DefaultBoundary(object = object[[x]])) }, simplify = FALSE, USE.NAMES = TRUE ) boundaries <- .BoundariesByImage( object = object, fov = fov, boundaries = boundaries ) fov <- names(x = boundaries) # Check overlaps/crops if (isTRUE(x = blend) || !is.null(x = split.by)) { type <- ifelse(test = isTRUE(x = 'blend'), yes = 'Blended', no = 'Split') if (length(x = fov) != 1L) { fov <- fov[1L] warning( type, ' image feature plots can only be done on a single image, using "', fov, '"', call. = FALSE, immediate. = TRUE ) } if (any(!overlap) && length(x = boundaries[[fov]]) > 1L) { warning( type, " image feature plots require overlapped segmentations", call. = FALSE, immediate. = TRUE ) } overlap <- TRUE } overlap <- rep_len(x = overlap, length.out = length(x = fov)) crop <- rep_len(x = crop, length.out = length(x = fov)) names(x = crop) <- names(x = overlap) <- fov # Checks for blending if (isTRUE(x = blend)) { if (length(x = features) != 2L) { stop("Blended feature plots only works with two features") } default.colors <- eval(expr = formals(fun = ImageFeaturePlot)$cols) cols <- switch( EXPR = as.character(x = length(x = cols)), '0' = { warning("No colors provided, using default colors", immediate. = TRUE) default.colors }, '1' = { warning( "Only one color provided, assuming specified is double-negative and augmenting with default colors", immediate. = TRUE ) c(cols, default.colors[2:3]) }, '2' = { warning( "Only two colors provided, assuming specified are for features and augmenting with '", default.colors[1], "' for double-negatives", immediate. = TRUE ) c(default.colors[1], cols) }, '3' = cols, { warning( "More than three colors provided, using only first three", immediate. 
= TRUE ) cols[1:3] } ) } # Get feature, splitting data md <- FetchData( object = object, vars = c(features, split.by[1L]), cells = cells ) split.by <- intersect(x = split.by, y = colnames(x = md)) if (!length(x = split.by)) { split.by <- NULL } imax <- ifelse( test = is.null(x = split.by), yes = ncol(x = md), no = ncol(x = md) - length(x = split.by) ) features <- colnames(x = md)[1:imax] # Determine cutoffs min.cutoff <- mapply( FUN = function(cutoff, feature) { return(ifelse( test = is.na(x = cutoff), yes = min(md[[feature]]), no = cutoff )) }, cutoff = min.cutoff, feature = features ) max.cutoff <- mapply( FUN = function(cutoff, feature) { return(ifelse( test = is.na(x = cutoff), yes = max(md[[feature]]), no = cutoff )) }, cutoff = max.cutoff, feature = features ) check.lengths <- unique(x = vapply( X = list(features, min.cutoff, max.cutoff), FUN = length, FUN.VALUE = numeric(length = 1) )) if (length(x = check.lengths) != 1) { stop("There must be the same number of minimum and maximum cuttoffs as there are features") } brewer.gran <- ifelse( test = length(x = cols) == 1, yes = brewer.pal.info[cols, ]$maxcolors, no = length(x = cols) ) # Apply cutoffs for (i in seq_along(along.with = features)) { f <- features[[i]] data.feature <- md[[f]] min.use <- SetQuantile(cutoff = min.cutoff[i], data = data.feature) max.use <- SetQuantile(cutoff = max.cutoff[i], data = data.feature) data.feature[data.feature < min.use] <- min.use data.feature[data.feature > max.use] <- max.use if (brewer.gran != 2) { data.feature <- if (all(data.feature == 0)) { rep_len(x = 0, length.out = length(x = data.feature)) } else { as.numeric(x = as.factor(x = cut( x = as.numeric(x = data.feature), breaks = brewer.gran ))) } } md[[f]] <- data.feature } # Figure out splits if (is.null(x = split.by)) { split.by <- RandomName() md[[split.by]] <- factor(x = split.by) } if (!is.factor(x = md[[split.by]])) { md[[split.by]] <- factor(x = md[[split.by]]) } # Apply blends if (isTRUE(x = blend)) { md <- lapply( X = levels(x = md[[split.by]]), FUN = function(x) { df <- md[as.character(x = md[[split.by]]) == x, , drop = FALSE] no.expression <- features[colMeans(x = df[, features]) == 0] if (length(x = no.expression)) { stop( "The following features have no value: ", paste(no.expression, collapse = ', ') ) } return(cbind( df[, split.by, drop = FALSE], BlendExpression(data = df[, features]) )) } ) md <- do.call(what = 'rbind', args = md) features <- setdiff(x = colnames(x = md), y = split.by) } # Prepare plotting data pnames <- unlist(x = lapply( X = seq_along(along.with = fov), FUN = function(i) { return(if (isTRUE(x = overlap[i])) { fov[i] } else { paste(fov[i], boundaries[[i]], sep = '_') }) } )) pdata <- vector(mode = 'list', length = length(x = pnames)) names(x = pdata) <- pnames for (i in names(x = pdata)) { ul <- unlist(x = strsplit(x = i, split = '_')) # img <- paste(ul[1:length(ul)-1], collapse = '_') # Apply overlap # lyr <- ul[length(ul)] if(length(ul) > 1) { img <- paste(ul[1:length(ul)-1], collapse = '_') lyr <- ul[length(ul)] } else if (length(ul) == 1) { img <- ul[1] lyr <- "centroids" } else { stop("the length of ul is 0. 
please check.") } if (is.na(x = lyr)) { lyr <- boundaries[[img]] } pdata[[i]] <- lapply( X = lyr, FUN = function(l) { df <- fortify(model = object[[img]][[l]]) df <- df[df$cell %in% cells, , drop = FALSE] if (!is.null(x = md)) { df <- merge(x = df, y = md, by.x = 'cell', by.y = 0, all.x = TRUE) } df$cell <- paste(l, df$cell, sep = '_') df$boundary <- l return(df) } ) pdata[[i]] <- if (!is_na(x = pdata[[i]])) { do.call(what = 'rbind', args = pdata[[i]]) } else { unlist(x = pdata[[i]]) } } # Fetch molecule information if (!is.null(x = molecules)) { molecules <- .MolsByFOV( object = object, fov = fov, molecules = molecules ) mdata <- vector(mode = 'list', length = length(x = fov)) names(x = mdata) <- fov for (img in names(x = mdata)) { idata <- object[[img]] if (!img %in% names(x = molecules)) { mdata[[img]] <- NULL next } if (isTRUE(x = crop[img])) { idata <- Overlay(x = idata, y = idata) } imols <- gsub( pattern = paste0('^', Key(object = idata)), replacement = '', x = molecules[[img]] ) mdata[[img]] <- FetchData( object = idata, vars = imols, nmols = nmols ) } } else { mdata <- NULL } # Set blended colors if (isTRUE(x = blend)) { ncol <- 4 color.matrix <- BlendMatrix( two.colors = cols[2:3], col.threshold = blend.threshold, negative.color = cols[1] ) cols <- cols[2:3] colors <- list( color.matrix[, 1], color.matrix[1, ], as.vector(x = color.matrix) ) blend.legend <- BlendMap(color.matrix = color.matrix) } limits <- switch( EXPR = scale, 'all' = range(unlist(x = md[, features])), NULL ) # Build the plots plots <- vector( mode = 'list', length = length(x = levels(x = md[[split.by]])) ) names(x = plots) <- levels(x = md[[split.by]]) for (i in seq_along(along.with = levels(x = md[[split.by]]))) { ident <- levels(x = md[[split.by]])[i] plots[[ident]] <- vector(mode = 'list', length = length(x = pdata)) names(x = plots[[ident]]) <- names(x = pdata) if (isTRUE(x = blend)) { blend.key <- suppressMessages( expr = blend.legend + scale_y_continuous( sec.axis = dup_axis(name = ifelse( test = length(x = levels(x = md[[split.by]])) > 1, yes = ident, no = '' )), expand = c(0, 0) ) + labs( x = features[1L], y = features[2L], title = if (i == 1L) { paste('Color threshold:', blend.threshold) } else { NULL } ) + no.right ) } for (j in seq_along(along.with = pdata)) { key <- names(x = pdata)[j] img <- unlist(x = strsplit(x = key, split = '_'))[1L] plots[[ident]][[key]] <- vector( mode = 'list', length = length(x = features) + ifelse( test = isTRUE(x = blend), yes = 1L, no = 0L ) ) data.plot <- pdata[[j]][as.character(x = pdata[[j]][[split.by]]) == ident, , drop = FALSE] for (y in seq_along(along.with = features)) { feature <- features[y] # Get blended colors cols.use <- if (isTRUE(x = blend)) { cc <- as.numeric(x = as.character(x = data.plot[, feature])) + 1 colors[[y]][sort(unique(x = cc))] } else { NULL } colnames(data.plot) <- gsub("-", "_", colnames(data.plot)) p <- SingleImagePlot( data = data.plot, col.by = gsub("-", "_", feature), size = size, col.factor = blend, cols = cols.use, molecules = mdata[[img]], mols.size = mols.size, mols.cols = mols.cols, alpha = alpha, border.color = border.color, border.size = border.size, dark.background = dark.background ) + CenterTitle() + labs(fill=feature) # Remove fill guides for blended plots if (isTRUE(x = blend)) { p <- p + guides(fill = 'none') } if (isTRUE(coord.fixed)) { p <- p + coord_fixed() } # Remove axes if (!isTRUE(x = axes)) { p <- p + NoAxes(panel.background = element_blank()) } else if (isTRUE(x = blend) || length(x = levels(x = md[[split.by]])) > 1L) 
{ if (y != 1L) { p <- p + theme( axis.line.y = element_blank(), axis.ticks.y = element_blank(), axis.text.y = element_blank(), axis.title.y.left = element_blank() ) } if (i != length(x = levels(x = md[[split.by]]))) { p <- p + theme( axis.line.x = element_blank(), axis.ticks.x = element_blank(), axis.text.x = element_blank(), axis.title.x = element_blank() ) } } # Add colors for unblended plots if (!isTRUE(x = blend)) { if (length(x = cols) == 1L) { p <- p + scale_fill_brewer(palette = cols) } else { cols.grad <- cols fexp <- data.plot[data.plot[[split.by]] == ident, feature, drop = TRUE] fexp <- unique(x = fexp) if (length(x = fexp) == 1L) { warning( "All cells have the same value (", fexp, ") of ", feature, call. = FALSE, immediate. = TRUE ) if (fexp == 0) { cols.grad <- cols.grad[1L] } } # Check if we're scaling the colorbar across splits if (scale == 'feature') { limits <- range(pdata[[j]][[feature]]) } p <- p + ggplot2::scale_fill_gradientn( colors = cols.grad, guide = 'colorbar', limits = limits ) } } # Add some labels p <- p + if (i == 1L) { ggplot2::labs(title = feature) } else { ggplot2::labs(title = NULL) } plots[[ident]][[key]][[y]] <- p } if (isTRUE(x = blend)) { plots[[ident]][[key]][[length(x = plots[[ident]][[key]])]] <- blend.key } else if (length(x = levels(x = md[[split.by]])) > 1L) { plots[[ident]][[key]][[y]] <- suppressMessages( expr = plots[[ident]][[key]][[y]] + scale_y_continuous(sec.axis = dup_axis(name = ident)) + no.right ) } } plots[[ident]] <- unlist( x = plots[[ident]], recursive = FALSE, use.names = FALSE ) } plots <- unlist(x = plots, recursive = FALSE, use.names = FALSE) if (isTRUE(x = combine)) { if (isTRUE(x = blend) || length(x = levels(x = md[[split.by]])) > 1L) { plots <- wrap_plots( plots, ncol = ifelse( test = isTRUE(x = blend), yes = 4L, no = length(x = features) ), nrow = length(x = levels(x = md[[split.by]])), guides = 'collect' ) } else { plots <- wrap_plots(plots) } } return(plots) } #' Visualize spatial and clustering (dimensional reduction) data in a linked, #' interactive framework #' #' @inheritParams SpatialPlot #' @inheritParams FeaturePlot #' @inheritParams DimPlot #' @param feature Feature to visualize #' @param image Name of the image to use in the plot #' #' @return Returns final plots. 
If \code{combine}, plots are stiched together #' using \code{\link{CombinePlots}}; otherwise, returns a list of ggplot objects #' #' @rdname LinkedPlots #' @name LinkedPlots #' #' @importFrom scales hue_pal #' @importFrom patchwork wrap_plots #' @importFrom ggplot2 scale_alpha_ordinal guides #' @importFrom miniUI miniPage gadgetTitleBar miniTitleBarButton miniContentPanel #' @importFrom shiny fillRow plotOutput brushOpts clickOpts hoverOpts #' verbatimTextOutput reactiveValues observeEvent stopApp nearPoints #' brushedPoints renderPlot renderPrint runGadget #' #' @aliases LinkedPlot LinkedDimPlot #' #' @export #' @concept visualization #' @concept spatial #' #' @examples #' \dontrun{ #' LinkedDimPlot(seurat.object) #' LinkedFeaturePlot(seurat.object, feature = 'Hpca') #' } #' LinkedDimPlot <- function( object, dims = 1:2, reduction = NULL, image = NULL, group.by = NULL, alpha = c(0.1, 1), combine = TRUE ) { # Setup gadget UI ui <- miniPage( gadgetTitleBar( title = 'LinkedDimPlot', left = miniTitleBarButton(inputId = 'reset', label = 'Reset') ), miniContentPanel( fillRow( plotOutput( outputId = 'spatialplot', height = '100%', # brush = brushOpts(id = 'brush', delay = 10, clip = TRUE, resetOnNew = FALSE), click = clickOpts(id = 'spclick', clip = TRUE), hover = hoverOpts(id = 'sphover', delay = 10, nullOutside = TRUE) ), plotOutput( outputId = 'dimplot', height = '100%', brush = brushOpts(id = 'brush', delay = 10, clip = TRUE, resetOnNew = FALSE), click = clickOpts(id = 'dimclick', clip = TRUE), hover = hoverOpts(id = 'dimhover', delay = 10, nullOutside = TRUE) ), height = '97%' ), verbatimTextOutput(outputId = 'info') ) ) # Prepare plotting data image <- image %||% DefaultImage(object = object) cells.use <- Cells(x = object[[image]]) reduction <- reduction %||% DefaultDimReduc(object = object) dims <- dims[1:2] dims <- paste0(Key(object = object[[reduction]]), dims) group.by <- group.by %||% 'ident' group.data <- FetchData( object = object, vars = group.by, cells = cells.use ) coords <- GetTissueCoordinates(object = object[[image]]) embeddings <- Embeddings(object = object[[reduction]])[cells.use, dims] plot.data <- cbind(coords, group.data, embeddings) plot.data$selected_ <- FALSE Idents(object = object) <- group.by # Setup the server server <- function(input, output, session) { click <- reactiveValues(pt = NULL, invert = FALSE) plot.env <- reactiveValues(data = plot.data, alpha.by = NULL) # Handle events observeEvent( eventExpr = input$done, handlerExpr = { plots <- list(plot.env$spatialplot, plot.env$dimplot) if (combine) { plots <- wrap_plots(plots, ncol = 2) } stopApp(returnValue = plots) } ) observeEvent( eventExpr = input$reset, handlerExpr = { click$pt <- NULL click$invert <- FALSE session$resetBrush(brushId = 'brush') } ) observeEvent(eventExpr = input$brush, handlerExpr = click$pt <- NULL) observeEvent( eventExpr = input$spclick, handlerExpr = { click$pt <- input$spclick click$invert <- TRUE } ) observeEvent( eventExpr = input$dimclick, handlerExpr = { click$pt <- input$dimclick click$invert <- FALSE } ) observeEvent( eventExpr = c(input$brush, input$spclick, input$dimclick), handlerExpr = { plot.env$data <- if (is.null(x = input$brush)) { clicked <- nearPoints( df = plot.data, coordinfo = if (click$invert) { InvertCoordinate(x = click$pt) } else { click$pt }, threshold = 10, maxpoints = 1 ) if (nrow(x = clicked) == 1) { cell.clicked <- rownames(x = clicked) group.clicked <- plot.data[cell.clicked, group.by, drop = TRUE] idx.group <- which(x = plot.data[[group.by]] == 
group.clicked) plot.data[idx.group, 'selected_'] <- TRUE plot.data } else { plot.data } } else if (input$brush$outputId == 'dimplot') { brushedPoints(df = plot.data, brush = input$brush, allRows = TRUE) } else if (input$brush$outputId == 'spatialplot') { brushedPoints(df = plot.data, brush = InvertCoordinate(x = input$brush), allRows = TRUE) } plot.env$alpha.by <- if (any(plot.env$data$selected_)) { 'selected_' } else { NULL } } ) # Set plots output$spatialplot <- renderPlot( expr = { plot.env$spatialplot <- SingleSpatialPlot( data = plot.env$data, image = object[[image]], col.by = group.by, pt.size.factor = 1.6, crop = TRUE, alpha.by = plot.env$alpha.by ) + scale_alpha_ordinal(range = alpha) + NoLegend() plot.env$spatialplot } ) output$dimplot <- renderPlot( expr = { plot.env$dimplot <- SingleDimPlot( data = plot.env$data, dims = dims, col.by = group.by, alpha.by = plot.env$alpha.by ) + scale_alpha_ordinal(range = alpha) + guides(alpha = "none") plot.env$dimplot } ) # Add hover text output$info <- renderPrint( expr = { cell.hover <- rownames(x = nearPoints( df = plot.data, coordinfo = if (is.null(x = input[['sphover']])) { input$dimhover } else { InvertCoordinate(x = input$sphover) }, threshold = 10, maxpoints = 1 )) # if (length(x = cell.hover) == 1) { # palette <- hue_pal()(n = length(x = levels(x = object))) # group <- plot.data[cell.hover, group.by, drop = TRUE] # background <- palette[which(x = levels(x = object) == group)] # text <- unname(obj = BGTextColor(background = background)) # style <- paste0( # paste( # paste('background-color:', background), # paste('color:', text), # sep = '; ' # ), # ';' # ) # info <- paste(cell.hover, paste('Group:', group), sep = '
<br />') # } else { # style <- 'background-color: white; color: black' # info <- NULL # } # HTML(text = paste0("<div style='", style, "'>", info, "</div>")) # p(HTML(info), style = style) # paste0('<p>', info, '</p>') # TODO: Get newlines, extra information, and background color working if (length(x = cell.hover) == 1) { paste(cell.hover, paste('Group:', plot.data[cell.hover, group.by, drop = TRUE]), collapse = '<br>
') } else { NULL } } ) } # Run the thang runGadget(app = ui, server = server) } #' @rdname LinkedPlots #' #' @aliases LinkedFeaturePlot #' #' @importFrom ggplot2 scale_fill_gradientn theme scale_alpha guides #' scale_color_gradientn guide_colorbar #' #' @export #' @concept visualization #' @concept spatial LinkedFeaturePlot <- function( object, feature, dims = 1:2, reduction = NULL, image = NULL, slot = 'data', alpha = c(0.1, 1), combine = TRUE ) { # Setup gadget UI ui <- miniPage( gadgetTitleBar( title = 'LinkedFeaturePlot', left = NULL ), miniContentPanel( fillRow( plotOutput( outputId = 'spatialplot', height = '100%', hover = hoverOpts(id = 'sphover', delay = 10, nullOutside = TRUE) ), plotOutput( outputId = 'dimplot', height = '100%', hover = hoverOpts(id = 'dimhover', delay = 10, nullOutside = TRUE) ), height = '97%' ), verbatimTextOutput(outputId = 'info') ) ) # Prepare plotting data cols <- SpatialColors(n = 100) image <- image %||% DefaultImage(object = object) cells.use <- Cells(x = object[[image]]) reduction <- reduction %||% DefaultDimReduc(object = object) dims <- dims[1:2] dims <- paste0(Key(object = object[[reduction]]), dims) group.data <- FetchData( object = object, vars = feature, cells = cells.use ) coords <- GetTissueCoordinates(object = object[[image]]) embeddings <- Embeddings(object = object[[reduction]])[cells.use, dims] plot.data <- cbind(coords, group.data, embeddings) # Setup the server server <- function(input, output, session) { plot.env <- reactiveValues() # Handle events observeEvent( eventExpr = input$done, handlerExpr = { plots <- list(plot.env$spatialplot, plot.env$dimplot) if (combine) { plots <- wrap_plots(plots, ncol = 2) } stopApp(returnValue = plots) } ) # Set plots output$spatialplot <- renderPlot( expr = { plot.env$spatialplot <- SingleSpatialPlot( data = plot.data, image = object[[image]], col.by = feature, pt.size.factor = 1.6, crop = TRUE, alpha.by = feature ) + scale_fill_gradientn(name = feature, colours = cols) + theme(legend.position = 'top') + scale_alpha(range = alpha) + guides(alpha = "none") plot.env$spatialplot } ) output$dimplot <- renderPlot( expr = { plot.env$dimplot <- SingleDimPlot( data = plot.data, dims = dims, col.by = feature ) + scale_color_gradientn(name = feature, colours = cols, guide = 'colorbar') + guides(color = guide_colorbar()) plot.env$dimplot } ) # Add hover text output$info <- renderPrint( expr = { cell.hover <- rownames(x = nearPoints( df = plot.data, coordinfo = if (is.null(x = input[['sphover']])) { input$dimhover } else { InvertCoordinate(x = input$sphover) }, threshold = 10, maxpoints = 1 )) # TODO: Get newlines, extra information, and background color working if (length(x = cell.hover) == 1) { paste(cell.hover, paste('Expression:', plot.data[cell.hover, feature, drop = TRUE]), collapse = '
') } else { NULL } } ) } runGadget(app = ui, server = server) } #' Visualize clusters spatially and interactively #' #' @inheritParams SpatialPlot #' @inheritParams DimPlot #' @inheritParams LinkedPlots #' #' @return Returns final plot as a ggplot object #' #' @importFrom ggplot2 scale_alpha_ordinal #' @importFrom miniUI miniPage miniButtonBlock miniTitleBarButton miniContentPanel #' @importFrom shiny fillRow plotOutput verbatimTextOutput reactiveValues #' observeEvent stopApp nearPoints renderPlot runGadget #' #' @export #' @concept visualization #' @concept spatial #' ISpatialDimPlot <- function( object, image = NULL, group.by = NULL, alpha = c(0.3, 1) ) { # Setup gadget UI ui <- miniPage( miniButtonBlock(miniTitleBarButton( inputId = 'done', label = 'Done', primary = TRUE )), miniContentPanel( fillRow( plotOutput( outputId = 'plot', height = '100%', click = clickOpts(id = 'click', clip = TRUE), hover = hoverOpts(id = 'hover', delay = 10, nullOutside = TRUE) ), height = '97%' ), verbatimTextOutput(outputId = 'info') ) ) # Get plotting data # Prepare plotting data image <- image %||% DefaultImage(object = object) cells.use <- Cells(x = object[[image]]) group.by <- group.by %||% 'ident' group.data <- FetchData( object = object, vars = group.by, cells = cells.use ) coords <- GetTissueCoordinates(object = object[[image]]) plot.data <- cbind(coords, group.data) plot.data$selected_ <- FALSE Idents(object = object) <- group.by # Set up the server server <- function(input, output, session) { click <- reactiveValues(pt = NULL) plot.env <- reactiveValues(data = plot.data, alpha.by = NULL) # Handle events observeEvent( eventExpr = input$done, handlerExpr = stopApp(returnValue = plot.env$plot) ) observeEvent( eventExpr = input$click, handlerExpr = { clicked <- nearPoints( df = plot.data, coordinfo = InvertCoordinate(x = input$click), threshold = 10, maxpoints = 1 ) plot.env$data <- if (nrow(x = clicked) == 1) { cell.clicked <- rownames(x = clicked) cell.clicked <- rownames(x = clicked) group.clicked <- plot.data[cell.clicked, group.by, drop = TRUE] idx.group <- which(x = plot.data[[group.by]] == group.clicked) plot.data[idx.group, 'selected_'] <- TRUE plot.data } else { plot.data } plot.env$alpha.by <- if (any(plot.env$data$selected_)) { 'selected_' } else { NULL } } ) # Set plot output$plot <- renderPlot( expr = { plot.env$plot <- SingleSpatialPlot( data = plot.env$data, image = object[[image]], col.by = group.by, crop = TRUE, alpha.by = plot.env$alpha.by, pt.size.factor = 1.6 ) + scale_alpha_ordinal(range = alpha) + NoLegend() plot.env$plot } ) # Add hover text output$info <- renderPrint( expr = { cell.hover <- rownames(x = nearPoints( df = plot.data, coordinfo = InvertCoordinate(x = input$hover), threshold = 10, maxpoints = 1 )) if (length(x = cell.hover) == 1) { paste(cell.hover, paste('Group:', plot.data[cell.hover, group.by, drop = TRUE]), collapse = '
') } else { NULL } } ) } runGadget(app = ui, server = server) } #' Visualize features spatially and interactively #' #' @inheritParams SpatialPlot #' @inheritParams FeaturePlot #' @inheritParams LinkedPlots #' #' @return Returns final plot as a ggplot object #' #' @importFrom ggplot2 scale_fill_gradientn theme scale_alpha guides #' @importFrom miniUI miniPage miniButtonBlock miniTitleBarButton miniContentPanel #' @importFrom shiny fillRow sidebarPanel sliderInput selectInput reactiveValues #' observeEvent stopApp observe updateSelectInput plotOutput renderPlot runGadget #' #' @export #' @concept visualization #' @concept spatial ISpatialFeaturePlot <- function( object, feature, image = NULL, slot = 'data', alpha = c(0.1, 1) ) { # Set inital data values assay.keys <- Key(object = object)[Assays(object = object)] keyed <- sapply(X = assay.keys, FUN = grepl, x = feature) assay <- if (any(keyed)) { names(x = which(x = keyed))[1] } else { DefaultAssay(object = object) } features <- sort(x = rownames(x = GetAssayData( object = object, slot = slot, assay = assay ))) feature.label <- 'Feature to visualize' assays.use <- vapply( X = Assays(object = object), FUN = function(x) { return(!IsMatrixEmpty(x = GetAssayData( object = object, slot = slot, assay = x ))) }, FUN.VALUE = logical(length = 1L) ) assays.use <- sort(x = Assays(object = object)[assays.use]) # Setup gadget UI ui <- miniPage( miniButtonBlock(miniTitleBarButton( inputId = 'done', label = 'Done', primary = TRUE )), miniContentPanel( fillRow( sidebarPanel( sliderInput( inputId = 'alpha', label = 'Alpha intensity', min = 0, max = max(alpha), value = min(alpha), step = 0.01, width = '100%' ), sliderInput( inputId = 'pt.size', label = 'Point size', min = 0, max = 5, value = 1.6, step = 0.1, width = '100%' ), selectInput( inputId = 'assay', label = 'Assay', choices = assays.use, selected = assay, selectize = FALSE, width = '100%' ), selectInput( inputId = 'feature', label = feature.label, choices = features, selected = feature, selectize = FALSE, width = '100%' ), selectInput( inputId = 'palette', label = 'Color scheme', choices = names(x = FeaturePalettes), selected = 'Spatial', selectize = FALSE, width = '100%' ), width = '100%' ), plotOutput(outputId = 'plot', height = '100%'), flex = c(1, 4) ) ) ) # Prepare plotting data image <- image %||% DefaultImage(object = object) cells.use <- Cells(x = object[[image]]) coords <- GetTissueCoordinates(object = object[[image]]) feature.data <- FetchData( object = object, vars = feature, cells = cells.use, slot = slot ) plot.data <- cbind(coords, feature.data) server <- function(input, output, session) { plot.env <- reactiveValues( data = plot.data, feature = feature, palette = 'Spatial' ) # Observe events observeEvent( eventExpr = input$done, handlerExpr = stopApp(returnValue = plot.env$plot) ) observe(x = { assay <- input$assay feature.use <- input$feature features.assay <- sort(x = rownames(x = GetAssayData( object = object, slot = slot, assay = assay ))) feature.use <- ifelse( test = feature.use %in% features.assay, yes = feature.use, no = features.assay[1] ) updateSelectInput( session = session, inputId = 'assay', label = 'Assay', choices = assays.use, selected = assay ) updateSelectInput( session = session, inputId = 'feature', label = feature.label, choices = features.assay, selected = feature.use ) }) observe(x = { feature.use <- input$feature try( expr = { feature.data <- FetchData( object = object, vars = paste0(Key(object = object[[input$assay]]), feature.use), cells = cells.use, slot = slot 
) colnames(x = feature.data) <- feature.use plot.env$data <- cbind(coords, feature.data) plot.env$feature <- feature.use }, silent = TRUE ) }) observe(x = { plot.env$palette <- input$palette }) # Create plot output$plot <- renderPlot(expr = { plot.env$plot <- SingleSpatialPlot( data = plot.env$data, image = object[[image]], col.by = plot.env$feature, pt.size.factor = input$pt.size, crop = TRUE, alpha.by = plot.env$feature ) + # scale_fill_gradientn(name = plot.env$feature, colours = cols) + scale_fill_gradientn(name = plot.env$feature, colours = FeaturePalettes[[plot.env$palette]]) + theme(legend.position = 'top') + scale_alpha(range = c(input$alpha, 1)) + guides(alpha = "none") plot.env$plot }) } runGadget(app = ui, server = server) } #' Visualize spatial clustering and expression data. #' #' SpatialPlot plots a feature or discrete grouping (e.g. cluster assignments) as #' spots over the image that was collected. We also provide SpatialFeaturePlot #' and SpatialDimPlot as wrapper functions around SpatialPlot for a consistent #' naming framework. #' #' @inheritParams HoverLocator #' @param object A Seurat object #' @param group.by Name of meta.data column to group the data by #' @param features Name of the feature to visualize. Provide either group.by OR #' features, not both. #' @param images Name of the images to use in the plot(s) #' @param cols Vector of colors, each color corresponds to an identity class. #' This may also be a single character or numeric value corresponding to a #' palette as specified by \code{\link[RColorBrewer]{brewer.pal.info}}. By #' default, ggplot2 assigns colors #' @param image.alpha Adjust the opacity of the background images. Set to 0 to #' remove. #' @param crop Crop the plot in to focus on points plotted. Set to \code{FALSE} to show #' entire background image. #' @param slot If plotting a feature, which data slot to pull from (counts, #' data, or scale.data) #' @param keep.scale How to handle the color scale across multiple plots. Options are: #' \itemize{ #' \item \dQuote{feature} (default; by row/feature scaling): The plots for #' each individual feature are scaled to the maximum expression of the #' feature across the conditions provided to \code{split.by} #' \item \dQuote{all} (universal scaling): The plots for all features and #' conditions are scaled to the maximum expression value for the feature #' with the highest overall expression #' \item \code{NULL} (no scaling): Each individual plot is scaled to the #' maximum expression value of the feature in the condition provided to #' \code{split.by}; be aware setting \code{NULL} will result in color #' scales that are not comparable between plots #' } #' @param min.cutoff,max.cutoff Vector of minimum and maximum cutoff #' values for each feature, may specify quantile in the form of 'q##' where '##' #' is the quantile (eg, 'q1', 'q10') #' @param cells.highlight A list of character or numeric vectors of cells to #' highlight. If only one group of cells desired, can simply pass a vector #' instead of a list. If set, colors selected cells to the color(s) in #' cols.highlight #' @param cols.highlight A vector of colors to highlight the cells as; ordered #' the same as the groups in cells.highlight; last color corresponds to #' unselected cells. 
#' @param facet.highlight When highlighting certain groups of cells, split each #' group into its own plot #' @param label Whether to label the clusters #' @param label.size Sets the size of the labels #' @param label.color Sets the color of the label text #' @param label.box Whether to put a box around the label text (geom_text vs #' geom_label) #' @param repel Repels the labels to prevent overlap #' @param ncol Number of columns if plotting multiple plots #' @param combine Combine plots into a single gg object; note that if TRUE; #' themeing will not work when plotting multiple features/groupings #' @param pt.size.factor Scale the size of the spots. #' @param alpha Controls opacity of spots. Provide as a vector specifying the #' min and max for SpatialFeaturePlot. For SpatialDimPlot, provide a single #' alpha value for each plot. #' @param stroke Control the width of the border around the spots #' @param interactive Launch an interactive SpatialDimPlot or SpatialFeaturePlot #' session, see \code{\link{ISpatialDimPlot}} or #' \code{\link{ISpatialFeaturePlot}} for more details #' @param do.identify,do.hover DEPRECATED in favor of \code{interactive} #' @param identify.ident DEPRECATED #' #' @return If \code{do.identify}, either a vector of cells selected or the object #' with selected cells set to the value of \code{identify.ident} (if set). Else, #' if \code{do.hover}, a plotly object with interactive graphics. Else, a ggplot #' object #' #' @importFrom ggplot2 scale_fill_gradientn ggtitle theme element_text scale_alpha #' @importFrom patchwork wrap_plots #' @export #' @concept visualization #' @concept spatial #' #' @examples #' \dontrun{ #' # For functionality analagous to FeaturePlot #' SpatialPlot(seurat.object, features = "MS4A1") #' SpatialFeaturePlot(seurat.object, features = "MS4A1") #' #' # For functionality analagous to DimPlot #' SpatialPlot(seurat.object, group.by = "clusters") #' SpatialDimPlot(seurat.object, group.by = "clusters") #' } #' SpatialPlot <- function( object, group.by = NULL, features = NULL, images = NULL, cols = NULL, image.alpha = 1, crop = TRUE, slot = 'data', keep.scale = "feature", min.cutoff = NA, max.cutoff = NA, cells.highlight = NULL, cols.highlight = c('#DE2D26', 'grey50'), facet.highlight = FALSE, label = FALSE, label.size = 5, label.color = 'white', label.box = TRUE, repel = FALSE, ncol = NULL, combine = TRUE, pt.size.factor = 1.6, alpha = c(1, 1), stroke = 0.25, interactive = FALSE, do.identify = FALSE, identify.ident = NULL, do.hover = FALSE, information = NULL ) { if (isTRUE(x = do.hover) || isTRUE(x = do.identify)) { warning( "'do.hover' and 'do.identify' are deprecated as we are removing plotly-based interactive graphics, use 'interactive' instead for Shiny-based interactivity", call. = FALSE, immediate. 
= TRUE ) interactive <- TRUE } if (!is.null(x = group.by) & !is.null(x = features)) { stop("Please specific either group.by or features, not both.") } images <- images %||% Images(object = object, assay = DefaultAssay(object = object)) if (length(x = images) == 0) { images <- Images(object = object) } if (length(x = images) < 1) { stop("Could not find any spatial image information") } # Check keep.scale param for valid entries if (!(is.null(x = keep.scale)) && !(keep.scale %in% c("feature", "all"))) { stop("`keep.scale` must be set to either `feature`, `all`, or NULL") } if (is.null(x = features)) { if (interactive) { return(ISpatialDimPlot( object = object, image = images[1], group.by = group.by, alpha = alpha )) } group.by <- group.by %||% 'ident' object[['ident']] <- Idents(object = object) data <- object[[group.by]] for (group in group.by) { if (!is.factor(x = data[, group])) { data[, group] <- factor(x = data[, group]) } } } else { if (interactive) { return(ISpatialFeaturePlot( object = object, feature = features[1], image = images[1], slot = slot, alpha = alpha )) } data <- FetchData( object = object, vars = features, slot = slot ) features <- colnames(x = data) # Determine cutoffs min.cutoff <- mapply( FUN = function(cutoff, feature) { return(ifelse( test = is.na(x = cutoff), yes = min(data[, feature]), no = cutoff )) }, cutoff = min.cutoff, feature = features ) max.cutoff <- mapply( FUN = function(cutoff, feature) { return(ifelse( test = is.na(x = cutoff), yes = max(data[, feature]), no = cutoff )) }, cutoff = max.cutoff, feature = features ) check.lengths <- unique(x = vapply( X = list(features, min.cutoff, max.cutoff), FUN = length, FUN.VALUE = numeric(length = 1) )) if (length(x = check.lengths) != 1) { stop("There must be the same number of minimum and maximum cuttoffs as there are features") } # Apply cutoffs data <- sapply( X = 1:ncol(x = data), FUN = function(index) { data.feature <- as.vector(x = data[, index]) min.use <- SetQuantile(cutoff = min.cutoff[index], data.feature) max.use <- SetQuantile(cutoff = max.cutoff[index], data.feature) data.feature[data.feature < min.use] <- min.use data.feature[data.feature > max.use] <- max.use return(data.feature) } ) colnames(x = data) <- features rownames(x = data) <- Cells(x = object) } features <- colnames(x = data) colnames(x = data) <- features rownames(x = data) <- colnames(x = object) facet.highlight <- facet.highlight && (!is.null(x = cells.highlight) && is.list(x = cells.highlight)) if (do.hover) { if (length(x = images) > 1) { images <- images[1] warning( "'do.hover' requires only one image, using image ", images, call. = FALSE, immediate. = TRUE ) } if (length(x = features) > 1) { features <- features[1] type <- ifelse(test = is.null(x = group.by), yes = 'feature', no = 'grouping') warning( "'do.hover' requires only one ", type, ", using ", features, call. = FALSE, immediate. = TRUE ) } if (facet.highlight) { warning( "'do.hover' requires no faceting highlighted cells", call. = FALSE, immediate. = TRUE ) facet.highlight <- FALSE } } if (facet.highlight) { if (length(x = images) > 1) { images <- images[1] warning( "Faceting the highlight only works with a single image, using image ", images, call. = FALSE, immediate. 
= TRUE ) } ncols <- length(x = cells.highlight) } else { ncols <- length(x = images) } plots <- vector( mode = "list", length = length(x = features) * ncols ) # Get max across all features if (!(is.null(x = keep.scale)) && keep.scale == "all") { max.feature.value <- max(apply(data, 2, function(x) max(x, na.rm = TRUE))) } for (i in 1:ncols) { plot.idx <- i image.idx <- ifelse(test = facet.highlight, yes = 1, no = i) image.use <- object[[images[[image.idx]]]] coordinates <- GetTissueCoordinates(object = image.use) highlight.use <- if (facet.highlight) { cells.highlight[i] } else { cells.highlight } for (j in 1:length(x = features)) { cols.unset <- is.factor(x = data[, features[j]]) && is.null(x = cols) if (cols.unset) { cols <- hue_pal()(n = length(x = levels(x = data[, features[j]]))) names(x = cols) <- levels(x = data[, features[j]]) } # Get feature max for individual feature if (!(is.null(x = keep.scale)) && keep.scale == "feature" && !inherits(x = data[, features[j]], what = "factor") ) { max.feature.value <- max(data[, features[j]]) } plot <- SingleSpatialPlot( data = cbind( coordinates, data[rownames(x = coordinates), features[j], drop = FALSE] ), image = image.use, image.alpha = image.alpha, col.by = features[j], cols = cols, alpha.by = if (is.null(x = group.by)) { features[j] } else { NULL }, pt.alpha = if (!is.null(x = group.by)) { alpha[j] } else { NULL }, geom = if (inherits(x = image.use, what = "STARmap")) { 'poly' } else { 'spatial' }, cells.highlight = highlight.use, cols.highlight = cols.highlight, pt.size.factor = pt.size.factor, stroke = stroke, crop = crop ) if (is.null(x = group.by)) { plot <- plot + scale_fill_gradientn( name = features[j], colours = SpatialColors(n = 100) ) + theme(legend.position = 'top') + scale_alpha(range = alpha) + guides(alpha = "none") } else if (label) { plot <- LabelClusters( plot = plot, id = ifelse( test = is.null(x = cells.highlight), yes = features[j], no = 'highlight' ), geom = if (inherits(x = image.use, what = "STARmap")) { 'GeomPolygon' } else { 'GeomSpatial' }, repel = repel, size = label.size, color = label.color, box = label.box, position = "nearest" ) } if (j == 1 && length(x = images) > 1 && !facet.highlight) { plot <- plot + ggtitle(label = images[[image.idx]]) + theme(plot.title = element_text(hjust = 0.5)) } if (facet.highlight) { plot <- plot + ggtitle(label = names(x = cells.highlight)[i]) + theme(plot.title = element_text(hjust = 0.5)) + NoLegend() } # Plot multiple images depending on keep.scale if (!(is.null(x = keep.scale)) && !inherits(x = data[, features[j]], "factor")) { plot <- suppressMessages(plot & scale_fill_gradientn(colors = SpatialColors(n = 100), limits = c(NA, max.feature.value))) } plots[[plot.idx]] <- plot plot.idx <- plot.idx + ncols if (cols.unset) { cols <- NULL } } } # if (do.identify) { # return(CellSelector( # plot = plot, # object = identify.ident %iff% object, # ident = identify.ident # )) # } else if (do.hover) { # return(HoverLocator( # plot = plots[[1]], # information = information %||% data[, features, drop = FALSE], # axes = FALSE, # # cols = c('size' = 'point.size.factor', 'colour' = 'fill'), # images = GetImage(object = object, mode = 'plotly', image = images) # )) # } if (combine) { if (!is.null(x = ncol)) { return(wrap_plots(plots = plots, ncol = ncol)) } if (length(x = images) > 1) { return(wrap_plots(plots = plots, ncol = length(x = images))) } return(wrap_plots(plots = plots)) } return(plots) } #%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% # Other 
plotting functions #%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% #' Plot the Barcode Distribution and Calculated Inflection Points #' #' This function plots the calculated inflection points derived from the barcode-rank #' distribution. #' #' See [CalculateBarcodeInflections()] to calculate inflection points and #' [SubsetByBarcodeInflections()] to subsequently subset the Seurat object. #' #' @param object Seurat object #' #' @return Returns a `ggplot2` object showing the by-group inflection points and provided #' (or default) rank threshold values in grey. #' #' @importFrom methods slot #' @importFrom cowplot theme_cowplot #' @importFrom ggplot2 ggplot geom_line geom_vline aes_string #' #' @export #' @concept visualization #' #' @author Robert A. Amezquita, \email{robert.amezquita@fredhutch.org} #' @seealso \code{\link{CalculateBarcodeInflections}} \code{\link{SubsetByBarcodeInflections}} #' #' @examples #' data("pbmc_small") #' pbmc_small <- CalculateBarcodeInflections(pbmc_small, group.column = 'groups') #' BarcodeInflectionsPlot(pbmc_small) #' BarcodeInflectionsPlot <- function(object) { cbi.data <- Tool(object = object, slot = 'CalculateBarcodeInflections') if (is.null(x = cbi.data)) { stop("Barcode inflections not calculated, please run CalculateBarcodeInflections") } ## Extract necessary data frames inflection_points <- cbi.data$inflection_points barcode_distribution <- cbi.data$barcode_distribution threshold_values <- cbi.data$threshold_values # Set a cap to max rank to avoid plot being overextended if (threshold_values$rank[[2]] > max(barcode_distribution$rank, na.rm = TRUE)) { threshold_values$rank[[2]] <- max(barcode_distribution$rank, na.rm = TRUE) } ## Infer the grouping/barcode variables group_var <- colnames(x = barcode_distribution)[1] barcode_var <- colnames(x = barcode_distribution)[2] barcode_distribution[, barcode_var] <- log10(x = barcode_distribution[, barcode_var] + 1) ## Make the plot plot <- ggplot( data = barcode_distribution, mapping = aes_string( x = 'rank', y = barcode_var, group = group_var, colour = group_var ) ) + geom_line() + geom_vline( data = threshold_values, aes_string(xintercept = 'rank'), linetype = "dashed", colour = 'grey60', size = 0.5 ) + geom_vline( data = inflection_points, mapping = aes_string( xintercept = 'rank', group = group_var, colour = group_var ), linetype = "dashed" ) + theme_cowplot() return(plot) } #' Dot plot visualization #' #' Intuitive way of visualizing how feature expression changes across different #' identity classes (clusters). The size of the dot encodes the percentage of #' cells within a class, while the color encodes the AverageExpression level #' across all cells within a class (blue is high). #' #' @param object Seurat object #' @param assay Name of assay to use, defaults to the active assay #' @param features Input vector of features, or named list of feature vectors #' if feature-grouped panels are desired (replicates the functionality of the #' old SplitDotPlotGG) #' @param cols Colors to plot: the name of a palette from #' \code{RColorBrewer::brewer.pal.info}, a pair of colors defining a gradient, #' or 3+ colors defining multiple gradients (if split.by is set) #' @param col.min Minimum scaled average expression threshold (everything #' smaller will be set to this) #' @param col.max Maximum scaled average expression threshold (everything larger #' will be set to this) #' @param dot.min The fraction of cells at which to draw the smallest dot #' (default is 0). 
All cell groups with less than this expressing the given #' gene will have no dot drawn. #' @param dot.scale Scale the size of the points, similar to cex #' @param idents Identity classes to include in plot (default is all) #' @param group.by Factor to group the cells by #' @param split.by A factor in object metadata to split the plot by, pass 'ident' #' to split by cell identity' #' see \code{\link{FetchData}} for more details #' @param cluster.idents Whether to order identities by hierarchical clusters #' based on given features, default is FALSE #' @param scale Determine whether the data is scaled, TRUE for default #' @param scale.by Scale the size of the points by 'size' or by 'radius' #' @param scale.min Set lower limit for scaling, use NA for default #' @param scale.max Set upper limit for scaling, use NA for default #' #' @return A ggplot object #' #' @importFrom grDevices colorRampPalette #' @importFrom cowplot theme_cowplot #' @importFrom ggplot2 ggplot aes_string geom_point scale_size scale_radius #' theme element_blank labs scale_color_identity scale_color_distiller #' scale_color_gradient guides guide_legend guide_colorbar #' facet_grid unit #' @importFrom scattermore geom_scattermore #' @importFrom stats dist hclust #' @importFrom RColorBrewer brewer.pal.info #' #' @export #' @concept visualization #' #' @aliases SplitDotPlotGG #' @seealso \code{RColorBrewer::brewer.pal.info} #' #' @examples #' data("pbmc_small") #' cd_genes <- c("CD247", "CD3E", "CD9") #' DotPlot(object = pbmc_small, features = cd_genes) #' pbmc_small[['groups']] <- sample(x = c('g1', 'g2'), size = ncol(x = pbmc_small), replace = TRUE) #' DotPlot(object = pbmc_small, features = cd_genes, split.by = 'groups') #' DotPlot <- function( object, features, assay = NULL, cols = c("lightgrey", "blue"), col.min = -2.5, col.max = 2.5, dot.min = 0, dot.scale = 6, idents = NULL, group.by = NULL, split.by = NULL, cluster.idents = FALSE, scale = TRUE, scale.by = 'radius', scale.min = NA, scale.max = NA ) { assay <- assay %||% DefaultAssay(object = object) DefaultAssay(object = object) <- assay split.colors <- !is.null(x = split.by) && !any(cols %in% rownames(x = brewer.pal.info)) scale.func <- switch( EXPR = scale.by, 'size' = scale_size, 'radius' = scale_radius, stop("'scale.by' must be either 'size' or 'radius'") ) feature.groups <- NULL if (is.list(features) | any(!is.na(names(features)))) { feature.groups <- unlist(x = sapply( X = 1:length(features), FUN = function(x) { return(rep(x = names(x = features)[x], each = length(features[[x]]))) } )) if (any(is.na(x = feature.groups))) { warning( "Some feature groups are unnamed.", call. = FALSE, immediate. 
= TRUE ) } features <- unlist(x = features) names(x = feature.groups) <- features } cells <- unlist(x = CellsByIdentities(object = object, cells = colnames(object[[assay]]), idents = idents)) data.features <- FetchData(object = object, vars = features, cells = cells) data.features$id <- if (is.null(x = group.by)) { Idents(object = object)[cells, drop = TRUE] } else { object[[group.by, drop = TRUE]][cells, drop = TRUE] } if (!is.factor(x = data.features$id)) { data.features$id <- factor(x = data.features$id) } id.levels <- levels(x = data.features$id) data.features$id <- as.vector(x = data.features$id) if (!is.null(x = split.by)) { splits <- FetchData(object = object, vars = split.by)[cells, split.by] if (split.colors) { if (length(x = unique(x = splits)) > length(x = cols)) { stop(paste0("Need to specify at least ", length(x = unique(x = splits)), " colors using the cols parameter")) } cols <- cols[1:length(x = unique(x = splits))] names(x = cols) <- unique(x = splits) } data.features$id <- paste(data.features$id, splits, sep = '_') unique.splits <- unique(x = splits) id.levels <- paste0(rep(x = id.levels, each = length(x = unique.splits)), "_", rep(x = unique(x = splits), times = length(x = id.levels))) } data.plot <- lapply( X = unique(x = data.features$id), FUN = function(ident) { data.use <- data.features[data.features$id == ident, 1:(ncol(x = data.features) - 1), drop = FALSE] avg.exp <- apply( X = data.use, MARGIN = 2, FUN = function(x) { return(mean(x = expm1(x = x))) } ) pct.exp <- apply(X = data.use, MARGIN = 2, FUN = PercentAbove, threshold = 0) return(list(avg.exp = avg.exp, pct.exp = pct.exp)) } ) names(x = data.plot) <- unique(x = data.features$id) if (cluster.idents) { mat <- do.call( what = rbind, args = lapply(X = data.plot, FUN = unlist) ) mat <- scale(x = mat) id.levels <- id.levels[hclust(d = dist(x = mat))$order] } data.plot <- lapply( X = names(x = data.plot), FUN = function(x) { data.use <- as.data.frame(x = data.plot[[x]]) data.use$features.plot <- rownames(x = data.use) data.use$id <- x return(data.use) } ) data.plot <- do.call(what = 'rbind', args = data.plot) if (!is.null(x = id.levels)) { data.plot$id <- factor(x = data.plot$id, levels = id.levels) } ngroup <- length(x = levels(x = data.plot$id)) if (ngroup == 1) { scale <- FALSE warning( "Only one identity present, the expression values will be not scaled", call. = FALSE, immediate. = TRUE ) } else if (ngroup < 5 & scale) { warning( "Scaling data with a low number of groups may produce misleading results", call. = FALSE, immediate. 
= TRUE ) } avg.exp.scaled <- sapply( X = unique(x = data.plot$features.plot), FUN = function(x) { data.use <- data.plot[data.plot$features.plot == x, 'avg.exp'] if (scale) { data.use <- scale(x = log1p(data.use)) data.use <- MinMax(data = data.use, min = col.min, max = col.max) } else { data.use <- log1p(x = data.use) } return(data.use) } ) avg.exp.scaled <- as.vector(x = t(x = avg.exp.scaled)) if (split.colors) { avg.exp.scaled <- as.numeric(x = cut(x = avg.exp.scaled, breaks = 20)) } data.plot$avg.exp.scaled <- avg.exp.scaled data.plot$features.plot <- factor( x = data.plot$features.plot, levels = features ) data.plot$pct.exp[data.plot$pct.exp < dot.min] <- NA data.plot$pct.exp <- data.plot$pct.exp * 100 if (split.colors) { splits.use <- unlist(x = lapply( X = data.plot$id, FUN = function(x) sub( paste0(".*_(", paste(sort(unique(x = splits), decreasing = TRUE), collapse = '|' ),")$"), "\\1", x ) ) ) data.plot$colors <- mapply( FUN = function(color, value) { return(colorRampPalette(colors = c('grey', color))(20)[value]) }, color = cols[splits.use], value = avg.exp.scaled ) } color.by <- ifelse(test = split.colors, yes = 'colors', no = 'avg.exp.scaled') if (!is.na(x = scale.min)) { data.plot[data.plot$pct.exp < scale.min, 'pct.exp'] <- scale.min } if (!is.na(x = scale.max)) { data.plot[data.plot$pct.exp > scale.max, 'pct.exp'] <- scale.max } if (!is.null(x = feature.groups)) { data.plot$feature.groups <- factor( x = feature.groups[data.plot$features.plot], levels = unique(x = feature.groups) ) } plot <- ggplot(data = data.plot, mapping = aes_string(x = 'features.plot', y = 'id')) + geom_point(mapping = aes_string(size = 'pct.exp', color = color.by)) + scale.func(range = c(0, dot.scale), limits = c(scale.min, scale.max)) + theme(axis.title.x = element_blank(), axis.title.y = element_blank()) + guides(size = guide_legend(title = 'Percent Expressed')) + labs( x = 'Features', y = ifelse(test = is.null(x = split.by), yes = 'Identity', no = 'Split Identity') ) + theme_cowplot() if (!is.null(x = feature.groups)) { plot <- plot + facet_grid( facets = ~feature.groups, scales = "free_x", space = "free_x", switch = "y" ) + theme( panel.spacing = unit(x = 1, units = "lines"), strip.background = element_blank() ) } if (split.colors) { plot <- plot + scale_color_identity() } else if (length(x = cols) == 1) { plot <- plot + scale_color_distiller(palette = cols) } else { plot <- plot + scale_color_gradient(low = cols[1], high = cols[2]) } if (!split.colors) { plot <- plot + guides(color = guide_colorbar(title = 'Average Expression')) } return(plot) } #' Quickly Pick Relevant Dimensions #' #' Plots the standard deviations (or approximate singular values if running PCAFast) #' of the principle components for easy identification of an elbow in the graph. 
#' This elbow often corresponds well with the significant dims and is much faster to run than #' Jackstraw #' #' @param object Seurat object #' @param ndims Number of dimensions to plot standard deviation for #' @param reduction Reduction technique to plot standard deviation for #' #' @return A ggplot object #' #' @importFrom cowplot theme_cowplot #' @importFrom ggplot2 ggplot aes_string geom_point labs element_line #' @export #' @concept visualization #' #' @examples #' data("pbmc_small") #' ElbowPlot(object = pbmc_small) #' ElbowPlot <- function(object, ndims = 20, reduction = 'pca') { data.use <- Stdev(object = object, reduction = reduction) if (length(x = data.use) == 0) { stop(paste("No standard deviation info stored for", reduction)) } if (ndims > length(x = data.use)) { warning("The object only has information for ", length(x = data.use), " reductions") ndims <- length(x = data.use) } stdev <- 'Standard Deviation' plot <- ggplot(data = data.frame(dims = 1:ndims, stdev = data.use[1:ndims])) + geom_point(mapping = aes_string(x = 'dims', y = 'stdev')) + labs( x = gsub( pattern = '_$', replacement = '', x = Key(object = object[[reduction]]) ), y = stdev ) + theme_cowplot() return(plot) } #' Boxplot of correlation of a variable (e.g. number of UMIs) with expression #' data #' #' @param object Seurat object #' @param assay Assay where the feature grouping info and correlations are #' stored #' @param feature.group Name of the column in meta.features where the feature #' grouping info is stored #' @param cor Name of the column in meta.features where correlation info is #' stored #' #' @return Returns a ggplot boxplot of correlations split by group #' #' @importFrom ggplot2 geom_boxplot scale_fill_manual geom_hline #' @importFrom cowplot theme_cowplot #' @importFrom scales brewer_pal #' @importFrom stats complete.cases #' #' @export #' @concept visualization #' GroupCorrelationPlot <- function( object, assay = NULL, feature.group = "feature.grp", cor = "nCount_RNA_cor" ) { assay <- assay %||% DefaultAssay(object = object) data <- object[[assay]][c(feature.group, cor)] data <- data[complete.cases(data), ] colnames(x = data) <- c('grp', 'cor') data$grp <- as.character(data$grp) plot <- ggplot(data = data, aes_string(x = "grp", y = "cor", fill = "grp")) + geom_boxplot() + theme_cowplot() + scale_fill_manual(values = rev(x = brewer_pal(palette = 'YlOrRd')(n = 7))) + ylab(paste( "Correlation with", gsub(x = cor, pattern = "_cor", replacement = "") )) + geom_hline(yintercept = 0) + NoLegend() + theme( axis.line.x = element_blank(), axis.title.x = element_blank(), axis.ticks.x = element_blank(), axis.text.x = element_blank() ) return(plot) } #' JackStraw Plot #' #' Plots the results of the JackStraw analysis for PCA significance. For each #' PC, plots a QQ-plot comparing the distribution of p-values for all genes #' across each PC, compared with a uniform distribution. Also determines a #' p-value for the overall significance of each PC (see Details). #' #' Significant PCs should show a p-value distribution (black curve) that is #' strongly skewed to the left compared to the null distribution (dashed line) #' The p-value for each PC is based on a proportion test comparing the number #' of genes with a p-value below a particular threshold (score.thresh), compared with the #' proportion of genes expected under a uniform distribution of p-values. #' #' @param object Seurat object #' @param dims Dims to plot #' @param cols Vector of colors, each color corresponds to an individual PC. 
This may also be a single character #' or numeric value corresponding to a palette as specified by \code{\link[RColorBrewer]{brewer.pal.info}}. #' By default, ggplot2 assigns colors. We also include a number of palettes from the pals package. #' See \code{\link{DiscretePalette}} for details. #' @param reduction reduction to pull jackstraw info from #' @param xmax X-axis maximum on each QQ plot. #' @param ymax Y-axis maximum on each QQ plot. #' #' @return A ggplot object #' #' @author Omri Wurtzel #' @seealso \code{\link{ScoreJackStraw}} #' #' @importFrom stats qunif #' @importFrom scales hue_pal #' @importFrom ggplot2 ggplot aes_string stat_qq labs xlim ylim #' coord_flip geom_abline guides guide_legend #' @importFrom cowplot theme_cowplot #' #' @export #' @concept visualization #' #' @examples #' data("pbmc_small") #' JackStrawPlot(object = pbmc_small) #' JackStrawPlot <- function( object, dims = 1:5, cols = NULL, reduction = 'pca', xmax = 0.1, ymax = 0.3 ) { pAll <- JS(object = object[[reduction]], slot = 'empirical') if (max(dims) > ncol(x = pAll)) { stop("Max dimension is ", ncol(x = pAll)) } pAll <- pAll[, dims, drop = FALSE] pAll <- as.data.frame(x = pAll) data.plot <- Melt(x = pAll) colnames(x = data.plot) <- c("Contig", "PC", "Value") score.df <- JS(object = object[[reduction]], slot = 'overall') if (nrow(x = score.df) < max(dims)) { stop("Jackstraw procedure not scored for all the provided dims. Please run ScoreJackStraw.") } score.df <- score.df[dims, , drop = FALSE] if (nrow(x = score.df) == 0) { stop(paste0("JackStraw hasn't been scored. Please run ScoreJackStraw before plotting.")) } data.plot$PC.Score <- rep( x = paste0("PC ", score.df[ ,"PC"], ": ", sprintf("%1.3g", score.df[ ,"Score"])), each = length(x = unique(x = data.plot$Contig)) ) data.plot$PC.Score <- factor( x = data.plot$PC.Score, levels = paste0("PC ", score.df[, "PC"], ": ", sprintf("%1.3g", score.df[, "Score"])) ) if (is.null(x = cols)) { cols <- hue_pal()(length(x = dims)) } if (length(x = cols) < length(x = dims)) { stop("Not enough colors for the number of dims selected") } gp <- ggplot(data = data.plot, mapping = aes_string(sample = 'Value', color = 'PC.Score')) + stat_qq(distribution = qunif) + labs(x = "Theoretical [runif(1000)]", y = "Empirical") + scale_color_manual(values = cols) + xlim(0, ymax) + ylim(0, xmax) + coord_flip() + geom_abline(intercept = 0, slope = 1, linetype = "dashed", na.rm = TRUE) + guides(color = guide_legend(title = "PC: p-value")) + theme_cowplot() return(gp) } #' Plot clusters as a tree #' #' Plots previously computed tree (from BuildClusterTree) #' #' @param object Seurat object #' @param direction A character string specifying the direction of the tree (default is downwards) #' Possible options: "rightwards", "leftwards", "upwards", and "downwards". #' @param \dots Additional arguments to #' \code{\link[ape:plot.phylo]{ape::plot.phylo}} #' #' @return Plots dendogram (must be precomputed using BuildClusterTree), returns no value #' #' @export #' @concept visualization #' #' @examples #' \dontrun{ #' if (requireNamespace("ape", quietly = TRUE)) { #' data("pbmc_small") #' pbmc_small <- BuildClusterTree(object = pbmc_small) #' PlotClusterTree(object = pbmc_small) #' } #' } PlotClusterTree <- function(object, direction = "downwards", ...) { if (!PackageCheck('ape', error = FALSE)) { stop(cluster.ape, call. 
= FALSE) } if (is.null(x = Tool(object = object, slot = "BuildClusterTree"))) { stop("Phylogenetic tree does not exist, build using BuildClusterTree") } data.tree <- Tool(object = object, slot = "BuildClusterTree") ape::plot.phylo(x = data.tree, direction = direction, ...) ape::nodelabels() } #' Visualize Dimensional Reduction genes #' #' Visualize top genes associated with reduction components #' #' @param object Seurat object #' @param reduction Reduction technique to visualize results for #' @param dims Number of dimensions to display #' @param nfeatures Number of genes to display #' @param col Color of points to use #' @param projected Use reduction values for full dataset (i.e. projected #' dimensional reduction values) #' @param balanced Return an equal number of genes with + and - scores. If #' FALSE (default), returns the top genes ranked by the scores absolute values #' @param ncol Number of columns to display #' @param combine Combine plots into a single \code{patchwork} #' ggplot object. If \code{FALSE}, return a list of ggplot objects #' #' @return A \code{patchwork} ggplot object if #' \code{combine = TRUE}; otherwise, a list of ggplot objects #' #' @importFrom patchwork wrap_plots #' @importFrom cowplot theme_cowplot #' @importFrom ggplot2 ggplot aes_string geom_point labs #' @export #' @concept visualization #' #' @examples #' data("pbmc_small") #' VizDimLoadings(object = pbmc_small) #' VizDimLoadings <- function( object, dims = 1:5, nfeatures = 30, col = 'blue', reduction = 'pca', projected = FALSE, balanced = FALSE, ncol = NULL, combine = TRUE ) { if (is.null(x = ncol)) { ncol <- 2 if (length(x = dims) == 1) { ncol <- 1 } if (length(x = dims) > 6) { ncol <- 3 } if (length(x = dims) > 9) { ncol <- 4 } } loadings <- Loadings(object = object[[reduction]], projected = projected) features <- lapply( X = dims, FUN = TopFeatures, object = object[[reduction]], nfeatures = nfeatures, projected = projected, balanced = balanced ) features <- lapply( X = features, FUN = unlist, use.names = FALSE ) loadings <- loadings[unlist(x = features), dims, drop = FALSE] names(x = features) <- colnames(x = loadings) <- as.character(x = dims) plots <- lapply( X = as.character(x = dims), FUN = function(i) { data.plot <- as.data.frame(x = loadings[features[[i]], i, drop = FALSE]) colnames(x = data.plot) <- paste0(Key(object = object[[reduction]]), i) data.plot$feature <- factor(x = rownames(x = data.plot), levels = rownames(x = data.plot)) plot <- ggplot( data = data.plot, mapping = aes_string(x = colnames(x = data.plot)[1], y = 'feature') ) + geom_point(col = col) + labs(y = NULL) + theme_cowplot() return(plot) } ) if (combine) { plots <- wrap_plots(plots, ncol = ncol) } return(plots) } #%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% # Exported utility functions #%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% #' Augments ggplot2-based plot with a PNG image. #' #' Creates "vector-friendly" plots. Does this by saving a copy of the plot as a PNG file, #' then adding the PNG image with \code{\link[ggplot2]{annotation_raster}} to a blank plot #' of the same dimensions as \code{plot}. Please note: original legends and axes will be lost #' during augmentation. 
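#' As a rough sketch of the intended workflow (assuming \code{pbmc_small} and
#' that a vector-format file is wanted), the augmented plot can be saved without
#' embedding every individual point:
#' \preformatted{
#' plot <- DimPlot(pbmc_small)
#' ggplot2::ggsave("dimplot.pdf", plot = AugmentPlot(plot, dpi = 300))
#' }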
#' #' @param plot A ggplot object #' @param width,height Width and height of PNG version of plot #' @param dpi Plot resolution #' #' @return A ggplot object #' #' @importFrom png readPNG #' @importFrom ggplot2 ggplot_build ggsave ggplot aes_string geom_blank annotation_raster ggtitle #' #' @export #' @concept visualization #' #' @examples #' \dontrun{ #' data("pbmc_small") #' plot <- DimPlot(object = pbmc_small) #' AugmentPlot(plot = plot) #' } #' AugmentPlot <- function(plot, width = 10, height = 10, dpi = 100) { pbuild.params <- ggplot_build(plot = plot)$layout$panel_params[[1]] range.values <- c( pbuild.params$x.range, pbuild.params$y.range ) xyparams <- GetXYAesthetics( plot = plot, geom = class(x = plot$layers[[1]]$geom)[1] ) title <- plot$labels$title tmpfile <- tempfile(fileext = '.png') ggsave( filename = tmpfile, plot = plot + NoLegend() + NoAxes() + theme(plot.title = element_blank()), width = width, height = height, dpi = dpi ) img <- readPNG(source = tmpfile) file.remove(tmpfile) blank <- ggplot( data = plot$data, mapping = aes_string(x = xyparams$x, y = xyparams$y) ) + geom_blank() blank <- blank + plot$theme + ggtitle(label = title) blank <- blank + annotation_raster( raster = img, xmin = range.values[1], xmax = range.values[2], ymin = range.values[3], ymax = range.values[4] ) return(blank) } #' Automagically calculate a point size for ggplot2-based scatter plots #' #' It happens to look good #' #' @param data A data frame being passed to ggplot2 #' @param raster If TRUE, point size is set to 1 #' #' @return The "optimal" point size for visualizing these data #' #' @export #' @concept visualization #' #' @examples #' df <- data.frame(x = rnorm(n = 10000), y = runif(n = 10000)) #' AutoPointSize(data = df) #' AutoPointSize <- function(data, raster = NULL) { return(ifelse( test = isTRUE(x = raster), yes = 1, no = min(1583 / nrow(x = data), 1) )) } #' Determine text color based on background color #' #' @param background A vector of background colors; supports R color names and #' hexadecimal codes #' @param threshold Intensity threshold for light/dark cutoff; intensities #' greater than \code{theshold} yield \code{dark}, others yield \code{light} #' @param w3c Use \href{https://www.w3.org/TR/WCAG20/}{W3C} formula for calculating #' background text color; ignores \code{threshold} #' @param dark Color for dark text #' @param light Color for light text #' #' @return A named vector of either \code{dark} or \code{light}, depending on #' \code{background}; names of vector are \code{background} #' #' @export #' @concept visualization #' #' @source \url{https://stackoverflow.com/questions/3942878/how-to-decide-font-color-in-white-or-black-depending-on-background-color} #' #' @examples #' BGTextColor(background = c('black', 'white', '#E76BF3')) #' BGTextColor <- function( background, threshold = 186, w3c = FALSE, dark = 'black', light = 'white' ) { if (w3c) { luminance <- Luminance(color = background) threshold <- 179 return(ifelse( test = luminance > sqrt(x = 1.05 * 0.05) - 0.05, yes = dark, no = light )) } return(ifelse( test = Intensity(color = background) > threshold, yes = dark, no = light )) } #' @export #' @concept visualization #' #' @rdname CustomPalette #' @aliases BlackAndWhite #' #' @examples #' df <- data.frame(x = rnorm(n = 100, mean = 20, sd = 2), y = rbinom(n = 100, size = 100, prob = 0.2)) #' plot(df, col = BlackAndWhite()) #' BlackAndWhite <- function(mid = NULL, k = 50) { return(CustomPalette(low = "white", high = "black", mid = mid, k = k)) } #' @export #' @concept 
visualization #' #' @rdname CustomPalette #' @aliases BlueAndRed #' #' @examples #' df <- data.frame(x = rnorm(n = 100, mean = 20, sd = 2), y = rbinom(n = 100, size = 100, prob = 0.2)) #' plot(df, col = BlueAndRed()) #' BlueAndRed <- function(k = 50) { return(CustomPalette(low = "#313695" , high = "#A50026", mid = "#FFFFBF", k = k)) } #' Cell Selector #' #' Select points on a scatterplot and get information about them #' #' @param plot A ggplot2 plot #' @param object An optional Seurat object; if passes, will return an object #' with the identities of selected cells set to \code{ident} #' @param ident An optional new identity class to assign the selected cells #' @param ... Ignored #' #' @return If \code{object} is \code{NULL}, the names of the points selected; #' otherwise, a Seurat object with the selected cells identity classes set to #' \code{ident} #' #' @importFrom miniUI miniPage gadgetTitleBar miniTitleBarButton #' miniContentPanel #' @importFrom shiny fillRow plotOutput brushOpts reactiveValues observeEvent #' stopApp brushedPoints renderPlot runGadget #' #' @export #' @concept visualization #' #' @seealso \code{\link{DimPlot}} \code{\link{FeaturePlot}} #' #' @examples #' \dontrun{ #' data("pbmc_small") #' plot <- DimPlot(object = pbmc_small) #' # Follow instructions in the terminal to select points #' cells.located <- CellSelector(plot = plot) #' cells.located #' # Automatically set the identity class of selected cells and return a new Seurat object #' pbmc_small <- CellSelector(plot = plot, object = pbmc_small, ident = 'SelectedCells') #' } #' CellSelector <- function(plot, object = NULL, ident = 'SelectedCells', ...) { # Set up the gadget UI ui <- miniPage( gadgetTitleBar( title = "Cell Selector", left = miniTitleBarButton(inputId = "reset", label = "Reset") ), miniContentPanel( fillRow( plotOutput( outputId = "plot", height = '100%', brush = brushOpts( id = 'brush', delay = 100, delayType = 'debounce', clip = TRUE, resetOnNew = FALSE ) ) ), ) ) # Get some plot information if (inherits(x = plot, what = 'patchwork')) { if (length(x = plot$patches$plots)) { warning( "Multiple plots passed, using last plot", call. = FALSE, immediate. 
= TRUE ) } class(x = plot) <- grep( pattern = 'patchwork', x = class(x = plot), value = TRUE, invert = TRUE ) } xy.aes <- GetXYAesthetics(plot = plot) dark.theme <- !is.null(x = plot$theme$plot.background$fill) && plot$theme$plot.background$fill == 'black' plot.data <- GGpointToBase(plot = plot, do.plot = FALSE) plot.data$selected_ <- FALSE rownames(x = plot.data) <- rownames(x = plot$data) colnames(x = plot.data) <- gsub( pattern = '-', replacement = '.', x = colnames(x = plot.data) ) # Server function server <- function(input, output, session) { plot.env <- reactiveValues(data = plot.data) # Event handlers observeEvent( eventExpr = input$done, handlerExpr = { PlotBuild(data = plot.env$data, dark.theme = dark.theme) selected <- rownames(x = plot.data)[plot.env$data$selected_] if (inherits(x = object, what = 'Seurat')) { if (!all(selected %in% Cells(x = object))) { stop("Cannot find the selected cells in the Seurat object, please be sure you pass the same object used to generate the plot") } Idents(object = object, cells = selected) <- ident selected <- object } stopApp(returnValue = selected) } ) observeEvent( eventExpr = input$reset, handlerExpr = { plot.env$data <- plot.data session$resetBrush(brushId = 'brush') } ) observeEvent( eventExpr = input$brush, handlerExpr = { plot.env$data <- brushedPoints( df = plot.data, brush = input$brush, xvar = xy.aes$x, yvar = xy.aes$y, allRows = TRUE ) plot.env$data$color <- ifelse( test = plot.env$data$selected_, yes = '#DE2D26', no = '#C3C3C3' ) } ) # Render the plot output$plot <- renderPlot(expr = PlotBuild( data = plot.env$data, dark.theme = dark.theme )) } return(runGadget(app = ui, server = server)) } #' Move outliers towards center on dimension reduction plot #' #' @param object Seurat object #' @param reduction Name of DimReduc to adjust #' @param dims Dimensions to visualize #' @param group.by Group (color) cells in different ways (for example, orig.ident) #' @param outlier.sd Controls the outlier distance #' @param reduction.key Key for DimReduc that is returned #' #' @return Returns a DimReduc object with the modified embeddings #' #' @export #' @concept visualization #' #' @examples #' \dontrun{ #' data("pbmc_small") #' pbmc_small <- FindClusters(pbmc_small, resolution = 1.1) #' pbmc_small <- RunUMAP(pbmc_small, dims = 1:5) #' DimPlot(pbmc_small, reduction = "umap") #' pbmc_small[["umap_new"]] <- CollapseEmbeddingOutliers(pbmc_small, #' reduction = "umap", reduction.key = 'umap_', outlier.sd = 0.5) #' DimPlot(pbmc_small, reduction = "umap_new") #' } #' CollapseEmbeddingOutliers <- function( object, reduction = 'umap', dims = 1:2, group.by = 'ident', outlier.sd = 2, reduction.key = 'UMAP_' ) { embeddings <- Embeddings(object = object[[reduction]])[, dims] idents <- FetchData(object = object, vars = group.by) data.medians <- sapply(X = dims, FUN = function(x) { tapply(X = embeddings[, x], INDEX = idents, FUN = median) }) data.sd <- apply(X = data.medians, MARGIN = 2, FUN = sd) data.medians.scale <- as.matrix(x = scale(x = data.medians, center = TRUE, scale = TRUE)) data.medians.scale[abs(x = data.medians.scale) < outlier.sd] <- 0 data.medians.scale <- sign(x = data.medians.scale) * (abs(x = data.medians.scale) - outlier.sd) data.correct <- Sweep( x = data.medians.scale, MARGIN = 2, STATS = data.sd, FUN = "*" ) data.correct <- data.correct[abs(x = apply(X = data.correct, MARGIN = 1, FUN = min)) > 0, ] new.embeddings <- embeddings for (i in rownames(x = data.correct)) { cells.correct <- rownames(x = idents)[idents[, "ident"] == i] 
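# Shift every cell in this outlier group by the group's excess median offset
# (converted back into embedding units via the per-dimension SD of the group
# medians), pulling the group back toward the main cloud of points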
new.embeddings[cells.correct, ] <- Sweep( x = new.embeddings[cells.correct,], MARGIN = 2, STATS = data.correct[i, ], FUN = "-" ) } reduc <- CreateDimReducObject( embeddings = new.embeddings, loadings = Loadings(object = object[[reduction]]), assay = slot(object = object[[reduction]], name = "assay.used"), key = reduction.key ) return(reduc) } #' Combine ggplot2-based plots into a single plot #' #' @param plots A list of gg objects #' @param ncol Number of columns #' @param legend Combine legends into a single legend #' choose from 'right' or 'bottom'; pass 'none' to remove legends, or \code{NULL} #' to leave legends as they are #' @param ... Extra parameters passed to plot_grid #' #' @return A combined plot #' #' @importFrom cowplot plot_grid get_legend #' @export #' @concept visualization #' #' @examples #' data("pbmc_small") #' pbmc_small[['group']] <- sample( #' x = c('g1', 'g2'), #' size = ncol(x = pbmc_small), #' replace = TRUE #' ) #' plot1 <- FeaturePlot( #' object = pbmc_small, #' features = 'MS4A1', #' split.by = 'group' #' ) #' plot2 <- FeaturePlot( #' object = pbmc_small, #' features = 'FCN1', #' split.by = 'group' #' ) #' CombinePlots( #' plots = list(plot1, plot2), #' legend = 'none', #' nrow = length(x = unique(x = pbmc_small[['group', drop = TRUE]])) #' ) #' CombinePlots <- function(plots, ncol = NULL, legend = NULL, ...) { .Deprecated(msg = "CombinePlots is being deprecated. Plots should now be combined using the patchwork system.") plots.combined <- if (length(x = plots) > 1) { if (!is.null(x = legend)) { if (legend != 'none') { plot.legend <- get_legend(plot = plots[[1]] + theme(legend.position = legend)) } plots <- lapply( X = plots, FUN = function(x) { return(x + NoLegend()) } ) } plots.combined <- plot_grid( plotlist = plots, ncol = ncol, align = 'hv', ... ) if (!is.null(x = legend)) { plots.combined <- switch( EXPR = legend, 'bottom' = plot_grid( plots.combined, plot.legend, ncol = 1, rel_heights = c(1, 0.2) ), 'right' = plot_grid( plots.combined, plot.legend, rel_widths = c(3, 0.3) ), plots.combined ) } plots.combined } else { plots[[1]] } return(plots.combined) } #' Create a custom color palette #' #' Creates a custom color palette based on low, middle, and high color values #' #' @param low low color #' @param high high color #' @param mid middle color. Optional. 
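#' For example, \code{CustomPalette(low = "blue", mid = "white", high = "red", k = 20)}
#' builds a diverging blue-white-red palette.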
#' @param k number of steps (colors levels) to include between low and high values #' #' @return A color palette for plotting #' #' @importFrom grDevices col2rgb rgb #' @export #' @concept visualization #' #' @rdname CustomPalette #' @examples #' myPalette <- CustomPalette() #' myPalette #' CustomPalette <- function( low = "white", high = "red", mid = NULL, k = 50 ) { low <- col2rgb(col = low) / 255 high <- col2rgb(col = high) / 255 if (is.null(x = mid)) { r <- seq(from = low[1], to = high[1], len = k) g <- seq(from = low[2], to = high[2], len = k) b <- seq(from = low[3], to = high[3], len = k) } else { k2 <- round(x = k / 2) mid <- col2rgb(col = mid) / 255 r <- c( seq(from = low[1], to = mid[1], len = k2), seq(from = mid[1], to = high[1], len = k2) ) g <- c( seq(from = low[2], to = mid[2], len = k2), seq(from = mid[2], to = high[2],len = k2) ) b <- c( seq(from = low[3], to = mid[3], len = k2), seq(from = mid[3], to = high[3], len = k2) ) } return(rgb(red = r, green = g, blue = b)) } #' Discrete colour palettes from pals #' #' These are included here because pals depends on a number of compiled #' packages, and this can lead to increases in run time for Travis, #' and generally should be avoided when possible. #' #' These palettes are a much better default for data with many classes #' than the default ggplot2 palette. #' #' Many thanks to Kevin Wright for writing the pals package. #' #' @param n Number of colours to be generated. #' @param palette Options are #' "alphabet", "alphabet2", "glasbey", "polychrome", "stepped", and "parade". #' Can be omitted and the function will use the one based on the requested n. #' @param shuffle Shuffle the colors in the selected palette. #' #' @return A vector of colors #' #' @details #' Taken from the pals package (Licence: GPL-3). 
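#' A typical use (a sketch, assuming a clustered Seurat object \code{obj} with
#' up to 36 clusters) is to pass the palette directly to a plotting function:
#' \preformatted{
#' DimPlot(obj, cols = DiscretePalette(36, palette = "polychrome"))
#' }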
#' \url{https://cran.r-project.org/package=pals} #' Credit: Kevin Wright #' #' @export #' @concept visualization #' DiscretePalette <- function(n, palette = NULL, shuffle = FALSE) { palettes <- list( alphabet = c( "#F0A0FF", "#0075DC", "#993F00", "#4C005C", "#191919", "#005C31", "#2BCE48", "#FFCC99", "#808080", "#94FFB5", "#8F7C00", "#9DCC00", "#C20088", "#003380", "#FFA405", "#FFA8BB", "#426600", "#FF0010", "#5EF1F2", "#00998F", "#E0FF66", "#740AFF", "#990000", "#FFFF80", "#FFE100", "#FF5005" ), alphabet2 = c( "#AA0DFE", "#3283FE", "#85660D", "#782AB6", "#565656", "#1C8356", "#16FF32", "#F7E1A0", "#E2E2E2", "#1CBE4F", "#C4451C", "#DEA0FD", "#FE00FA", "#325A9B", "#FEAF16", "#F8A19F", "#90AD1C", "#F6222E", "#1CFFCE", "#2ED9FF", "#B10DA1", "#C075A6", "#FC1CBF", "#B00068", "#FBE426", "#FA0087" ), glasbey = c( "#0000FF", "#FF0000", "#00FF00", "#000033", "#FF00B6", "#005300", "#FFD300", "#009FFF", "#9A4D42", "#00FFBE", "#783FC1", "#1F9698", "#FFACFD", "#B1CC71", "#F1085C", "#FE8F42", "#DD00FF", "#201A01", "#720055", "#766C95", "#02AD24", "#C8FF00", "#886C00", "#FFB79F", "#858567", "#A10300", "#14F9FF", "#00479E", "#DC5E93", "#93D4FF", "#004CFF", "#F2F318" ), polychrome = c( "#5A5156", "#E4E1E3", "#F6222E", "#FE00FA", "#16FF32", "#3283FE", "#FEAF16", "#B00068", "#1CFFCE", "#90AD1C", "#2ED9FF", "#DEA0FD", "#AA0DFE", "#F8A19F", "#325A9B", "#C4451C", "#1C8356", "#85660D", "#B10DA1", "#FBE426", "#1CBE4F", "#FA0087", "#FC1CBF", "#F7E1A0", "#C075A6", "#782AB6", "#AAF400", "#BDCDFF", "#822E1C", "#B5EFB5", "#7ED7D1", "#1C7F93", "#D85FF7", "#683B79", "#66B0FF", "#3B00FB" ), stepped = c( "#990F26", "#B33E52", "#CC7A88", "#E6B8BF", "#99600F", "#B3823E", "#CCAA7A", "#E6D2B8", "#54990F", "#78B33E", "#A3CC7A", "#CFE6B8", "#0F8299", "#3E9FB3", "#7ABECC", "#B8DEE6", "#3D0F99", "#653EB3", "#967ACC", "#C7B8E6", "#333333", "#666666", "#999999", "#CCCCCC" ), parade = c( '#ff6969', '#9b37ff', '#cd3737', '#69cdff', '#ffff69', '#69cdcd', '#9b379b', '#3737cd', '#ffff9b', '#cdff69', '#ff9b37', '#37ffff', '#9b69ff', '#37cd69', '#ff3769', '#ff3737', '#37ff9b', '#cdcd37', '#3769cd', '#37cdff', '#9b3737', '#ff699b', '#9b9bff', '#cd9b37', '#69ff37', '#cd3769', '#cd69cd', '#cd6937', '#3737ff', '#cdcd69', '#ff9b69', '#cd37cd', '#9bff37', '#cd379b', '#cd6969', '#69ff9b', '#ff379b', '#9bff9b', '#6937ff', '#69cd37', '#cdff37', '#9bff69', '#9b37cd', '#ff37ff', '#ff37cd', '#ffff37', '#37cd9b', '#379bff', '#ffcd37', '#379b37', '#ff9bff', '#379b9b', '#69ffcd', '#379bcd', '#ff69ff', '#ff9b9b', '#37ff69', '#ff6937', '#6969ff', '#699bff', '#ffcd69', '#69ffff', '#37ff37', '#6937cd', '#37cd37', '#3769ff', '#cd69ff', '#6969cd', '#9bcd37', '#69ff69', '#37cdcd', '#cd37ff', '#37379b', '#37ffcd', '#69cd69', '#ff69cd', '#9bffff', '#9b9b37' ) ) if (is.null(x = n)) { return(names(x = palettes)) } if (is.null(x = palette)) { if (n <= 26) { palette <- "alphabet" } else if (n <= 32) { palette <- "glasbey" } else { palette <- "polychrome" } } palette.vec <- palettes[[palette]] if (n > length(x = palette.vec)) { warning("Not enough colours in specified palette") } if (isTRUE(shuffle)) { palette.vec <- sample(palette.vec) } palette <- palette.vec[seq_len(length.out = n)] return(palette) } #' @rdname CellSelector #' @export #' @concept visualization #' FeatureLocator <- function(plot, ...) 
{ .Defunct( new = 'CellSelector', package = 'Seurat', msg = "'FeatureLocator' has been replaced by 'CellSelector'" ) } #' Hover Locator #' #' Get quick information from a scatterplot by hovering over points #' #' @param plot A ggplot2 plot #' @param information An optional dataframe or matrix of extra information to be displayed on hover #' @param dark.theme Plot using a dark theme? #' @param axes Display or hide x- and y-axes #' @param ... Extra parameters to be passed to \code{\link[plotly]{layout}} #' #' @importFrom ggplot2 ggplot_build #' @importFrom plotly plot_ly layout add_annotations #' @export #' @concept visualization #' #' @seealso \code{\link[plotly]{layout}} \code{\link[ggplot2]{ggplot_build}} #' \code{\link{DimPlot}} \code{\link{FeaturePlot}} #' #' @examples #' \dontrun{ #' data("pbmc_small") #' plot <- DimPlot(object = pbmc_small) #' HoverLocator(plot = plot, information = FetchData(object = pbmc_small, vars = 'percent.mito')) #' } #' HoverLocator <- function( plot, information = NULL, axes = TRUE, dark.theme = FALSE, ... ) { # Use GGpointToBase because we already have ggplot objects # with colors (which are annoying in plotly) plot.build <- suppressWarnings(expr = GGpointToPlotlyBuild( plot = plot, information = information, ... )) data <- ggplot_build(plot = plot)$plot$data # Set up axis labels here # Also, a bunch of stuff to get axis lines done properly if (axes) { xaxis <- list( title = names(x = data)[1], showgrid = FALSE, zeroline = FALSE, showline = TRUE ) yaxis <- list( title = names(x = data)[2], showgrid = FALSE, zeroline = FALSE, showline = TRUE ) } else { xaxis <- yaxis <- list(visible = FALSE) } # Check for dark theme if (dark.theme) { title <- list(color = 'white') xaxis <- c(xaxis, color = 'white') yaxis <- c(yaxis, color = 'white') plotbg <- 'black' } else { title = list(color = 'black') plotbg = 'white' } # The `~' means pull from the data passed (this is why we reset the names) # Use I() to get plotly to accept the colors from the data as is # Set hoverinfo to 'text' to override the default hover information # rather than append to it p <- layout( p = plot_ly( data = plot.build, x = ~x, y = ~y, type = 'scatter', mode = 'markers', color = ~I(color), hoverinfo = 'text', text = ~feature ), xaxis = xaxis, yaxis = yaxis, title = plot$labels$title, titlefont = title, paper_bgcolor = plotbg, plot_bgcolor = plotbg, ... 
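# Any text layers (GeomText/GeomTextRepel) on the original ggplot are re-added
# below as static plotly annotations so that cluster labels survive the conversion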
) # Add labels label.layer <- which(x = sapply( X = plot$layers, FUN = function(x) { return(inherits(x = x$geom, what = c('GeomText', 'GeomTextRepel'))) } )) if (length(x = label.layer) == 1) { p <- add_annotations( p = p, x = plot$layers[[label.layer]]$data[, 1], y = plot$layers[[label.layer]]$data[, 2], xref = "x", yref = "y", text = plot$layers[[label.layer]]$data[, 3], xanchor = 'right', showarrow = FALSE, font = list(size = plot$layers[[label.layer]]$aes_params$size * 4) ) } return(p) } #' Get the intensity and/or luminance of a color #' #' @param color A vector of colors #' #' @return A vector of intensities/luminances for each color #' #' @name contrast-theory #' @rdname contrast-theory #' #' @importFrom grDevices col2rgb #' #' @export #' @concept visualization #' #' @source \url{https://stackoverflow.com/questions/3942878/how-to-decide-font-color-in-white-or-black-depending-on-background-color} #' #' @examples #' Intensity(color = c('black', 'white', '#E76BF3')) #' Intensity <- function(color) { intensities <- apply( X = col2rgb(col = color), MARGIN = 2, FUN = function(col) { col <- rbind(as.vector(x = col), c(0.299, 0.587, 0.114)) return(sum(apply(X = col, MARGIN = 2, FUN = prod))) } ) names(x = intensities) <- color return(intensities) } #' Label clusters on a ggplot2-based scatter plot #' #' @param plot A ggplot2-based scatter plot #' @param id Name of variable used for coloring scatter plot #' @param clusters Vector of cluster ids to label #' @param labels Custom labels for the clusters #' @param split.by Split labels by some grouping label, useful when using #' \code{\link[ggplot2]{facet_wrap}} or \code{\link[ggplot2]{facet_grid}} #' @param repel Use \code{geom_text_repel} to create nicely-repelled labels #' @param geom Name of geom to get X/Y aesthetic names for #' @param box Use geom_label/geom_label_repel (includes a box around the text #' labels) #' @param position How to place the label if repel = FALSE. If "median", place #' the label at the median position. If "nearest" place the label at the #' position of the nearest data point to the median. #' @param ... Extra parameters to \code{\link[ggrepel]{geom_text_repel}}, such as \code{size} #' #' @return A ggplot2-based scatter plot with cluster labels #' #' @importFrom stats median na.omit #' @importFrom ggrepel geom_text_repel geom_label_repel #' @importFrom ggplot2 aes_string geom_text geom_label layer_scales #' @importFrom RANN nn2 #' #' @export #' @concept visualization #' #' @seealso \code{\link[ggrepel]{geom_text_repel}} \code{\link[ggplot2]{geom_text}} #' #' @examples #' data("pbmc_small") #' plot <- DimPlot(object = pbmc_small) #' LabelClusters(plot = plot, id = 'ident') #' LabelClusters <- function( plot, id, clusters = NULL, labels = NULL, split.by = NULL, repel = TRUE, box = FALSE, geom = 'GeomPoint', position = "median", ... 
) { xynames <- unlist(x = GetXYAesthetics(plot = plot, geom = geom), use.names = TRUE) if (!id %in% colnames(x = plot$data)) { stop("Cannot find variable ", id, " in plotting data") } if (!is.null(x = split.by) && !split.by %in% colnames(x = plot$data)) { warning("Cannot find splitting variable ", id, " in plotting data") split.by <- NULL } data <- plot$data[, c(xynames, id, split.by)] possible.clusters <- as.character(x = na.omit(object = unique(x = data[, id]))) groups <- clusters %||% as.character(x = na.omit(object = unique(x = data[, id]))) if (any(!groups %in% possible.clusters)) { stop("The following clusters were not found: ", paste(groups[!groups %in% possible.clusters], collapse = ",")) } pb <- ggplot_build(plot = plot) if (geom == 'GeomSpatial') { xrange.save <- layer_scales(plot = plot)$x$range$range yrange.save <- layer_scales(plot = plot)$y$range$range data[, xynames["y"]] = max(data[, xynames["y"]]) - data[, xynames["y"]] + min(data[, xynames["y"]]) if (!pb$plot$plot_env$crop) { y.transform <- c(0, nrow(x = pb$plot$plot_env$image)) - pb$layout$panel_params[[1]]$y.range data[, xynames["y"]] <- data[, xynames["y"]] + sum(y.transform) } } data <- cbind(data, color = pb$data[[1]][[1]]) labels.loc <- lapply( X = groups, FUN = function(group) { data.use <- data[data[, id] == group, , drop = FALSE] data.medians <- if (!is.null(x = split.by)) { do.call( what = 'rbind', args = lapply( X = unique(x = data.use[, split.by]), FUN = function(split) { medians <- apply( X = data.use[data.use[, split.by] == split, xynames, drop = FALSE], MARGIN = 2, FUN = median, na.rm = TRUE ) medians <- as.data.frame(x = t(x = medians)) medians[, split.by] <- split return(medians) } ) ) } else { as.data.frame(x = t(x = apply( X = data.use[, xynames, drop = FALSE], MARGIN = 2, FUN = median, na.rm = TRUE ))) } data.medians[, id] <- group data.medians$color <- data.use$color[1] return(data.medians) } ) if (position == "nearest") { labels.loc <- lapply(X = labels.loc, FUN = function(x) { group.data <- data[as.character(x = data[, id]) == as.character(x[3]), ] nearest.point <- nn2(data = group.data[, 1:2], query = as.matrix(x = x[c(1,2)]), k = 1)$nn.idx x[1:2] <- group.data[nearest.point, 1:2] return(x) }) } labels.loc <- do.call(what = 'rbind', args = labels.loc) labels.loc[, id] <- factor(x = labels.loc[, id], levels = levels(data[, id])) labels <- labels %||% groups if (length(x = unique(x = labels.loc[, id])) != length(x = labels)) { stop("Length of labels (", length(x = labels), ") must be equal to the number of clusters being labeled (", length(x = labels.loc), ").") } names(x = labels) <- groups for (group in groups) { labels.loc[labels.loc[, id] == group, id] <- labels[group] } if (box) { geom.use <- ifelse(test = repel, yes = geom_label_repel, no = geom_label) plot <- plot + geom.use( data = labels.loc, mapping = aes_string(x = xynames['x'], y = xynames['y'], label = id, fill = id), show.legend = FALSE, ... ) + scale_fill_manual(values = labels.loc$color[order(labels.loc[, id])]) } else { geom.use <- ifelse(test = repel, yes = geom_text_repel, no = geom_text) plot <- plot + geom.use( data = labels.loc, mapping = aes_string(x = xynames['x'], y = xynames['y'], label = id), show.legend = FALSE, ... 
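# any extra arguments supplied through ... (e.g. size) are forwarded to
# geom_text()/geom_text_repel()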
) } # restore old axis ranges if (geom == 'GeomSpatial') { plot <- suppressMessages(expr = plot + coord_fixed(xlim = xrange.save, ylim = yrange.save)) } return(plot) } #' Add text labels to a ggplot2 plot #' #' @param plot A ggplot2 plot with a GeomPoint layer #' @param points A vector of points to label; if \code{NULL}, will use all points in the plot #' @param labels A vector of labels for the points; if \code{NULL}, will use #' rownames of the data provided to the plot at the points selected #' @param repel Use \code{geom_text_repel} to create a nicely-repelled labels; this #' is slow when a lot of points are being plotted. If using \code{repel}, set \code{xnudge} #' and \code{ynudge} to 0 #' @param xnudge,ynudge Amount to nudge X and Y coordinates of labels by #' @param ... Extra parameters passed to \code{geom_text} #' #' @return A ggplot object #' #' @importFrom ggrepel geom_text_repel #' @importFrom ggplot2 geom_text aes_string #' @export #' @concept visualization #' #' @aliases Labeler #' @seealso \code{\link[ggplot2]{geom_text}} #' #' @examples #' data("pbmc_small") #' ff <- TopFeatures(object = pbmc_small[['pca']]) #' cc <- TopCells(object = pbmc_small[['pca']]) #' plot <- FeatureScatter(object = pbmc_small, feature1 = ff[1], feature2 = ff[2]) #' LabelPoints(plot = plot, points = cc) #' LabelPoints <- function( plot, points, labels = NULL, repel = FALSE, xnudge = 0.3, ynudge = 0.05, ... ) { xynames <- GetXYAesthetics(plot = plot) points <- points %||% rownames(x = plot$data) if (is.numeric(x = points)) { points <- rownames(x = plot$data) } points <- intersect(x = points, y = rownames(x = plot$data)) if (length(x = points) == 0) { stop("Cannot find points provided") } labels <- labels %||% points labels <- as.character(x = labels) label.data <- plot$data[points, ] label.data$labels <- labels geom.use <- ifelse(test = repel, yes = geom_text_repel, no = geom_text) if (repel) { if (!all(c(xnudge, ynudge) == 0)) { message("When using repel, set xnudge and ynudge to 0 for optimal results") } } plot <- plot + geom.use( mapping = aes_string(x = xynames$x, y = xynames$y, label = 'labels'), data = label.data, nudge_x = xnudge, nudge_y = ynudge, ... 
) return(plot) } #' @name contrast-theory #' @rdname contrast-theory #' #' @importFrom grDevices col2rgb #' #' @export #' @concept visualization #' #' @examples #' Luminance(color = c('black', 'white', '#E76BF3')) #' Luminance <- function(color) { luminance <- apply( X = col2rgb(col = color), MARGIN = 2, function(col) { col <- as.vector(x = col) / 255 col <- sapply( X = col, FUN = function(x) { return(ifelse( test = x <= 0.03928, yes = x / 12.92, no = ((x + 0.055) / 1.055) ^ 2.4 )) } ) col <- rbind(col, c(0.2126, 0.7152, 0.0722)) return(sum(apply(X = col, MARGIN = 2, FUN = prod))) } ) names(x = luminance) <- color return(luminance) } #' @export #' @concept visualization #' #' @rdname CustomPalette #' @aliases PurpleAndYellow #' #' @examples #' df <- data.frame(x = rnorm(n = 100, mean = 20, sd = 2), y = rbinom(n = 100, size = 100, prob = 0.2)) #' plot(df, col = PurpleAndYellow()) #' PurpleAndYellow <- function(k = 50) { return(CustomPalette(low = "magenta", high = "yellow", mid = "black", k = k)) } #%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% # Seurat themes #%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% #' Seurat Themes #' #' Various themes to be applied to ggplot2-based plots #' \describe{ #' \item{\code{SeuratTheme}}{The curated Seurat theme, consists of ...} #' \item{\code{DarkTheme}}{A dark theme, axes and text turn to white, the background becomes black} #' \item{\code{NoAxes}}{Removes axis lines, text, and ticks} #' \item{\code{NoLegend}}{Removes the legend} #' \item{\code{FontSize}}{Sets axis and title font sizes} #' \item{\code{NoGrid}}{Removes grid lines} #' \item{\code{SeuratAxes}}{Set Seurat-style axes} #' \item{\code{SpatialTheme}}{A theme designed for spatial visualizations (eg \code{\link{PolyFeaturePlot}}, \code{\link{PolyDimPlot}})} #' \item{\code{RestoreLegend}}{Restore a legend after removal} #' \item{\code{RotatedAxis}}{Rotate X axis text 45 degrees} #' \item{\code{BoldTitle}}{Enlarges and emphasizes the title} #' } #' #' @param ... Extra parameters to be passed to \code{theme} #' #' @return A ggplot2 theme object #' #' @export #' @concept visualization #' #' @rdname SeuratTheme #' @seealso \code{\link[ggplot2]{theme}} #' @aliases SeuratTheme #' SeuratTheme <- function() { return(DarkTheme() + NoLegend() + NoGrid() + SeuratAxes()) } #' @importFrom ggplot2 theme element_text #' #' @rdname SeuratTheme #' @export #' @concept visualization #' #' @aliases CenterTitle #' CenterTitle <- function(...) { return(theme(plot.title = element_text(hjust = 0.5), validate = TRUE, ...)) } #' @importFrom ggplot2 theme element_rect element_text element_line margin #' @export #' @concept visualization #' #' @rdname SeuratTheme #' @aliases DarkTheme #' #' @examples #' # Generate a plot with a dark theme #' library(ggplot2) #' df <- data.frame(x = rnorm(n = 100, mean = 20, sd = 2), y = rbinom(n = 100, size = 100, prob = 0.2)) #' p <- ggplot(data = df, mapping = aes(x = x, y = y)) + geom_point(mapping = aes(color = 'red')) #' p + DarkTheme(legend.position = 'none') #' DarkTheme <- function(...) 
{ # Some constants for easier changing in the future black.background <- element_rect(fill = 'black') black.background.no.border <- element_rect(fill = 'black', size = 0) font.margin <- 4 white.text <- element_text( colour = 'white', margin = margin( t = font.margin, r = font.margin, b = font.margin, l = font.margin ) ) white.line <- element_line(colour = 'white', size = 1) no.line <- element_line(size = 0) # Create the dark theme dark.theme <- theme( # Set background colors plot.background = black.background, panel.background = black.background, legend.background = black.background, legend.box.background = black.background.no.border, legend.key = black.background.no.border, strip.background = element_rect(fill = 'grey50', colour = NA), # Set text colors plot.title = white.text, plot.subtitle = white.text, axis.title = white.text, axis.text = white.text, legend.title = white.text, legend.text = white.text, strip.text = white.text, # Set line colors axis.line.x = white.line, axis.line.y = white.line, panel.grid = no.line, panel.grid.minor = no.line, # Validate the theme validate = TRUE, # Extra parameters ... ) return(dark.theme) } #' @param x.text,y.text X and Y axis text sizes #' @param x.title,y.title X and Y axis title sizes #' @param main Plot title size #' #' @importFrom ggplot2 theme element_text #' @export #' @concept visualization #' #' @rdname SeuratTheme #' @aliases FontSize #' FontSize <- function( x.text = NULL, y.text = NULL, x.title = NULL, y.title = NULL, main = NULL, ... ) { font.size <- theme( # Set font sizes axis.text.x = element_text(size = x.text), axis.text.y = element_text(size = y.text), axis.title.x = element_text(size = x.title), axis.title.y = element_text(size = y.title), plot.title = element_text(size = main), # Validate the theme validate = TRUE, # Extra parameters ... ) } #' @param keep.text Keep axis text #' @param keep.ticks Keep axis ticks #' #' @importFrom ggplot2 theme element_blank #' @export #' @concept visualization #' #' @rdname SeuratTheme #' @aliases NoAxes #' #' @examples #' # Generate a plot with no axes #' library(ggplot2) #' df <- data.frame(x = rnorm(n = 100, mean = 20, sd = 2), y = rbinom(n = 100, size = 100, prob = 0.2)) #' p <- ggplot(data = df, mapping = aes(x = x, y = y)) + geom_point(mapping = aes(color = 'red')) #' p + NoAxes() #' NoAxes <- function(..., keep.text = FALSE, keep.ticks = FALSE) { blank <- element_blank() no.axes.theme <- theme( # Remove the axis elements axis.line.x = blank, axis.line.y = blank, # Validate the theme validate = TRUE, ... ) if (!keep.text) { no.axes.theme <- no.axes.theme + theme( axis.text.x = blank, axis.text.y = blank, axis.title.x = blank, axis.title.y = blank, validate = TRUE, ... ) } if (!keep.ticks){ no.axes.theme <- no.axes.theme + theme( axis.ticks.x = blank, axis.ticks.y = blank, validate = TRUE, ... ) } return(no.axes.theme) } #' @importFrom ggplot2 theme #' @export #' @concept visualization #' #' @rdname SeuratTheme #' @aliases NoLegend #' #' @examples #' # Generate a plot with no legend #' library(ggplot2) #' df <- data.frame(x = rnorm(n = 100, mean = 20, sd = 2), y = rbinom(n = 100, size = 100, prob = 0.2)) #' p <- ggplot(data = df, mapping = aes(x = x, y = y)) + geom_point(mapping = aes(color = 'red')) #' p + NoLegend() #' NoLegend <- function(...) { no.legend.theme <- theme( # Remove the legend legend.position = 'none', # Validate the theme validate = TRUE, ... 
  )
  return(no.legend.theme)
}

#' @importFrom ggplot2 theme element_blank
#' @export
#' @concept visualization
#'
#' @rdname SeuratTheme
#' @aliases NoGrid
#'
#' @examples
#' # Generate a plot with no grid lines
#' library(ggplot2)
#' df <- data.frame(x = rnorm(n = 100, mean = 20, sd = 2), y = rbinom(n = 100, size = 100, prob = 0.2))
#' p <- ggplot(data = df, mapping = aes(x = x, y = y)) + geom_point(mapping = aes(color = 'red'))
#' p + NoGrid()
#'
NoGrid <- function(...) {
  no.grid.theme <- theme(
    # Set grid lines to blank
    panel.grid.major = element_blank(),
    panel.grid.minor = element_blank(),
    # Validate the theme
    validate = TRUE,
    ...
  )
  return(no.grid.theme)
}

#' @importFrom ggplot2 theme element_text
#' @export
#' @concept visualization
#'
#' @rdname SeuratTheme
#' @aliases SeuratAxes
#'
SeuratAxes <- function(...) {
  axes.theme <- theme(
    # Set axis things
    axis.title = element_text(face = 'bold', color = '#990000', size = 16),
    axis.text = element_text(vjust = 0.5, size = 12),
    # Validate the theme
    validate = TRUE,
    ...
  )
  return(axes.theme)
}

#' @export
#' @concept visualization
#'
#' @rdname SeuratTheme
#' @aliases SpatialTheme
#'
SpatialTheme <- function(...) {
  return(DarkTheme() + NoAxes() + NoGrid() + NoLegend(...))
}

#' @param position A position to restore the legend to
#'
#' @importFrom ggplot2 theme
#' @export
#' @concept visualization
#'
#' @rdname SeuratTheme
#' @aliases RestoreLegend
#'
RestoreLegend <- function(..., position = 'right') {
  restored.theme <- theme(
    # Restore legend position
    legend.position = position,
    # Validate the theme
    validate = TRUE,
    ...
  )
  return(restored.theme)
}

#' @importFrom ggplot2 theme element_text
#' @export
#' @concept visualization
#'
#' @rdname SeuratTheme
#' @aliases RotatedAxis
#'
RotatedAxis <- function(...) {
  rotated.theme <- theme(
    # Rotate X axis text
    axis.text.x = element_text(angle = 45, hjust = 1),
    # Validate the theme
    validate = TRUE,
    ...
  )
  return(rotated.theme)
}

#' @importFrom ggplot2 theme element_text
#' @export
#' @concept visualization
#'
#' @rdname SeuratTheme
#' @aliases BoldTitle
#'
BoldTitle <- function(...) {
  bold.theme <- theme(
    # Make the title bold
    plot.title = element_text(size = 20, face = 'bold'),
    # Validate the theme
    validate = TRUE,
    ...
  )
  return(bold.theme)
}

#' @importFrom ggplot2 theme element_rect
#' @export
#' @concept visualization
#'
#' @rdname SeuratTheme
#' @aliases WhiteBackground
#'
WhiteBackground <- function(...) {
  white.rect = element_rect(fill = 'white')
  white.theme <- theme(
    # Make the plot, panel, and legend key backgrounds white
    plot.background = white.rect,
    panel.background = white.rect,
    legend.key = white.rect,
    # Validate the theme
    validate = TRUE,
    ...
  )
  return(white.theme)
}

#%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
# Fortify Methods
#%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%

#' Prepare Coordinates for Spatial Plots
#'
#' @inheritParams SeuratObject::GetTissueCoordinates
#' @param model A \code{\linkS4class{Segmentation}},
#' \code{\linkS4class{Centroids}},
#' or \code{\linkS4class{Molecules}} object
#' @param data Extra data to be used for annotating the cell segmentations; the
#' easiest way to pass data is a one-column
#' \code{\link[base:data.frame]{data frame}} with the values to color by and
#' the cell names are rownames
#' @param ...
Arguments passed to other methods #' #' @name fortify-Spatial #' @rdname fortify-Spatial #' #' @importFrom SeuratObject GetTissueCoordinates #' #' @keywords internal #' #' @method fortify Centroids #' @export #' #' @aliases fortify #' fortify.Centroids <- function(model, data, ...) { df <- GetTissueCoordinates(object = model, full = FALSE) if (missing(x = data)) { data <- NULL } data <- .PrepImageData(data = data, cells = lengths(x = model), ...) df <- cbind(df, data) return(df) } #' @rdname fortify-Spatial #' @method fortify Molecules #' #' @importFrom SeuratObject FetchData #' #' @export #' fortify.Molecules <- function( model, data, nmols = NULL, seed = NA_integer_, ... ) { return(FetchData(object = model, vars = data, nmols = nmols, seed = seed, ...)) } #' @rdname fortify-Spatial #' @method fortify Segmentation #' @export #' fortify.Segmentation <- function(model, data, ...) { df <- GetTissueCoordinates(object = model, full = TRUE) if (missing(x = data)) { data <- NULL } data <- .PrepImageData(data = data, cells = lengths(x = model), ...) df <- cbind(df, data) return(df) } #%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% # Internal #%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% #' @importFrom SeuratObject Features Key Keys Molecules #' .MolsByFOV <- function(object, fov, molecules) { keys <- Key(object = object)[fov] keyed.mols <- sapply( X = names(x = keys), FUN = function(img) { if (is.null(x = Molecules(object = object[[img]]))) { return(NULL) } key <- keys[img] mols <- grep(pattern = paste0('^', key), x = molecules, value = TRUE) names(x = mols) <- mols mols <- gsub(pattern = paste0('^', key), replacement = '', x = mols) keyed <- sapply( X = SeuratObject::Keys(object = object[[img]]), FUN = function(x) { return(grep(pattern = paste0('^', x), x = mols, value = TRUE)) } ) keyed <- unlist(x = keyed) names(x = keyed) <- gsub( pattern = '^.*\\.', replacement = '', x = names(x = keyed) ) missing <- mols[!mols %in% keyed] missing <- missing[missing %in% Features(x = object[[img]])] if (length(x = missing)) { # TODO: replace with default molecules default <- Molecules(object = object[[img]])[1L] mn <- names(x = missing) missing <- paste0( SeuratObject::Key(object = object[[img]][[default]]), missing ) names(x = missing) <- mn } return(c(missing, keyed)) }, simplify = FALSE, USE.NAMES = TRUE ) found <- names(x = unlist(x = keyed.mols)) found <- gsub(pattern = '^.*\\.', replacement = '', x = found) missing <- setdiff(x = molecules, y = found) names(x = missing) <- missing for (img in fov) { imissing <- missing for (i in seq_along(along.with = imissing)) { for (lkey in Keys(object = object[[img]])) { imissing[[i]] <- gsub( pattern = paste0('^', lkey), replacement = '', x = imissing[[i]] ) } } imissing <- names(x = imissing[imissing %in% Features(x = object[[img]])]) keyed.mols[[img]] <- c(keyed.mols[[img]], imissing) } keyed.mols <- Filter(f = length, x = keyed.mols) keyed.mols <- sapply(X = keyed.mols, FUN = unname, simplify = FALSE) return(keyed.mols) } # Calculate bandwidth for use in ggplot2-based smooth scatter plots # # Inspired by MASS::bandwidth.nrd and graphics:::.smoothScatterCalcDensity # # @param data A two-column data frame with X and Y coordinates for a plot # # @return The calculated bandwidth # #' @importFrom stats quantile var # Bandwidth <- function(data) { r <- diff(x = apply( X = data, MARGIN = 2, FUN = quantile, probs = c(0.05, 0.95), na.rm = TRUE, names = FALSE )) h <- abs(x = r[2L] - r[1L]) / 1.34 h <- 
ifelse(test = h == 0, yes = 1, no = h) bandwidth <- 4 * 1.06 * min(sqrt(x = apply(X = data, MARGIN = 2, FUN = var)), h) * nrow(x = data) ^ (-0.2) return(bandwidth) } # Blend expression values together # # @param data A two-column data frame with expression values for two features # # @return A three-column data frame with transformed and blended expression values # BlendExpression <- function(data) { if (ncol(x = data) != 2) { stop("'BlendExpression' only blends two features") } features <- colnames(x = data) data <- as.data.frame(x = apply( X = data, MARGIN = 2, FUN = function(x) { return(round(x = 9 * (x - min(x)) / (max(x) - min(x)))) } )) data[, 3] <- data[, 1] + data[, 2] * 10 colnames(x = data) <- c(features, paste(features, collapse = '_')) for (i in 1:ncol(x = data)) { data[, i] <- factor(x = data[, i]) } return(data) } # Create a heatmap of blended colors # # @param color.matrix A color matrix of blended colors # # @return A ggplot object # #' @importFrom grid unit #' @importFrom cowplot theme_cowplot #' @importFrom ggplot2 ggplot aes_string scale_fill_manual geom_raster #' theme scale_y_continuous scale_x_continuous scale_fill_manual # # @seealso \code{\link{BlendMatrix}} # BlendMap <- function(color.matrix) { color.heat <- matrix( data = 1:prod(dim(x = color.matrix)) - 1, nrow = nrow(x = color.matrix), ncol = ncol(x = color.matrix), dimnames = list( 1:nrow(x = color.matrix), 1:ncol(x = color.matrix) ) ) xbreaks <- seq.int(from = 0, to = nrow(x = color.matrix), by = 2) ybreaks <- seq.int(from = 0, to = ncol(x = color.matrix), by = 2) color.heat <- Melt(x = color.heat) color.heat$rows <- as.numeric(x = as.character(x = color.heat$rows)) color.heat$cols <- as.numeric(x = as.character(x = color.heat$cols)) color.heat$vals <- factor(x = color.heat$vals) plot <- ggplot( data = color.heat, mapping = aes_string(x = 'rows', y = 'cols', fill = 'vals') ) + geom_raster(show.legend = FALSE) + theme(plot.margin = unit(x = rep.int(x = 0, times = 4), units = 'cm')) + scale_x_continuous(breaks = xbreaks, expand = c(0, 0), labels = xbreaks) + scale_y_continuous(breaks = ybreaks, expand = c(0, 0), labels = ybreaks) + scale_fill_manual(values = as.vector(x = color.matrix)) + theme_cowplot() return(plot) } # Create a color matrix of blended colors # # @param n Dimensions of blended matrix (n x n) # @param col.threshold The color cutoff from weak signal to strong signal; ranges from 0 to 1. # @param two.colors Two colors used for the blend expression. 
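# @param negative.color Color assigned to the zero-expression corner of the
# blend matrix, i.e. cells with little or no signal for either feature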
# # @return An n x n matrix of blended colors # #' @importFrom grDevices col2rgb # BlendMatrix <- function( n = 10, col.threshold = 0.5, two.colors = c("#ff0000", "#00ff00"), negative.color = "black" ) { if (0 > col.threshold || col.threshold > 1) { stop("col.threshold must be between 0 and 1") } C0 <- as.vector(col2rgb(negative.color, alpha = TRUE)) C1 <- as.vector(col2rgb(two.colors[1], alpha = TRUE)) C2 <- as.vector(col2rgb(two.colors[2], alpha = TRUE)) blend_alpha <- (C1[4] + C2[4])/2 C0 <- C0[-4] C1 <- C1[-4] C2 <- C2[-4] merge.weight <- min(255 / (C1 + C2 + C0 + 0.01)) sigmoid <- function(x) { return(1 / (1 + exp(-x))) } blend_color <- function( i, j, col.threshold, n, C0, C1, C2, alpha, merge.weight ) { c.min <- sigmoid(5 * (1 / n - col.threshold)) c.max <- sigmoid(5 * (1 - col.threshold)) c1_weight <- sigmoid(5 * (i / n - col.threshold)) c2_weight <- sigmoid(5 * (j / n - col.threshold)) c0_weight <- sigmoid(5 * ((i + j) / (2 * n) - col.threshold)) c1_weight <- (c1_weight - c.min) / (c.max - c.min) c2_weight <- (c2_weight - c.min) / (c.max - c.min) c0_weight <- (c0_weight - c.min) / (c.max - c.min) C1_length <- sqrt(sum((C1 - C0) ** 2)) C2_length <- sqrt(sum((C2 - C0) ** 2)) C1_unit <- (C1 - C0) / C1_length C2_unit <- (C2 - C0) / C2_length C1_weight <- C1_unit * c1_weight C2_weight <- C2_unit * c2_weight C_blend <- C1_weight * (i - 1) * C1_length / (n - 1) + C2_weight * (j - 1) * C2_length / (n - 1) + (i - 1) * (j - 1) * c0_weight * C0 / (n - 1) ** 2 + C0 C_blend[C_blend > 255] <- 255 C_blend[C_blend < 0] <- 0 return(rgb( red = C_blend[1], green = C_blend[2], blue = C_blend[3], alpha = alpha, maxColorValue = 255 )) } blend_matrix <- matrix(nrow = n, ncol = n) for (i in 1:n) { for (j in 1:n) { blend_matrix[i, j] <- blend_color( i = i, j = j, col.threshold = col.threshold, n = n, C0 = C0, C1 = C1, C2 = C2, alpha = blend_alpha, merge.weight = merge.weight ) } } return(blend_matrix) } # Convert R colors to hexadecimal # # @param ... R colors # # @return The hexadecimal representations of input colors # #' @importFrom grDevices rgb col2rgb # Col2Hex <- function(...) 
{
  colors <- as.character(x = c(...))
  alpha <- rep.int(x = 255, times = length(x = colors))
  if (sum(sapply(X = colors, FUN = grepl, pattern = '^#')) != 0) {
    hex <- colors[which(x = grepl(pattern = '^#', x = colors))]
    hex.length <- sapply(X = hex, FUN = nchar)
    if (9 %in% hex.length) {
      hex.alpha <- hex[which(x = hex.length == 9)]
      hex.vals <- sapply(X = hex.alpha, FUN = substr, start = 8, stop = 9)
      dec.vals <- sapply(X = hex.vals, FUN = strtoi, base = 16)
      alpha[match(x = hex[which(x = hex.length == 9)], table = colors)] <- dec.vals
    }
  }
  colors <- t(x = col2rgb(col = colors))
  colors <- mapply(
    FUN = function(i, alpha) {
      return(rgb(colors[i, , drop = FALSE], alpha = alpha, maxColorValue = 255))
    },
    i = 1:nrow(x = colors),
    alpha = alpha
  )
  return(colors)
}

# Plot feature expression by identity
#
# Basically combines the codebase for VlnPlot and RidgePlot
#
# @param object Seurat object
# @param type Plot type, choose from 'ridge', 'violin', or 'splitViolin'
# @param features Features to plot (gene expression, metrics, PC scores,
# anything that can be retrieved by FetchData)
# @param idents Which classes to include in the plot (default is all)
# @param ncol Number of columns if multiple plots are displayed
# @param sort Sort identity classes (on the x-axis) by the average expression of the attribute being plotted,
# or, if stack is TRUE, sort both identity classes and features by hierarchical clustering
# @param y.max Maximum y axis value
# @param same.y.lims Set all the y-axis limits to the same values
# @param adjust Adjust parameter for geom_violin
# @param pt.size Point size for points
# @param alpha Alpha value for points
# @param cols Colors to use for plotting
# @param group.by Group (color) cells in different ways (for example, orig.ident)
# @param split.by A variable to split the plot by
# @param log plot Y axis on log scale
# @param slot Slot to pull expression data from (e.g. "counts" or "data")
# @param stack Horizontally stack plots for multiple features
# @param combine Combine plots into a single \code{\link[patchwork]{patchwork}ed}
# ggplot object. If \code{FALSE}, return a list of ggplot objects
# @param fill.by Color violins/ridges based on either 'feature' or 'ident'
# @param flip flip plot orientation (identities on x-axis)
# @param add.noise Whether to add a small amount of noise for plotting
# @param raster Convert points to raster format, default is \code{NULL} which
# automatically rasterizes if plotting more than 100,000 cells
#
# @return A \code{\link[patchwork]{patchwork}ed} ggplot object if
# \code{combine = TRUE}; otherwise, a list of ggplot objects
#
#' @importFrom scales hue_pal
#' @importFrom ggplot2 xlab ylab
#' @importFrom patchwork wrap_plots
#
ExIPlot <- function(
  object,
  features,
  type = 'violin',
  idents = NULL,
  ncol = NULL,
  sort = FALSE,
  assay = NULL,
  y.max = NULL,
  same.y.lims = FALSE,
  adjust = 1,
  cols = NULL,
  pt.size = 0,
  alpha = 1,
  group.by = NULL,
  split.by = NULL,
  log = FALSE,
  slot = deprecated(),
  layer = 'data',
  stack = FALSE,
  combine = TRUE,
  fill.by = NULL,
  flip = FALSE,
  add.noise = TRUE,
  raster = NULL
) {
  if (is_present(arg = slot)) {
    layer <- layer %||% slot
  }
  assay <- assay %||% DefaultAssay(object = object)
  DefaultAssay(object = object) <- assay
  cells <- Cells(x = object, assay = NULL)
  if (isTRUE(x = stack)) {
    if (!is.null(x = ncol)) {
      warning(
        "'ncol' is ignored when 'stack' is TRUE",
        call. = FALSE,
        immediate. = TRUE
      )
    }
    if (!is.null(x = y.max)) {
      warning(
        "'y.max' is ignored when 'stack' is TRUE",
        call. = FALSE,
        immediate.
= TRUE ) } } else { ncol <- ncol %||% ifelse( test = length(x = features) > 9, yes = 4, no = min(length(x = features), 3) ) } if (!is.null(x = idents)) { cells <- intersect( x = names(x = Idents(object = object)[Idents(object = object) %in% idents]), y = cells ) } data <- FetchData(object = object, vars = features, slot = layer, cells = cells) pt.size <- pt.size %||% AutoPointSize(data = object) features <- colnames(x = data) data <- data[cells, , drop = FALSE] idents <- if (is.null(x = group.by)) { Idents(object = object)[cells] } else { object[[group.by, drop = TRUE]][cells] } if (!is.factor(x = idents)) { idents <- factor(x = idents) } if (is.null(x = split.by)) { split <- NULL } else { split <- FetchData(object,split.by)[cells,split.by] if (!is.factor(x = split)) { split <- factor(x = split) } if (is.null(x = cols)) { cols <- hue_pal()(length(x = levels(x = idents))) cols <- Interleave(cols, InvertHex(hexadecimal = cols)) } else if (length(x = cols) == 1 && cols == 'interaction') { split <- interaction(idents, split) cols <- hue_pal()(length(x = levels(x = idents))) } else { cols <- Col2Hex(cols) } if (length(x = cols) < length(x = levels(x = split))) { cols <- Interleave(cols, InvertHex(hexadecimal = cols)) } cols <- rep_len(x = cols, length.out = length(x = levels(x = split))) names(x = cols) <- levels(x = split) if ((length(x = cols) > 2) & (type == "splitViolin")) { warning("Split violin is only supported for <3 groups, using multi-violin.") type <- "violin" } } if (same.y.lims && is.null(x = y.max)) { y.max <- max(data) } if (isTRUE(x = stack)) { return(MultiExIPlot( type = type, data = data, idents = idents, split = split, sort = sort, same.y.lims = same.y.lims, adjust = adjust, cols = cols, pt.size = pt.size, log = log, fill.by = fill.by, add.noise = add.noise, flip = flip )) } plots <- lapply( X = features, FUN = function(x) { return(SingleExIPlot( type = type, data = data[, x, drop = FALSE], idents = idents, split = split, sort = sort, y.max = y.max, adjust = adjust, cols = cols, pt.size = pt.size, alpha = alpha, log = log, add.noise = add.noise, raster = raster )) } ) label.fxn <- switch( EXPR = type, 'violin' = if (stack) { xlab } else { ylab }, "splitViolin" = if (stack) { xlab } else { ylab }, 'ridge' = xlab, stop("Unknown ExIPlot type ", type, call. = FALSE) ) for (i in 1:length(x = plots)) { key <- paste0(unlist(x = strsplit(x = features[i], split = '_'))[1], '_') obj <- names(x = which(x = Key(object = object) == key)) if (length(x = obj) == 1) { if (inherits(x = object[[obj]], what = 'DimReduc')) { plots[[i]] <- plots[[i]] + label.fxn(label = 'Embeddings Value') } else if (inherits(x = object[[obj]], what = 'Assay') || inherits(x = object[[obj]], what = 'Assay5')) { next } else { warning("Unknown object type ", class(x = object), immediate. = TRUE, call. = FALSE) plots[[i]] <- plots[[i]] + label.fxn(label = NULL) } } else if (!features[i] %in% rownames(x = object)) { plots[[i]] <- plots[[i]] + label.fxn(label = NULL) } } if (combine) { plots <- wrap_plots(plots, ncol = ncol) if (length(x = features) > 1) { plots <- plots & NoLegend() } } return(plots) } # Make a theme for facet plots # # @inheritParams SeuratTheme # @export # # @rdname SeuratTheme # @aliases FacetTheme # FacetTheme <- function(...) { return(theme( strip.background = element_blank(), strip.text = element_text(face = 'bold'), # Validate the theme validate = TRUE, ... 
)) } #' @importFrom RColorBrewer brewer.pal #' @importFrom grDevices colorRampPalette #' #' SpatialColors <- colorRampPalette(colors = rev(x = brewer.pal(n = 11, name = "Spectral"))) # Feature plot palettes # FeaturePalettes <- list( 'Spatial' = SpatialColors(n = 100), 'Seurat' = c('lightgrey', 'blue') ) # Splits features into groups based on log expression levels # # @param object Seurat object # @param assay Assay for expression data # @param min.cells Only compute for features in at least this many cells # @param ngroups Number of groups to split into # # @return A Seurat object with the feature group stored as a factor in # metafeatures # #' @importFrom Matrix rowMeans rowSums # GetFeatureGroups <- function(object, assay, min.cells = 5, ngroups = 6) { cm <- GetAssayData(object = object[[assay]], slot = "counts") # subset to keep only genes detected in at least min.cells cells cm <- cm[rowSums(cm > 0) >= min.cells, ] # use the geometric mean of the features to group them # (using the arithmetic mean would usually not change things much) # could use sctransform:::row_gmean here but not exported feature.gmean <- exp(x = rowMeans(log1p(x = cm))) - 1 feature.grp.breaks <- seq( from = min(log10(x = feature.gmean)) - 10*.Machine$double.eps, to = max(log10(x = feature.gmean)), length.out = ngroups + 1 ) feature.grp <- cut( x = log10(x = feature.gmean), breaks = feature.grp.breaks, ordered_result = TRUE ) feature.grp <- factor( x = feature.grp, levels = rev(x = levels(x = feature.grp)), ordered = TRUE ) names(x = feature.grp) <- names(x = feature.gmean) return(feature.grp) } # Get X and Y aesthetics from a plot for a certain geom # # @param plot A ggplot2 object # @param geom Geom class to filter to # @param plot.first Use plot-wide X/Y aesthetics before geom-specific aesthetics # # @return A named list with values 'x' for the name of the x aesthetic and 'y' for the y aesthetic # #' @importFrom rlang as_label # GetXYAesthetics <- function(plot, geom = 'GeomPoint', plot.first = TRUE) { geoms <- sapply( X = plot$layers, FUN = function(layer) { return(class(x = layer$geom)[1]) } ) # handle case where raster is set to True if (geom == "GeomPoint" && "GeomScattermore" %in% geoms){ geom <- "GeomScattermore" } geoms <- which(x = geoms == geom) if (length(x = geoms) == 0) { stop("Cannot find a geom of class ", geom) } geoms <- min(geoms) if (plot.first) { # x <- as.character(x = plot$mapping$x %||% plot$layers[[geoms]]$mapping$x)[2] x <- as_label(x = plot$mapping$x %||% plot$layers[[geoms]]$mapping$x) # y <- as.character(x = plot$mapping$y %||% plot$layers[[geoms]]$mapping$y)[2] y <- as_label(x = plot$mapping$y %||% plot$layers[[geoms]]$mapping$y) } else { x <- as_label(x = plot$layers[[geoms]]$mapping$x %||% plot$mapping$x) y <- as_label(x = plot$layers[[geoms]]$mapping$y %||% plot$mapping$y) } return(list('x' = x, 'y' = y)) } # For plotting the tissue image #' @importFrom ggplot2 ggproto Geom aes ggproto_parent alpha draw_key_point #' @importFrom grid unit gpar editGrob pointsGrob viewport gTree addGrob grobName #' GeomSpatial <- ggproto( "GeomSpatial", Geom, required_aes = c("x", "y"), extra_params = c("na.rm", "image", "image.alpha", "crop"), default_aes = aes( shape = 21, colour = "black", point.size.factor = 1.0, fill = NA, alpha = NA, stroke = 0.25 ), setup_data = function(self, data, params) { data <- ggproto_parent(Geom, self)$setup_data(data, params) # We need to flip the image as the Y coordinates are reversed data$y = max(data$y) - data$y + min(data$y) data }, draw_key = draw_key_point, 
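  # draw_panel renders the tissue image in a viewport matched to the image
  # extent (with the Y axis flipped to match the image orientation), then
  # overlays the spots as points sized by Radius(image) * point.size.factor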
draw_panel = function(data, panel_scales, coord, image, image.alpha, crop) { # This should be in native units, where # Locations and sizes are relative to the x- and yscales for the current viewport. if (!crop) { y.transform <- c(0, nrow(x = image)) - panel_scales$y.range data$y <- data$y + sum(y.transform) panel_scales$x$continuous_range <- c(0, ncol(x = image)) panel_scales$y$continuous_range <- c(0, nrow(x = image)) panel_scales$y.range <- c(0, nrow(x = image)) panel_scales$x.range <- c(0, ncol(x = image)) } z <- coord$transform( data.frame(x = c(0, ncol(x = image)), y = c(0, nrow(x = image))), panel_scales ) # Flip Y axis for image z$y <- -rev(z$y) + 1 wdth <- z$x[2] - z$x[1] hgth <- z$y[2] - z$y[1] vp <- viewport( x = unit(x = z$x[1], units = "npc"), y = unit(x = z$y[1], units = "npc"), width = unit(x = wdth, units = "npc"), height = unit(x = hgth, units = "npc"), just = c("left", "bottom") ) img.grob <- GetImage(object = image) img <- editGrob(grob = img.grob, vp = vp) # spot.size <- slot(object = image, name = "spot.radius") spot.size <- Radius(object = image) coords <- coord$transform(data, panel_scales) pts <- pointsGrob( x = coords$x, y = coords$y, pch = data$shape, size = unit(spot.size, "npc") * data$point.size.factor, gp = gpar( col = alpha(colour = coords$colour, alpha = coords$alpha), fill = alpha(colour = coords$fill, alpha = coords$alpha), lwd = coords$stroke) ) vp <- viewport() gt <- gTree(vp = vp) if (image.alpha > 0) { if (image.alpha != 1) { img$raster = as.raster( x = matrix( data = alpha(colour = img$raster, alpha = image.alpha), nrow = nrow(x = img$raster), ncol = ncol(x = img$raster), byrow = TRUE) ) } gt <- addGrob(gTree = gt, child = img) } gt <- addGrob(gTree = gt, child = pts) # Replacement for ggname gt$name <- grobName(grob = gt, prefix = 'geom_spatial') return(gt) # ggplot2:::ggname("geom_spatial", gt) } ) # influenced by: https://stackoverflow.com/questions/49475201/adding-tables-to-ggplot2-with-facet-wrap-in-r # https://ggplot2.tidyverse.org/articles/extending-ggplot2.html #' @importFrom ggplot2 layer #' #' geom_spatial <- function( mapping = NULL, data = NULL, image = image, image.alpha = image.alpha, crop = crop, stat = "identity", position = "identity", na.rm = FALSE, show.legend = NA, inherit.aes = TRUE, ... ) { layer( geom = GeomSpatial, mapping = mapping, data = data, stat = stat, position = position, show.legend = show.legend, inherit.aes = inherit.aes, params = list(na.rm = na.rm, image = image, image.alpha = image.alpha, crop = crop, ...) ) } #' @importFrom grid viewport editGrob grobName #' @importFrom ggplot2 ggproto Geom ggproto_parent # GeomSpatialInteractive <- ggproto( "GeomSpatialInteractive", Geom, setup_data = function(self, data, params) { data <- ggproto_parent(parent = Geom, self = self)$setup_data(data, params) data }, draw_group = function(data, panel_scales, coord) { vp <- viewport(x = data$x, y = data$y) g <- editGrob(grob = data$grob[[1]], vp = vp) # Replacement for ggname g$name <- grobName(grob = g, prefix = 'geom_spatial_interactive') return(g) # return(ggname(prefix = "geom_spatial", grob = g)) }, required_aes = c("grob","x","y") ) #' @importFrom ggplot2 layer # geom_spatial_interactive <- function( mapping = NULL, data = NULL, stat = "identity", position = "identity", na.rm = FALSE, show.legend = NA, inherit.aes = FALSE, ... ) { layer( geom = GeomSpatialInteractive, mapping = mapping, data = data, stat = stat, position = position, show.legend = show.legend, inherit.aes = inherit.aes, params = list(na.rm = na.rm, ...) 
  )
}

# A split violin plot geom
#
#' @importFrom scales zero_range
#' @importFrom ggplot2 GeomPolygon
#' @importFrom grid grobTree grobName
#
# @author jan-glx on StackOverflow
# @references \url{https://stackoverflow.com/questions/35717353/split-violin-plot-with-ggplot2}
# @seealso \code{\link[ggplot2]{geom_violin}}
#
GeomSplitViolin <- ggproto(
  "GeomSplitViolin",
  GeomViolin,
  # setup_data = function(data, params) {
  # data$width <- data$width %||% params$width %||% (resolution(data$x, FALSE) * 0.9)
  # data <- plyr::ddply(data, "group", transform, xmin = x - width/2, xmax = x + width/2)
  # e <- globalenv()
  # name <- paste(sample(x = letters, size = 5), collapse = '')
  # message("Saving initial data to ", name)
  # e[[name]] <- data
  # return(data)
  # },
  draw_group = function(self, data, ..., draw_quantiles = NULL) {
    data$xminv <- data$x - data$violinwidth * (data$x - data$xmin)
    data$xmaxv <- data$x + data$violinwidth * (data$xmax - data$x)
    grp <- data[1, 'group']
    if (grp %% 2 == 1) {
      data$x <- data$xminv
      data.order <- data$y
    } else {
      data$x <- data$xmaxv
      data.order <- -data$y
    }
    newdata <- data[order(data.order), , drop = FALSE]
    newdata <- rbind(
      newdata[1, ],
      newdata,
      newdata[nrow(x = newdata), ],
      newdata[1, ]
    )
    newdata[c(1, nrow(x = newdata) - 1, nrow(x = newdata)), 'x'] <- round(x = newdata[1, 'x'])
    grob <- if (length(x = draw_quantiles) > 0 & !zero_range(x = range(data$y))) {
      stopifnot(all(draw_quantiles >= 0), all(draw_quantiles <= 1))
      quantiles <- QuantileSegments(data = data, draw.quantiles = draw_quantiles)
      aesthetics <- data[rep.int(x = 1, times = nrow(x = quantiles)), setdiff(x = names(x = data), y = c("x", "y")), drop = FALSE]
      aesthetics$alpha <- rep.int(x = 1, nrow(x = quantiles))
      both <- cbind(quantiles, aesthetics)
      quantile.grob <- GeomPath$draw_panel(both, ...)
      # include the quantile lines as a child grob so they draw over the half-violin
      grobTree(GeomPolygon$draw_panel(newdata, ...), quantile.grob)
    } else {
      GeomPolygon$draw_panel(newdata, ...)
    }
    grob$name <- grobName(grob = grob, prefix = 'geom_split_violin')
    return(grob)
  }
)

# Create a split violin plot geom
#
# @inheritParams ggplot2::geom_violin
#
#' @importFrom ggplot2 layer
#
# @author jan-glx on StackOverflow
# @references \url{https://stackoverflow.com/questions/35717353/split-violin-plot-with-ggplot2}
# @seealso \code{\link[ggplot2]{geom_violin}}
#
geom_split_violin <- function(
  mapping = NULL,
  data = NULL,
  stat = 'ydensity',
  position = 'identity',
  ...,
  draw_quantiles = NULL,
  trim = TRUE,
  scale = 'area',
  na.rm = FALSE,
  show.legend = NA,
  inherit.aes = TRUE
) {
  return(layer(
    data = data,
    mapping = mapping,
    stat = stat,
    geom = GeomSplitViolin,
    position = position,
    show.legend = show.legend,
    inherit.aes = inherit.aes,
    params = list(
      trim = trim,
      scale = scale,
      draw_quantiles = draw_quantiles,
      na.rm = na.rm,
      ...
    )
  ))
}

# Convert a ggplot2 scatterplot to base R graphics
#
# @param plot A ggplot2 scatterplot
# @param do.plot Create the plot with base R graphics
# @param cols A named vector of column names to pull. Vector names must be 'x',
# 'y', 'colour', 'shape', and/or 'size'; vector values must be the names of
# columns in plot data that correspond to these values. May pass only values that
# differ from the default (eg. \code{cols = c('size' = 'point.size.factor')})
# @param ... Extra parameters passed to PlotBuild
#
# @return A dataframe with the data that created the ggplot2 scatterplot
#
#' @importFrom ggplot2 ggplot_build
#
GGpointToBase <- function(
  plot,
  do.plot = TRUE,
  cols = c(
    'x' = 'x',
    'y' = 'y',
    'colour' = 'colour',
    'shape' = 'shape',
    'size' = 'size'
  ),
  ...
) { plot.build <- ggplot_build(plot = plot) default.cols <- c( 'x' = 'x', 'y' = 'y', 'colour' = 'colour', 'shape' = 'shape', 'size' = 'size' ) cols <- cols %||% default.cols if (is.null(x = names(x = cols))) { if (length(x = cols) > length(x = default.cols)) { warning( "Too many columns provided, selecting only first ", length(x = default.cols), call. = FALSE, immediate. = TRUE ) cols <- cols[1:length(x = default.cols)] } names(x = cols) <- names(x = default.cols)[1:length(x = cols)] } cols <- c( cols[intersect(x = names(x = default.cols), y = names(x = cols))], default.cols[setdiff(x = names(x = default.cols), y = names(x = cols))] ) cols <- cols[names(x = default.cols)] build.use <- which(x = vapply( X = plot.build$data, FUN = function(dat) { return(all(cols %in% colnames(x = dat))) }, FUN.VALUE = logical(length = 1L) )) if (length(x = build.use) == 0) { stop("GGpointToBase only works on geom_point ggplot objects") } build.data <- plot.build$data[[min(build.use)]] plot.data <- build.data[, cols] names(x = plot.data) <- c( plot.build$plot$labels$x, plot.build$plot$labels$y, 'color', 'pch', 'cex' ) if (do.plot) { PlotBuild(data = plot.data, ...) } return(plot.data) } # Convert a ggplot2 scatterplot to plotly graphics # # @inheritParams GGpointToBase # @param information Extra information for hovering # @param ... Ignored # # @return A dataframe with the data that greated the ggplot2 scatterplot #' @importFrom ggplot2 ggplot_build # GGpointToPlotlyBuild <- function( plot, information = NULL, cols = eval(expr = formals(fun = GGpointToBase)$cols), ... ) { CheckDots(...) plot.build <- GGpointToBase(plot = plot, do.plot = FALSE, cols = cols) data <- ggplot_build(plot = plot)$plot$data rownames(x = plot.build) <- rownames(data) # Reset the names to 'x' and 'y' names(x = plot.build) <- c( 'x', 'y', names(x = plot.build)[3:length(x = plot.build)] ) # Add the hover information we're looking for if (is.null(x = information)) { plot.build$feature <- rownames(x = data) } else { info <- apply( X = information, MARGIN = 1, FUN = function(x, names) { return(paste0(names, ': ', x, collapse = '
')) }, names = colnames(x = information) ) data.info <- data.frame( feature = paste(rownames(x = information), info, sep = '
'), row.names = rownames(x = information) ) plot.build <- merge(x = plot.build, y = data.info, by = 0) rownames(x = plot.build) <- plot.build$Row.names plot.build <- plot.build[, which(x = colnames(x = plot.build) != 'Row.names'), drop = FALSE] } return(plot.build) } #' @importFrom stats quantile #' InvertCoordinate <- function(x, MARGIN = 2) { if (!is.null(x = x)) { switch( EXPR = MARGIN, '1' = { rmin <- 'left' rmax <- 'right' cmin <- 'xmin' cmax <- 'xmax' }, '2' = { rmin <- 'bottom' rmax <- 'top' cmin <- 'ymin' cmax <- 'ymax' }, stop("'MARGIN' must be either 1 or 2", call. = FALSE) ) # Fix the range so that rmin becomes rmax and vice versa # Needed for both points and brushes range <- x$range x$range[[rmin]] <- range[[rmax]] x$range[[rmax]] <- range[[rmin]] # Fix the cmin and cmax values, if provided # These are used for brush boundaries coords <- c(x[[cmin]], x[[cmax]]) if (all(!is.null(x = coords))) { names(x = coords) <- c(cmin, cmax) x[[cmin]] <- quantile( x = x$range[[rmin]]:x$range[[rmax]], probs = 1 - (coords[cmax] / x$range[[rmax]]), names = FALSE ) x[[cmax]] <- quantile( x = x$range[[rmin]]:x$range[[rmax]], probs = 1 - (coords[cmin] / x$range[[rmax]]), names = FALSE ) } } return(x) } # Invert a Hexadecimal color # # @param hexadecimal A character vector of hexadecimal colors # # @return Hexadecimal representations of the inverted color # # @author Matt Lagrandeur # @references \url{http://www.mattlag.com/scripting/hexcolorinverter.php} # InvertHex <- function(hexadecimal) { return(vapply( X = toupper(x = hexadecimal), FUN = function(hex) { hex <- unlist(x = strsplit( x = gsub(pattern = '#', replacement = '', x = hex), split = '' )) key <- toupper(x = as.hexmode(x = 15:0)) if (!all(hex %in% key)) { stop('All hexadecimal colors must be valid hexidecimal numbers from 0-9 and A-F') } if (length(x = hex) == 8) { alpha <- hex[7:8] hex <- hex[1:6] } else if (length(x = hex) == 6) { alpha <- NULL } else { stop("All hexidecimal colors must be either 6 or 8 characters in length, excluding the '#'") } value <- rev(x = key) inv.hex <- vapply( X = hex, FUN = function(x) { return(value[grep(pattern = x, x = key)]) }, FUN.VALUE = character(length = 1L) ) inv.hex <- paste(inv.hex, collapse = '') return(paste0('#', inv.hex, paste(alpha, collapse = ''))) }, FUN.VALUE = character(length = 1L), USE.NAMES = FALSE )) } # Make label information for ggplot2-based scatter plots # # @param data A three-column data frame (accessed with \code{plot$data}) # The first column should be the X axis, the second the Y, and the third should be grouping information # # @return A dataframe with three columns: centers along the X axis, centers along the Y axis, and group information # #' @importFrom stats median na.omit # MakeLabels <- function(data) { groups <- as.character(x = na.omit(object = unique(x = data[, 3]))) labels <- lapply( X = groups, FUN = function(group) { data.use <- data[data[, 3] == group, 1:2] return(apply(X = data.use, MARGIN = 2, FUN = median, na.rm = TRUE)) } ) names(x = labels) <- groups labels <- as.data.frame(x = t(x = as.data.frame(x = labels))) labels[, colnames(x = data)[3]] <- groups return(labels) } # Plot expression of multiple features by identity on a plot # # @param data Data to plot # @param idents Idents to use # @param type Make either a 'ridge' or 'violin' plot # @param sort Sort identity classes and features based on hierarchical clustering # @param same.y.lims Indicates whether to use the same ylim for each feature # @param adjust Adjust parameter for geom_violin # @param cols 
Colors to use for plotting # @param log plot Y axis on log scale # @param fill.by Color violins/ridges based on either 'feature' or 'ident' # @param seed.use Random seed to use. If NULL, don't set a seed # @param flip flip plot orientation (identities on x-axis) # # @return A ggplot-based Expression-by-Identity plot # #' @importFrom cowplot theme_cowplot #' @importFrom utils globalVariables #' @importFrom stats rnorm dist hclust #' @importFrom ggridges geom_density_ridges theme_ridges #' @importFrom ggplot2 ggplot aes_string facet_grid theme labs geom_rect #' geom_violin geom_jitter ylim position_jitterdodge scale_fill_manual #' scale_y_log10 scale_x_log10 scale_y_discrete scale_x_continuous #' scale_y_continuous waiver #' MultiExIPlot <- function( data, idents, split = NULL, type = 'violin', sort = FALSE, same.y.lims = same.y.lims, adjust = 1, pt.size = 0, cols = NULL, seed.use = 42, log = FALSE, fill.by = NULL, add.noise = TRUE, flip = NULL ) { if (!(fill.by %in% c("feature", "ident"))) { stop("`fill.by` must be either `feature` or `ident`") } if (!is.null(x = seed.use)) { set.seed(seed = seed.use) } if (!is.data.frame(x = data) || ncol(x = data) < 2) { stop("MultiExIPlot requires a data frame with >1 column") } data <- Melt(x = data) data <- data.frame( feature = data$cols, expression = data$vals, ident = rep_len(x = idents, length.out = nrow(x = data)) ) if ((is.character(x = sort) && nchar(x = sort) > 0) || sort) { data$feature <- as.vector(x = data$feature) data$ident <- as.vector(x = data$ident) # build matrix of average expression (#-features by #-idents), lexical ordering avgs.matrix <- sapply( X = split(x = data, f = data$ident), FUN = function(df) { return(tapply( X = df$expression, INDEX = df$feature, FUN = mean )) } ) idents.order <- hclust(d = dist(x = t(x = L2Norm(mat = avgs.matrix, MARGIN = 2))))$order avgs.matrix <- avgs.matrix[,idents.order] avgs.matrix <- L2Norm(mat = avgs.matrix, MARGIN = 1) # order feature clusters by position of their "rank-1 idents" position <- apply(X = avgs.matrix, MARGIN = 1, FUN = which.max) mat <- hclust(d = dist(x = avgs.matrix))$merge orderings <- list() for (i in 1:nrow(mat)) { x <- if (mat[i,1] < 0) -mat[i,1] else orderings[[mat[i,1]]] y <- if (mat[i,2] < 0) -mat[i,2] else orderings[[mat[i,2]]] x.pos <- min(x = position[x]) y.pos <- min(x = position[y]) orderings[[i]] <- if (x.pos < y.pos) { c(x, y) } else { c(y, x) } } features.order <- orderings[[length(x = orderings)]] data$feature <- factor( x = data$feature, levels = unique(x = sort(x = data$feature))[features.order] ) data$ident <- factor( x = data$ident, levels = unique(x = sort(x = data$ident))[rev(x = idents.order)] ) } else { data$feature <- factor(x = data$feature, levels = unique(x = data$feature)) } if (log) { noise <- rnorm(n = nrow(x = data)) / 200 data$expression <- data$expression + 1 } else { noise <- rnorm(n = nrow(x = data)) / 100000 } if (!add.noise) { noise <- noise*0 } for (f in unique(x = data$feature)) { if (all(data$expression[(data$feature == f)] == data$expression[(data$feature == f)][1])) { warning( "All cells have the same value of ", f, call. = FALSE, immediate. 
= TRUE ) } else { data$expression[(data$feature == f)] <- data$expression[(data$feature == f)] + noise[(data$feature == f)] } } if (type == 'violin' && !is.null(x = split)) { data$split <- rep_len(x = split, length.out = nrow(data)) vln.geom <- geom_violin fill.by <- 'split' } else if (type == 'splitViolin' && !is.null(x = split)) { data$split <- rep_len(x = split, length.out = nrow(data)) vln.geom <- geom_split_violin fill.by <- 'split' type <- 'violin' } else { vln.geom <- geom_violin } switch( EXPR = type, 'violin' = { geom <- list(vln.geom(scale = 'width', adjust = adjust, trim = TRUE)) }, 'ridge' = { geom <- list( geom_density_ridges(scale = 4), theme_ridges(), scale_y_discrete(expand = c(0.01, 0)) ) }, stop("Unknown plot type: ", type) ) if (flip) { x <- 'ident' x.label <- 'Identity' y <- 'expression' y.label <- 'Expression Level' } else { y <- 'ident' y.label <- 'Identity' x <- 'expression' x.label <- 'Expression Level' } plot <- ggplot( data = data, mapping = aes_string(x = x, y = y, fill = fill.by)[c(2, 3, 1)] ) + labs(x = x.label, y = y.label, fill = NULL) + theme_cowplot() plot <- do.call(what = '+', args = list(plot, geom)) if (flip) { plot <- plot + scale_y_continuous( expand = c(0, 0), labels = function(x) c(rep(x = '', times = length(x)-2), x[length(x) - 1], '')) + facet_grid(feature ~ ., scales = (if (same.y.lims) 'fixed' else 'free')) + FacetTheme( panel.spacing = unit(0, 'lines'), panel.background = element_rect(fill = NA, color = "black"), axis.text.y = element_text(size = 7), axis.text.x = element_text(angle = 45, hjust = 1), strip.text.y.right = element_text(angle = 0)) } else { plot <- plot + scale_x_continuous( expand = c(0, 0), labels = function(x) c(rep(x = '', times = length(x)-2), x[length(x) - 1], '')) + facet_grid(. ~ feature, scales = (if (same.y.lims) 'fixed' else 'free')) + FacetTheme( panel.spacing = unit(0, 'lines'), panel.background = element_rect(fill = NA, color = "black"), axis.text.x = element_text(size = 7), strip.text.x = element_text(angle = -90)) } if (log) { plot <- plot + scale_x_log10() } if (!is.null(x = cols)) { if (!is.null(x = split)) { idents <- unique(x = as.vector(x = data$ident)) splits <- unique(x = as.vector(x = data$split)) labels <- if (length(x = splits) == 2) { splits } else { unlist(x = lapply( X = idents, FUN = function(pattern, x) { x.mod <- gsub( pattern = paste0(pattern, '.'), replacement = paste0(pattern, ': '), x = x, fixed = TRUE ) x.keep <- grep(pattern = ': ', x = x.mod, fixed = TRUE) x.return <- x.mod[x.keep] names(x = x.return) <- x[x.keep] return(x.return) }, x = unique(x = as.vector(x = data$split)) )) } if (is.null(x = names(x = labels))) { names(x = labels) <- labels } } else { labels <- levels(x = droplevels(data$ident)) } plot <- plot + scale_fill_manual(values = cols, labels = labels) } return(plot) } # Create a scatterplot with data from a ggplot2 scatterplot # # @param plot.data The original ggplot2 scatterplot data # This is taken from ggplot2::ggplot_build # @param dark.theme Plot using a dark theme # @param smooth Use a smooth scatterplot instead of a standard scatterplot # @param ... Extra parameters passed to graphics::plot or graphics::smoothScatter # #' @importFrom graphics axis plot smoothScatter # PlotBuild <- function(data, dark.theme = FALSE, smooth = FALSE, ...) { # Do we use a smooth scatterplot? 
  # Take advantage of functions as first class objects
  # to dynamically choose normal vs smooth scatterplot
  myplot <- ifelse(test = smooth, yes = smoothScatter, no = plot)
  CheckDots(..., fxns = myplot)
  if (dark.theme) {
    par(bg = 'black')
    axes = FALSE
    col.lab = 'white'
  } else {
    axes = TRUE
    col.lab = 'black'
  }
  myplot(
    data[, c(1, 2)],
    col = data$color,
    pch = data$pch,
    cex = vapply(
      X = data$cex,
      FUN = function(x) {
        return(max(x / 2, 0.5))
      },
      FUN.VALUE = numeric(1)
    ),
    axes = axes,
    col.lab = col.lab,
    col.main = col.lab,
    ...
  )
  if (dark.theme) {
    axis(
      side = 1,
      at = NULL,
      labels = TRUE,
      col.axis = col.lab,
      col = col.lab
    )
    axis(
      side = 2,
      at = NULL,
      labels = TRUE,
      col.axis = col.lab,
      col = col.lab
    )
  }
}

# Locate points on a plot and return them
#
# @param plot A ggplot2 plot
# @param recolor Do we recolor the plot to highlight selected points?
# @param dark.theme Plot using a dark theme
# @param ... Extra parameters to PlotBuild
#
# @return A dataframe of x and y coordinates for points selected
#
#' @importFrom graphics locator
# @importFrom SDMTools pnt.in.poly
#
PointLocator <- function(plot, recolor = TRUE, dark.theme = FALSE, ...) {
  .Defunct(new = "CellSelector")
  # # Convert the ggplot object to a data.frame
  # PackageCheck('SDMTools')
  # plot.data <- GGpointToBase(plot = plot, dark.theme = dark.theme, ...)
  # npoints <- nrow(x = plot.data)
  # cat("Click around the cluster of points you wish to select\n")
  # cat("ie. select the vertices of a shape around the cluster you\n")
  # cat("are interested in. Press when finished (right click for R-terminal users)\n\n")
  # polygon <- locator(n = npoints, type = 'l')
  # polygon <- data.frame(polygon)
  # # pnt.in.poly returns a data.frame of points
  # points.all <- SDMTools::pnt.in.poly(
  #   pnts = plot.data[, c(1, 2)],
  #   poly.pnts = polygon
  # )
  # # Find the located points
  # points.located <- points.all[which(x = points.all$pip == 1), ]
  # # If we're recoloring, do the recolor
  # if (recolor) {
  #   no <- ifelse(test = dark.theme, yes = 'white', no = '#C3C3C3')
  #   points.all$color <- ifelse(test = points.all$pip == 1, yes = '#DE2D26', no = no)
  #   plot.data$color <- points.all$color
  #   PlotBuild(data = plot.data, dark.theme = dark.theme, ...)
# } # return(points.located[, c(1, 2)]) } # Create quantile segments for quantiles on violin plots in ggplot2 # # @param data Data being plotted # @param draw.quantiles Quantiles to draw # #' @importFrom stats approxfun # # @author Hadley Wickham (I presume) # @seealso \code{\link[ggplot2]{geom_violin}} # QuantileSegments <- function(data, draw.quantiles) { densities <- cumsum(x = data$density) / sum(data$density) ecdf <- approxfun(x = densities, y = data$y) ys <- ecdf(v = draw.quantiles) violin.xminvs <- approxfun(x = data$y, y = data$xminv)(v = ys) violin.xmaxvs <- approxfun(x = data$y, y = data$xmaxv)(v = ys) return(data.frame( x = as.vector(x = t(x = data.frame(violin.xminvs, violin.xmaxvs))), y = rep(x = ys, each = 2), group = rep(x = ys, each = 2) )) } # Scale vector to min and max cutoff values # # @param vec a vector # @param cutoffs A two-length vector of cutoffs to be passed to \code{\link{SetQuantile}} # # @return Returns a vector # ScaleColumn <- function(vec, cutoffs) { if (!length(x = cutoffs) == 2) { stop("Two cutoffs (a low and high) are needed") } cutoffs <- sapply( X = cutoffs, FUN = SetQuantile, data = vec ) vec[vec < min(cutoffs)] <- min(cutoffs) vec[vec > max(cutoffs)] <- max(cutoffs) return(vec) } # Set highlight information # # @param cells.highlight Cells to highlight # @param cells.all A character vector of all cell names # @param sizes.highlight Sizes of cells to highlight # @param cols.highlight Colors to highlight cells as # @param col.base Base color to use for unselected cells # @param pt.size Size of unselected cells # @param raster Convert points to raster format, default is \code{NULL} which # automatically rasterizes if plotting more than 100,000 cells # # @return A list will cell highlight information # \describe{ # \item{plot.order}{An order to plot cells in} # \item{highlight}{A vector giving group information for each cell} # \item{size}{A vector giving size information for each cell} # \item{color}{Colors for highlighting in the order of plot.order} # } # SetHighlight <- function( cells.highlight, cells.all, sizes.highlight, cols.highlight, col.base = 'black', pt.size = 1, raster = NULL ) { if (is.character(x = cells.highlight)) { cells.highlight <- list(cells.highlight) } else if (is.data.frame(x = cells.highlight) || !is.list(x = cells.highlight)) { cells.highlight <- as.list(x = cells.highlight) } cells.highlight <- lapply( X = cells.highlight, FUN = function(cells) { cells.return <- if (is.character(x = cells)) { cells[cells %in% cells.all] } else { cells <- as.numeric(x = cells) cells <- cells[cells <= length(x = cells.all)] cells.all[cells] } return(cells.return) } ) cells.highlight <- Filter(f = length, x = cells.highlight) names.highlight <- if (is.null(x = names(x = cells.highlight))) { paste0('Group_', 1L:length(x = cells.highlight)) } else { names(x = cells.highlight) } sizes.highlight <- rep_len( x = sizes.highlight, length.out = length(x = cells.highlight) ) cols.highlight <- c( col.base, rep_len(x = cols.highlight, length.out = length(x = cells.highlight)) ) size <- rep_len(x = pt.size, length.out = length(x = cells.all)) highlight <- rep_len(x = NA_character_, length.out = length(x = cells.all)) if (length(x = cells.highlight) > 0) { for (i in 1:length(x = cells.highlight)) { cells.check <- cells.highlight[[i]] index.check <- match(x = cells.check, cells.all) highlight[index.check] <- names.highlight[i] size[index.check] <- sizes.highlight[i] } } # Check for raster if (isTRUE(x = raster)) { size <- sizes.highlight[1] } plot.order <- 
sort(x = unique(x = highlight), na.last = TRUE) plot.order[is.na(x = plot.order)] <- 'Unselected' highlight[is.na(x = highlight)] <- 'Unselected' highlight <- factor(x = highlight, levels = plot.order) return(list( plot.order = plot.order, highlight = highlight, size = size, color = cols.highlight )) } #' @importFrom shiny brushedPoints # ShinyBrush <- function(plot.data, brush, outputs, inverts = character(length = 0L)) {#}, selected = NULL) { selected <- NULL if (!is.null(x = brush)) { if (brush$outputId %in% outputs) { selected <- rownames(x = brushedPoints(df = plot.data, brush = brush)) } else if (brush$outputId %in% inverts) { selected <- rownames(x = brushedPoints( df = plot.data, brush = InvertCoordinate(x = brush) )) } } return(selected) } globalVariables(names = '..density..', package = 'Seurat') #' A single correlation plot #' #' @param data A data frame with two columns to be plotted #' @param col.by A vector or factor of values to color the plot by #' @param cols An optional vector of colors to use #' @param pt.size Point size for the plot #' @param smooth Make a smoothed scatter plot #' @param rows.highlight A vector of rows to highlight (like cells.highlight in #' \code{\link{SingleDimPlot}}) #' @param legend.title Optional legend title #' @param raster Convert points to raster format, default is \code{NULL} #' which will automatically use raster if the number of points plotted is #' greater than 100,000 #' @param raster.dpi the pixel resolution for rastered plots, passed to geom_scattermore(). #' Default is c(512, 512) #' @param plot.cor ... #' @param jitter Jitter for easier visualization of crowded points #' #' @return A ggplot2 object #' #' @importFrom stats cor #' @importFrom cowplot theme_cowplot #' @importFrom RColorBrewer brewer.pal.info #' @importFrom ggplot2 ggplot aes_string geom_point labs scale_color_brewer #' scale_color_manual guides stat_density2d aes scale_fill_continuous #' @importFrom scattermore geom_scattermore #' #' @keywords internal #' #' @export #' SingleCorPlot <- function( data, col.by = NULL, cols = NULL, pt.size = NULL, smooth = FALSE, rows.highlight = NULL, legend.title = NULL, na.value = 'grey50', span = NULL, raster = NULL, raster.dpi = NULL, plot.cor = TRUE, jitter = TRUE ) { pt.size <- pt.size %||% AutoPointSize(data = data, raster = raster) if ((nrow(x = data) > 1e5) & is.null(x = raster)){ message("Rasterizing points since number of points exceeds 100,000.", "\nTo disable this behavior set `raster=FALSE`") } raster <- raster %||% (nrow(x = data) > 1e5) if (!is.null(x = raster.dpi)) { if (!is.numeric(x = raster.dpi) || length(x = raster.dpi) != 2) stop("'raster.dpi' must be a two-length numeric vector") } orig.names <- colnames(x = data) names.plot <- colnames(x = data) <- gsub( pattern = '-', replacement = '.', x = colnames(x = data), fixed = TRUE ) names.plot <- colnames(x = data) <- gsub( pattern = ':', replacement = '.', x = colnames(x = data), fixed = TRUE ) names.plot <- colnames(x = data) <- gsub( pattern = ' ', replacement = '.', x = colnames(x = data), fixed = TRUE ) if (ncol(x = data) < 2) { msg <- "Too few variables passed" if (ncol(x = data) == 1) { msg <- paste0(msg, ', only have ', colnames(x = data)[1]) } stop(msg, call. 
= FALSE) } plot.cor <- if (isTRUE(x = plot.cor)) { round(x = cor(x = data[, 1], y = data[, 2]), digits = 2) } else( "" ) if (!is.null(x = rows.highlight)) { highlight.info <- SetHighlight( cells.highlight = rows.highlight, cells.all = rownames(x = data), sizes.highlight = pt.size, cols.highlight = 'red', col.base = 'black', pt.size = pt.size, raster = raster ) cols <- highlight.info$color col.by <- factor( x = highlight.info$highlight, levels = rev(x = highlight.info$plot.order) ) plot.order <- order(col.by) data <- data[plot.order, ] col.by <- col.by[plot.order] } if (!is.null(x = col.by)) { data$colors <- col.by } plot <- ggplot( data = data, mapping = aes_string(x = names.plot[1], y = names.plot[2]) ) + labs( x = orig.names[1], y = orig.names[2], title = plot.cor, color = legend.title ) if (smooth) { # density <- kde2d(x = data[, names.plot[1]], y = data[, names.plot[2]], h = Bandwidth(data = data[, names.plot]), n = 200) # density <- data.frame( # expand.grid( # x = density$x, # y = density$y # ), # density = as.vector(x = density$z) # ) plot <- plot + stat_density2d( mapping = aes(fill = ..density.. ^ 0.25), geom = 'tile', contour = FALSE, n = 200, h = Bandwidth(data = data[, names.plot]) ) + # geom_tile( # mapping = aes_string( # x = 'x', # y = 'y', # fill = 'density' # ), # data = density # ) + scale_fill_continuous(low = 'white', high = 'dodgerblue4') + guides(fill = FALSE) } position <- NULL if (jitter) { position <- 'jitter' } else { position <- 'identity' } if (!is.null(x = col.by)) { if (raster) { plot <- plot + geom_scattermore( mapping = aes_string(color = 'colors'), position = position, pointsize = pt.size, pixels = raster.dpi ) } else { plot <- plot + geom_point( mapping = aes_string(color = 'colors'), position = position, size = pt.size ) } } else { if (raster) { plot <- plot + geom_scattermore(position = position, pointsize = pt.size, pixels = raster.dpi) } else { plot <- plot + geom_point(position = position, size = pt.size) } } if (!is.null(x = cols)) { cols.scale <- if (length(x = cols) == 1 && cols %in% rownames(x = brewer.pal.info)) { scale_color_brewer(palette = cols) } else { scale_color_manual(values = cols, na.value = na.value) } plot <- plot + cols.scale if (!is.null(x = rows.highlight)) { plot <- plot + guides(color = FALSE) } } plot <- plot + theme_cowplot() + theme(plot.title = element_text(hjust = 0.5)) if (!is.null(x = span)) { plot <- plot + geom_smooth( mapping = aes_string(x = names.plot[1], y = names.plot[2]), method = 'loess', span = span ) } return(plot) } #' Plot a single dimension #' #' @param data Data to plot #' @param dims A two-length numeric vector with dimensions to use #' @param col.by ... #' @param cols Vector of colors, each color corresponds to an identity class. #' This may also be a single character or numeric value corresponding to a #' palette as specified by \code{\link[RColorBrewer]{brewer.pal.info}}.By #' default, ggplot2 assigns colors #' @param pt.size Adjust point size for plotting #' @param shape.by If NULL, all points are circles (default). You can specify #' any cell attribute (that can be pulled with \code{\link{FetchData}}) #' allowing for both different colors and different shapes on cells. #' @param alpha Alpha value for plotting (default is 1) #' @param alpha.by Mapping variable for the point alpha value #' @param order Specify the order of plotting for the idents. This can be #' useful for crowded plots if points of interest are being buried. 
Provide #' either a full list of valid idents or a subset to be plotted last (on top). #' @param label Whether to label the clusters #' @param repel Repel labels #' @param label.size Sets size of labels #' @param cells.highlight A list of character or numeric vectors of cells to #' highlight. If only one group of cells desired, can simply #' pass a vector instead of a list. If set, colors selected cells to the color(s) #' in \code{cols.highlight} and other cells black (white if dark.theme = TRUE); #' will also resize to the size(s) passed to \code{sizes.highlight} #' @param cols.highlight A vector of colors to highlight the cells as; will #' repeat to the length groups in cells.highlight #' @param sizes.highlight Size of highlighted cells; will repeat to the length #' groups in cells.highlight #' @param na.value Color value for NA points when using custom scale. #' @param raster Convert points to raster format, default is \code{NULL} #' which will automatically use raster if the number of points plotted is #' greater than 100,000 #' @param raster.dpi the pixel resolution for rastered plots, passed to geom_scattermore(). #' Default is c(512, 512) #' #' @return A ggplot2 object #' #' @importFrom cowplot theme_cowplot #' @importFrom RColorBrewer brewer.pal.info #' @importFrom ggplot2 ggplot aes_string geom_point labs guides scale_color_brewer #' scale_color_manual element_rect guide_legend discrete_scale #' #' @keywords internal #' #' @export #' SingleDimPlot <- function( data, dims, col.by = NULL, cols = NULL, pt.size = NULL, shape.by = NULL, alpha = 1, alpha.by = NULL, order = NULL, label = FALSE, repel = FALSE, label.size = 4, cells.highlight = NULL, cols.highlight = '#DE2D26', sizes.highlight = 1, na.value = 'grey50', raster = NULL, raster.dpi = NULL ) { if ((nrow(x = data) > 1e5) & is.null(x = raster)){ message("Rasterizing points since number of points exceeds 100,000.", "\nTo disable this behavior set `raster=FALSE`") } raster <- raster %||% (nrow(x = data) > 1e5) pt.size <- pt.size %||% AutoPointSize(data = data, raster = raster) if (!is.null(x = cells.highlight) && pt.size != AutoPointSize(data = data, raster = raster) && sizes.highlight != pt.size && isTRUE(x = raster)) { warning("When `raster = TRUE` highlighted and non-highlighted cells must be the same size. Plot will use the value provided to 'sizes.highlight'.") } if (!is.null(x = raster.dpi)) { if (!is.numeric(x = raster.dpi) || length(x = raster.dpi) != 2) stop("'raster.dpi' must be a two-length numeric vector") } if (length(x = dims) != 2) { stop("'dims' must be a two-length vector") } if (!is.data.frame(x = data)) { data <- as.data.frame(x = data) } if (is.character(x = dims) && !all(dims %in% colnames(x = data))) { stop("Cannot find dimensions to plot in data") } else if (is.numeric(x = dims)) { dims <- colnames(x = data)[dims] } if (!is.null(x = cells.highlight)) { if (inherits(x = cells.highlight, what = "data.frame")) { stop("cells.highlight cannot be a dataframe. 
", "Please supply a vector or list") } highlight.info <- SetHighlight( cells.highlight = cells.highlight, cells.all = rownames(x = data), sizes.highlight = sizes.highlight %||% pt.size, cols.highlight = cols.highlight, col.base = cols[1] %||% '#C3C3C3', pt.size = pt.size, raster = raster ) order <- highlight.info$plot.order data$highlight <- highlight.info$highlight col.by <- 'highlight' pt.size <- highlight.info$size cols <- highlight.info$color } if (!is.null(x = order) && !is.null(x = col.by)) { if (typeof(x = order) == "logical") { if (order) { data <- data[order(!is.na(x = data[, col.by]), data[, col.by]), ] } } else { order <- rev(x = c( order, setdiff(x = unique(x = data[, col.by]), y = order) )) data[, col.by] <- factor(x = data[, col.by], levels = order) new.order <- order(x = data[, col.by]) data <- data[new.order, ] if (length(x = pt.size) == length(x = new.order)) { pt.size <- pt.size[new.order] } } } if (!is.null(x = col.by) && !col.by %in% colnames(x = data)) { warning("Cannot find ", col.by, " in plotting data, not coloring plot") col.by <- NULL } else { # col.index <- grep(pattern = col.by, x = colnames(x = data), fixed = TRUE) col.index <- match(x = col.by, table = colnames(x = data)) if (grepl(pattern = '^\\d', x = col.by)) { # Do something for numbers col.by <- paste0('x', col.by) } else if (grepl(pattern = '-', x = col.by)) { # Do something for dashes col.by <- gsub(pattern = '-', replacement = '.', x = col.by) } colnames(x = data)[col.index] <- col.by } if (!is.null(x = shape.by) && !shape.by %in% colnames(x = data)) { warning("Cannot find ", shape.by, " in plotting data, not shaping plot") } if (!is.null(x = alpha.by) && !alpha.by %in% colnames(x = data)) { warning( "Cannot find alpha variable ", alpha.by, " in data, setting to NULL", call. = FALSE, immediate. 
= TRUE ) alpha.by <- NULL } plot <- ggplot(data = data) plot <- if (isTRUE(x = raster)) { plot + geom_scattermore( mapping = aes_string( x = dims[1], y = dims[2], color = paste0("`", col.by, "`"), shape = shape.by, alpha = alpha.by ), pointsize = pt.size, alpha = alpha, pixels = raster.dpi ) } else { plot + geom_point( mapping = aes_string( x = dims[1], y = dims[2], color = paste0("`", col.by, "`"), shape = shape.by, alpha = alpha.by ), size = pt.size, alpha = alpha ) } plot <- plot + guides(color = guide_legend(override.aes = list(size = 3, alpha = 1))) + labs(color = NULL, title = col.by) + CenterTitle() if (label && !is.null(x = col.by)) { plot <- LabelClusters( plot = plot, id = col.by, repel = repel, size = label.size ) } if (!is.null(x = cols)) { if (length(x = cols) == 1 && (is.numeric(x = cols) || cols %in% rownames(x = brewer.pal.info))) { scale <- scale_color_brewer(palette = cols, na.value = na.value) } else if (length(x = cols) == 1 && (cols %in% c('alphabet', 'alphabet2', 'glasbey', 'polychrome', 'stepped'))) { colors <- DiscretePalette(length(unique(data[[col.by]])), palette = cols) scale <- scale_color_manual(values = colors, na.value = na.value) } else { scale <- scale_color_manual(values = cols, na.value = na.value) } plot <- plot + scale } plot <- plot + theme_cowplot() return(plot) } #' Plot a single expression by identity on a plot #' #' @param data Data to plot #' @param idents Idents to use #' @param split Use a split violin plot #' @param type Make either a \dQuote{ridge} or \dQuote{violin} plot #' @param sort Sort identity classes (on the x-axis) by the average #' expression of the attribute being plotted #' @param y.max Maximum Y value to plot #' @param adjust Adjust parameter for geom_violin #' @param pt.size Size of points for violin plots #' @param alpha Alpha value for violin plots #' @param cols Colors to use for plotting #' @param seed.use Random seed to use. If NULL, don't set a seed #' @param log Plot Y axis on log10 scale #' @param add.noise Whether to add a small amount of noise for plotting #' @param raster Convert points to raster format. Requires 'ggrastr' to be installed. #' Default is \code{NULL}, which automatically rasterizes if ggrastr is installed and #' the number of points exceeds 100,000.
#' #' @return A ggplot-based Expression-by-Identity plot #' #' @importFrom stats rnorm #' @importFrom utils globalVariables #' @importFrom ggridges geom_density_ridges theme_ridges #' @importFrom ggplot2 ggplot aes_string theme labs geom_violin geom_jitter #' ylim position_jitterdodge scale_fill_manual scale_y_log10 scale_x_log10 #' scale_y_discrete scale_x_continuous waiver #' @importFrom cowplot theme_cowplot #' #' @keywords internal #' @export #' SingleExIPlot <- function( data, idents, split = NULL, type = 'violin', sort = FALSE, y.max = NULL, adjust = 1, pt.size = 0, alpha = 1, cols = NULL, seed.use = 42, log = FALSE, add.noise = TRUE, raster = NULL ) { if (!is.null(x = raster) && isTRUE(x = raster)){ if (!PackageCheck('ggrastr', error = FALSE)) { stop("Please install ggrastr from CRAN to enable rasterization.") } } if (PackageCheck('ggrastr', error = FALSE)) { # Set rasterization to true if ggrastr is installed and # number of points exceeds 100,000 if ((nrow(x = data) > 1e5) & is.null(x = raster)){ message("Rasterizing points since number of points exceeds 100,000.", "\nTo disable this behavior set `raster=FALSE`") # change raster to TRUE raster <- TRUE } } if (!is.null(x = seed.use)) { set.seed(seed = seed.use) } if (!is.data.frame(x = data) || ncol(x = data) != 1) { stop("'SingleExIPlot requires a data frame with 1 column") } feature <- colnames(x = data) data$ident <- idents if ((is.character(x = sort) && nchar(x = sort) > 0) || sort) { data$ident <- factor( x = data$ident, levels = names(x = rev(x = sort( x = tapply( X = data[, feature], INDEX = data$ident, FUN = mean ), decreasing = grepl(pattern = paste0('^', tolower(x = sort)), x = 'decreasing') ))) ) } if (log) { noise <- rnorm(n = length(x = data[, feature])) / 200 data[, feature] <- data[, feature] + 1 } else { noise <- rnorm(n = length(x = data[, feature])) / 100000 } if (!add.noise) { noise <- noise * 0 } if (all(data[, feature] == data[, feature][1])) { warning(paste0("All cells have the same value of ", feature, ".")) } else{ data[, feature] <- data[, feature] + noise } axis.label <- 'Expression Level' y.max <- y.max %||% max(data[, feature][is.finite(x = data[, feature])]) if (type == 'violin' && !is.null(x = split)) { data$split <- split vln.geom <- geom_violin fill <- 'split' } else if (type == 'splitViolin' && !is.null(x = split )) { data$split <- split vln.geom <- geom_split_violin fill <- 'split' type <- 'violin' } else { vln.geom <- geom_violin fill <- 'ident' } switch( EXPR = type, 'violin' = { x <- 'ident' y <- paste0("`", feature, "`") xlab <- 'Identity' ylab <- axis.label geom <- list( vln.geom(scale = 'width', adjust = adjust, trim = TRUE), theme(axis.text.x = element_text(angle = 45, hjust = 1)) ) if (is.null(x = split)) { if (isTRUE(x = raster)) { jitter <- ggrastr::rasterize(geom_jitter(height = 0, size = pt.size, alpha = alpha, show.legend = FALSE)) } else { jitter <- geom_jitter(height = 0, size = pt.size, alpha = alpha, show.legend = FALSE) } } else { if (isTRUE(x = raster)) { jitter <- ggrastr::rasterize(geom_jitter( position = position_jitterdodge(jitter.width = 0.4, dodge.width = 0.9), size = pt.size, alpha = alpha, show.legend = FALSE )) } else { jitter <- geom_jitter( position = position_jitterdodge(jitter.width = 0.4, dodge.width = 0.9), size = pt.size, alpha = alpha, show.legend = FALSE ) } } log.scale <- scale_y_log10() axis.scale <- ylim }, 'ridge' = { x <- paste0("`", feature, "`") y <- 'ident' xlab <- axis.label ylab <- 'Identity' geom <- list( geom_density_ridges(scale = 4), 
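# Ridge branch: expression goes on the x-axis and identity on the y-axis;
# theme_ridges() plus the expanded scales below keep each ridge aligned
# with its identity label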
theme_ridges(), scale_y_discrete(expand = c(0.01, 0)), scale_x_continuous(expand = c(0, 0)) ) jitter <- geom_jitter(width = 0, size = pt.size, alpha = alpha, show.legend = FALSE) log.scale <- scale_x_log10() axis.scale <- function(...) { invisible(x = NULL) } }, stop("Unknown plot type: ", type) ) plot <- ggplot( data = data, mapping = aes_string(x = x, y = y, fill = fill)[c(2, 3, 1)] ) + labs(x = xlab, y = ylab, title = feature, fill = NULL) + theme_cowplot() + theme(plot.title = element_text(hjust = 0.5)) plot <- do.call(what = '+', args = list(plot, geom)) plot <- plot + if (log) { log.scale } else { axis.scale(min(data[, feature]), y.max) } if (pt.size > 0) { plot <- plot + jitter } if (!is.null(x = cols)) { if (!is.null(x = split)) { idents <- unique(x = as.vector(x = data$ident)) splits <- unique(x = as.vector(x = data$split)) labels <- if (length(x = splits) == 2) { splits } else { unlist(x = lapply( X = idents, FUN = function(pattern, x) { x.mod <- gsub( pattern = paste0(pattern, '.'), replacement = paste0(pattern, ': '), x = x, fixed = TRUE ) x.keep <- grep(pattern = ': ', x = x.mod, fixed = TRUE) x.return <- x.mod[x.keep] names(x = x.return) <- x[x.keep] return(x.return) }, x = unique(x = as.vector(x = data$split)) )) } if (is.null(x = names(x = labels))) { names(x = labels) <- labels } } else { labels <- levels(x = droplevels(data$ident)) } plot <- plot + scale_fill_manual(values = cols, labels = labels) } return(plot) } #' A single heatmap from base R using \code{\link[graphics]{image}} #' #' @param data matrix of data to plot #' @param order optional vector of cell names to specify order in plot #' @param title Title for plot #' #' @return No return, generates a base-R heatmap using \code{\link[graphics]{image}} #' #' @importFrom graphics axis image par plot.new title #' #' @keywords internal #' #' @export #' SingleImageMap <- function(data, order = NULL, title = NULL) { if (!is.null(x = order)) { data <- data[order, ] } par(mar = c(1, 1, 3, 3)) plot.new() image( x = as.matrix(x = data), axes = FALSE, add = TRUE, col = PurpleAndYellow() ) axis( side = 4, at = seq(from = 0, to = 1, length = ncol(x = data)), labels = colnames(x = data), las = 1, tick = FALSE, mgp = c(0, -0.5, 0), cex.axis = 0.75 ) title(main = title) } #' Single Spatial Plot #' #' @param data A data frame with at least the following columns: #' \itemize{ #' \item \dQuote{\code{x}}: Spatial-resolved \emph{x} coordinates, will be #' plotted on the \emph{y}-axis #' \item \dQuote{\code{y}}: Spatially-resolved \emph{y} coordinates, will be #' plotted on the \emph{x}-axis #' \item \dQuote{\code{cell}}: Cell name #' \item \dQuote{\code{boundary}}: Segmentation boundary label; when plotting #' multiple segmentation layers, the order of boundary transparency is set by #' factor levels for this column #' } #' Can pass \code{NA} to \code{data} suppress segmentation visualization #' @param col.by Name of column in \code{data} to color cell segmentations by; #' pass \code{NA} to suppress coloring #' @param col.factor Are the colors a factor or discrete? 
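#' Should be \code{TRUE} when \code{col.by} is categorical (values are coerced
#' to a factor) and \code{FALSE} when it is continuous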
#' @param cols Colors for cell segmentations; can be one of the #' following: #' \itemize{ #' \item \code{NULL} for default ggplot2 colors #' \item A numeric value or name of a #' \link[RColorBrewer:RColorBrewer]{color brewer palette} #' \item Name of a palette for \code{\link{DiscretePalette}} #' \item A vector of colors equal to the length of unique levels of #' \code{data$col.by} #' } #' @param shuffle.cols Randomly shuffle colors when a palette or #' vector of colors is provided to \code{cols} #' @param size Point size for cells when plotting centroids #' @param molecules A data frame with spatially-resolved molecule coordinates; #' should have the following columns: #' \itemize{ #' \item \dQuote{\code{x}}: Spatially-resolved \emph{x} coordinates, will be #' plotted on the \emph{y}-axis #' \item \dQuote{\code{y}}: Spatially-resolved \emph{y} coordinates, will be #' plotted on the \emph{x}-axis #' \item \dQuote{\code{molecule}}: Molecule name #' } #' @param mols.size Point size for molecules #' @param mols.cols A vector of colors for molecules. The "Set1" palette from #' RColorBrewer is used by default. #' @param mols.alpha Alpha value for molecules, should be between 0 and 1 #' @param alpha Alpha value, should be between 0 and 1; when plotting multiple #' boundaries, \code{alpha} is equivalent to max alpha #' @param border.color Color of cell segmentation border; pass \code{NA} #' to suppress borders for segmentation-based plots #' @param border.size Thickness of cell segmentation borders; pass \code{NA} #' to suppress borders for centroid-based plots #' @param na.value Color value for \code{NA} segmentations when #' using custom scale #' @param dark.background Use a dark background for the plot (applies \code{\link{DarkTheme}}) #' @param ... Ignored #' #' @return A ggplot object #' #' @importFrom rlang is_na #' @importFrom SeuratObject %NA% %!NA% #' @importFrom RColorBrewer brewer.pal.info #' @importFrom ggplot2 aes_string geom_point geom_polygon ggplot guides #' guide_legend scale_alpha_manual scale_color_manual scale_fill_brewer #' scale_fill_manual #' #' @keywords internal #' #' @export #' SingleImagePlot <- function( data, col.by = NA, col.factor = TRUE, cols = NULL, shuffle.cols = FALSE, size = 0.1, molecules = NULL, mols.size = 0.1, mols.cols = NULL, mols.alpha = 1.0, alpha = molecules %iff% 0.3 %||% 0.6, border.color = 'white', border.size = NULL, na.value = 'grey50', dark.background = TRUE, ... ) { # Check input data if (!is_na(x = data)) { if (!all(c('x', 'y', 'cell', 'boundary') %in% colnames(x = data))) { stop("Invalid data coordinates") } if (!is_na(x = col.by)) { if (!col.by %in% colnames(x = data)) { warning( "Cannot find 'col.by' ('", col.by, "') in data coordinates", immediate.
= TRUE ) col.by <- NA } else if (isTRUE(x = col.factor) && !is.factor(x = data[[col.by]])) { data[[col.by]] <- factor( x = data[[col.by]], levels = unique(x = data[[col.by]]) ) } else if (isFALSE(x = col.factor) && is.factor(x = data[[col.by]])) { data[[col.by]] <- as.vector(x = data[[col.by]]) } } if (is_na(x = col.by) && !is.null(x = cols)) { col.by <- RandomName(length = 7L) data[[col.by]] <- TRUE } if (!is.factor(x = data$boundary)) { data$boundary <- factor( x = data$boundary, levels = unique(x = data$boundary) ) } # Determine alphas if (is.na(x = alpha)) { alpha <- 1L } alpha.min <- ifelse( test = alpha < 1L, yes = 1 * (10 ^ .FindE(x = alpha)), no = 0.1 ) if (alpha.min == alpha) { alpha.min <- 1 * (10 ^ (.FindE(x = alpha) - 1)) } alphas <- .Cut( min = alpha.min, max = alpha, n = length(x = levels(x = data$boundary)) ) } # Assemble plot plot <- ggplot( data = data %NA% NULL, mapping = aes_string( x = 'y', y = 'x', alpha = 'boundary', fill = col.by %NA% NULL ) ) if (!is_na(x = data)) { plot <- plot + scale_alpha_manual(values = alphas) + if (anyDuplicated(x = data$cell)) { if (is.null(border.size)) { border.size <- 0.3 } geom_polygon( mapping = aes_string(group = 'cell'), color = border.color, size = border.size ) } else { # Default to no borders when plotting centroids if (is.null(border.size)) { border.size <- 0.0 } geom_point( shape = 21, color = border.color, stroke = border.size, size = size ) } if (!is.null(x = cols)) { plot <- plot + if (is.numeric(x = cols) || cols[1L] %in% rownames(x = brewer.pal.info)) { palette <- brewer.pal(n = length(x = levels(x = data[[col.by]])), cols) if (length(palette) < length(x = levels(x = data[[col.by]]))) { num.blank <- length(x = levels(x = data[[col.by]])) - length(palette) palette <- c(palette, rep(na.value, num.blank)) } if (isTRUE(shuffle.cols)) { palette <- sample(palette) } scale_fill_manual(values = palette, na.value = na.value) } else if (cols[1] %in% DiscretePalette(n = NULL)) { scale_fill_manual( values = DiscretePalette( n = length(x = levels(x = data[[col.by]])), palette = cols, shuffle = shuffle.cols ), na.value = na.value ) } else { if (isTRUE(shuffle.cols)) { cols <- sample(cols) } scale_fill_manual(values = cols, na.value = na.value) } } if (length(x = levels(x = data$boundary)) == 1L) { plot <- plot + guides(alpha = 'none') } # Adjust guides if (isTRUE(x = col.factor) && length(x = levels(x = data[[col.by]])) <= 1L) { plot <- plot + guides(fill = 'none') } } # Add molecule sets if (is.data.frame(x = molecules)) { if (all(c('x', 'y', 'molecule') %in% colnames(x = molecules))) { if (!is.factor(x = molecules$molecule)) { molecules$molecule <- factor( x = molecules$molecule, levels = unique(x = molecules$molecule) ) } plot <- plot + geom_point( mapping = aes_string(fill = NULL, alpha = NULL, color = "molecule"), data = molecules, size = mols.size, alpha = mols.alpha, show.legend = c(color = TRUE, fill = FALSE, alpha = FALSE) ) + guides(color = guide_legend(override.aes = list(size = 3L))) + scale_color_manual( name = 'Molecules', values = mols.cols %||% brewer.pal( n = length(x = levels(x = molecules$molecule)), name = "Set1" ), guide = guide_legend() ) } else { warning("Invalid molecule coordinates", immediate. = TRUE) } } if (isTRUE(dark.background)) { plot <- plot + DarkTheme() } return(plot) } # A single polygon plot # # @param data Data to plot # @param group.by Grouping variable # @param ... 
Extra parameters passed to \code{\link[cowplot]{theme_cowplot}} # # @return A ggplot-based plot # #' @importFrom cowplot theme_cowplot #' @importFrom ggplot2 ggplot aes_string geom_polygon # # @seealso \code{\link[cowplot]{theme_cowplot}} # SinglePolyPlot <- function(data, group.by, ...) { plot <- ggplot(data = data, mapping = aes_string(x = 'x', y = 'y')) + geom_polygon(mapping = aes_string(fill = group.by, group = 'cell')) + coord_fixed() + theme_cowplot(...) return(plot) } #' A single heatmap from ggplot2 using geom_raster #' #' @param data A matrix or data frame with data to plot #' @param raster switch between geom_raster and geom_tile #' @param cell.order ... #' @param feature.order ... #' @param colors A vector of colors to use #' @param disp.min Minimum display value (all values below are clipped) #' @param disp.max Maximum display value (all values above are clipped) #' @param limits A two-length numeric vector with the limits for colors on the plot #' @param group.by A vector to group cells by, should be one grouping identity per cell #' #' @return A ggplot2 object # #' @importFrom ggplot2 ggplot aes_string geom_raster scale_fill_gradient #' scale_fill_gradientn theme element_blank labs geom_point guides #' guide_legend geom_tile #' #' @keywords internal #' #' @export # SingleRasterMap <- function( data, raster = TRUE, cell.order = NULL, feature.order = NULL, colors = PurpleAndYellow(), disp.min = -2.5, disp.max = 2.5, limits = NULL, group.by = NULL ) { data <- MinMax(data = data, min = disp.min, max = disp.max) data <- Melt(x = t(x = data)) colnames(x = data) <- c('Feature', 'Cell', 'Expression') if (!is.null(x = feature.order)) { data$Feature <- factor(x = data$Feature, levels = unique(x = feature.order)) } if (!is.null(x = cell.order)) { data$Cell <- factor(x = data$Cell, levels = unique(x = cell.order)) } if (!is.null(x = group.by)) { data$Identity <- group.by[data$Cell] } limits <- limits %||% c(min(data$Expression), max(data$Expression)) if (length(x = limits) != 2 || !is.numeric(x = limits)) { stop("limits' must be a two-length numeric vector") } my_geom <- ifelse(test = raster, yes = geom_raster, no = geom_tile) plot <- ggplot(data = data) + my_geom(mapping = aes_string(x = 'Cell', y = 'Feature', fill = 'Expression')) + theme(axis.text.x = element_blank(), axis.ticks.x = element_blank()) + scale_fill_gradientn(limits = limits, colors = colors, na.value = "white") + labs(x = NULL, y = NULL, fill = group.by %iff% 'Expression') + WhiteBackground() + NoAxes(keep.text = TRUE) if (!is.null(x = group.by)) { plot <- plot + geom_point( mapping = aes_string(x = 'Cell', y = 'Feature', color = 'Identity'), alpha = 0 ) + guides(color = guide_legend(override.aes = list(alpha = 1))) } return(plot) } #' Base plotting function for all Spatial plots #' #' @param data Data.frame with info to be plotted #' @param image \code{SpatialImage} object to be plotted #' @param cols Vector of colors, each color corresponds to an identity class. #' This may also be a single character #' or numeric value corresponding to a palette as specified by #' \code{\link[RColorBrewer]{brewer.pal.info}}. By default, ggplot2 assigns #' colors #' @param image.alpha Adjust the opacity of the background images. Set to 0 to #' remove. #' @param pt.alpha Adjust the opacity of the points if plotting a #' \code{SpatialDimPlot} #' @param crop Crop the plot in to focus on points plotted. Set to \code{FALSE} #' to show entire background image. 
#' @param pt.size.factor Sets the size of the points relative to spot.radius #' @param stroke Control the width of the border around the spots #' @param col.by Mapping variable for the point color #' @param alpha.by Mapping variable for the point alpha value #' @param cells.highlight A list of character or numeric vectors of cells to #' highlight. If only one group of cells desired, can simply pass a vector #' instead of a list. If set, colors selected cells to the color(s) in #' cols.highlight #' @param cols.highlight A vector of colors to highlight the cells as; ordered #' the same as the groups in cells.highlight; last color corresponds to #' unselected cells. #' @param geom Switch between normal spatial geom and geom to enable hover #' functionality #' @param na.value Color for spots with NA values #' #' @return A ggplot2 object #' #' @importFrom tibble tibble #' @importFrom ggplot2 ggplot aes_string coord_fixed geom_point xlim ylim #' coord_cartesian labs theme_void theme scale_fill_brewer #' #' @keywords internal #' #' @export #' SingleSpatialPlot <- function( data, image, cols = NULL, image.alpha = 1, pt.alpha = NULL, crop = TRUE, pt.size.factor = NULL, stroke = 0.25, col.by = NULL, alpha.by = NULL, cells.highlight = NULL, cols.highlight = c('#DE2D26', 'grey50'), geom = c('spatial', 'interactive', 'poly'), na.value = 'grey50' ) { geom <- match.arg(arg = geom) if (!is.null(x = col.by) && !col.by %in% colnames(x = data)) { warning("Cannot find '", col.by, "' in data, not coloring", call. = FALSE, immediate. = TRUE) col.by <- NULL } col.by <- col.by %iff% paste0("`", col.by, "`") alpha.by <- alpha.by %iff% paste0("`", alpha.by, "`") if (!is.null(x = cells.highlight)) { highlight.info <- SetHighlight( cells.highlight = cells.highlight, cells.all = rownames(x = data), sizes.highlight = pt.size.factor, cols.highlight = cols.highlight[1], col.base = cols.highlight[2] ) order <- highlight.info$plot.order data$highlight <- highlight.info$highlight col.by <- 'highlight' levels(x = data$ident) <- c(order, setdiff(x = levels(x = data$ident), y = order)) data <- data[order(data$ident), ] } plot <- ggplot(data = data, aes_string( x = colnames(x = data)[2], y = colnames(x = data)[1], fill = col.by, alpha = alpha.by )) plot <- switch( EXPR = geom, 'spatial' = { if (is.null(x = pt.alpha)) { plot <- plot + geom_spatial( point.size.factor = pt.size.factor, data = data, image = image, image.alpha = image.alpha, crop = crop, stroke = stroke, ) } else { plot <- plot + geom_spatial( point.size.factor = pt.size.factor, data = data, image = image, image.alpha = image.alpha, crop = crop, stroke = stroke, alpha = pt.alpha ) } plot + coord_fixed() + theme(aspect.ratio = 1) }, 'interactive' = { plot + geom_spatial_interactive( data = tibble(grob = list(GetImage(object = image, mode = 'grob'))), mapping = aes_string(grob = 'grob'), x = 0.5, y = 0.5 ) + geom_point(mapping = aes_string(color = col.by)) + xlim(0, ncol(x = image)) + ylim(nrow(x = image), 0) + coord_cartesian(expand = FALSE) }, 'poly' = { data$cell <- rownames(x = data) data[, c('x', 'y')] <- NULL data <- merge( x = data, y = GetTissueCoordinates(object = image, qhulls = TRUE), by = "cell" ) plot + geom_polygon( data = data, mapping = aes_string(fill = col.by, group = 'cell') ) + coord_fixed() + theme_cowplot() }, stop("Unknown geom, choose from 'spatial' or 'interactive'", call. 
= FALSE) ) if (!is.null(x = cells.highlight)) { plot <- plot + scale_fill_manual(values = cols.highlight) } if (!is.null(x = cols) && is.null(x = cells.highlight)) { if (length(x = cols) == 1 && (is.numeric(x = cols) || cols %in% rownames(x = brewer.pal.info))) { scale <- scale_fill_brewer(palette = cols, na.value = na.value) } else if (length(x = cols) == 1 && (cols %in% c('alphabet', 'alphabet2', 'glasbey', 'polychrome', 'stepped'))) { colors <- DiscretePalette(length(unique(data[[col.by]])), palette = cols) scale <- scale_fill_manual(values = colors, na.value = na.value) } else { cols <- cols[names(x = cols) %in% data[[gsub(pattern = '`', replacement = "", x = col.by)]]] scale <- scale_fill_manual(values = cols, na.value = na.value) } plot <- plot + scale } plot <- plot + NoAxes() + theme(panel.background = element_blank()) return(plot) } # Reimplementation of ggplot2 coord$transform # # @param data A data frame with x-coordinates in the first column and y-coordinates # in the second # @param xlim,ylim X- and Y-limits for the transformation, must be two-length # numeric vectors # # @return \code{data} with transformed coordinates # #' @importFrom ggplot2 transform_position #' @importFrom scales rescale squish_infinite # Transform <- function(data, xlim = c(-Inf, Inf), ylim = c(-Inf, Inf)) { # Quick input argument checking if (!all(sapply(X = list(xlim, ylim), FUN = length) == 2)) { stop("'xlim' and 'ylim' must be two-length numeric vectors", call. = FALSE) } # Save original names df.names <- colnames(x = data) colnames(x = data)[1:2] <- c('x', 'y') # Rescale the X and Y values data <- transform_position( df = data, trans_x = function(df) { return(rescale(x = df, from = xlim)) }, trans_y = function(df) { return(rescale(x = df, from = ylim)) } ) # Something that ggplot2 does data <- transform_position( df = data, trans_x = squish_infinite, trans_y = squish_infinite ) # Restore original names colnames(x = data) <- df.names return(data) } #%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% # S4 Methods #%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% setMethod( f = '.PrepImageData', signature = c(data = 'data.frame', cells = 'rle'), definition = function(data, cells, ...) { data <- sapply( X = colnames(x = data), FUN = function(x) { j <- data[[x]] names(x = j) <- rownames(x = data) return(.PrepImageData(data = j, cells = cells, name = x)) }, simplify = FALSE, USE.NAMES = TRUE ) return(do.call(what = 'cbind', args = data)) } ) #' @importFrom methods getMethod setMethod( f = '.PrepImageData', signature = c(data = 'factor', cells = 'rle'), definition = function(data, cells, name, ...) { f <- getMethod( f = '.PrepImageData', signature = c(data = 'vector', cells = 'rle') ) return(f(data = data, cells = cells, name = name, ...)) } ) setMethod( f = '.PrepImageData', signature = c(data = 'list', cells = 'rle'), definition = function(data, cells, ...) { .NotYetImplemented() } ) setMethod( f = '.PrepImageData', signature = c(data = 'NULL', cells = 'rle'), definition = function(data, cells, ...) { return(SeuratObject::EmptyDF(n = sum(cells$lengths))) } ) setMethod( f = '.PrepImageData', signature = c(data = 'vector', cells = 'rle'), definition = function(data, cells, name, ...) { name <- as.character(x = name) if (name %in% c('x', 'y', 'cell')) { stop("'name' cannot be 'x', 'y', or 'cell'", call. 
= FALSE) } cnames <- cells$values if (is.null(x = names(x = data))) { mlen <- min(sapply(X = list(data, cnames), FUN = length)) names(x = data)[1:mlen] <- cnames[1:mlen] } if (anyDuplicated(x = names(x = data))) { dup <- duplicated(x = names(x = data)) warning( sum(dup), ifelse(test = sum(dup) == 1, yes = ' cell', no = ' cells'), ' duplicated, using only the first occurrence', call. = FALSE, immediate. = TRUE ) } if (length(x = data) < length(x = cnames)) { mcells <- setdiff(x = cnames, y = names(x = data)) warning( "Missing data for some cells, filling with NA", call. = FALSE, immediate. = TRUE ) data[mcells] <- NA } else if (length(x = data) > length(x = cnames)) { warning( "More cells provided than present", call. = FALSE, immediate. = TRUE ) } data <- data.frame(rep.int(x = data[cnames], times = cells$lengths)) colnames(x = data) <- name return(data) } ) Seurat/R/preprocessing.R0000644000176200001440000054106614525500037014741 0ustar liggesusers#' @include generics.R #' @importFrom progressr progressor #' NULL #%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% # Functions #%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% globalVariables( names = c('fov', 'cell_ID', 'qv'), package = 'Seurat', add = TRUE ) #' Calculate the Barcode Distribution Inflection #' #' This function calculates an adaptive inflection point ("knee") of the barcode distribution #' for each sample group. This is useful for determining a threshold for removing #' low-quality samples. #' #' The function operates by calculating the slope of the barcode number vs. rank #' distribution, and then finding the point at which the distribution changes most #' steeply (the "knee"). Of note, this calculation often must be restricted in the #' range over which it is performed, so `threshold` parameters are provided to restrict the #' range of the calculation based on the rank of the barcodes. [BarcodeInflectionsPlot()] #' is provided as a convenience function to visualize and test different thresholds and #' thus provide more sensible end results. #' #' See [BarcodeInflectionsPlot()] to visualize the calculated inflection points and #' [SubsetByBarcodeInflections()] to subsequently subset the Seurat object. #' #' @param object Seurat object #' @param barcode.column Column to use as proxy for barcodes ("nCount_RNA" by default) #' @param group.column Column to group by ("orig.ident" by default) #' @param threshold.high Ignore barcodes of rank above this threshold in inflection calculation #' @param threshold.low Ignore barcodes of rank below this threshold in inflection calculation #' #' @return Returns Seurat object with a new list in the `tools` slot, `CalculateBarcodeInflections` with values: #' #' * `barcode_distribution` - contains the full barcode distribution across the entire dataset #' * `inflection_points` - the calculated inflection points within the thresholds #' * `threshold_values` - the provided (or default) threshold values to search within for inflections #' * `cells_pass` - the cells that pass the inflection point calculation #' #' @importFrom methods slot #' @importFrom stats ave aggregate #' #' @export #' @concept preprocessing #' #' @author Robert A.
Amezquita, \email{robert.amezquita@fredhutch.org} #' @seealso \code{\link{BarcodeInflectionsPlot}} \code{\link{SubsetByBarcodeInflections}} #' #' @examples #' data("pbmc_small") #' CalculateBarcodeInflections(pbmc_small, group.column = 'groups') #' CalculateBarcodeInflections <- function( object, barcode.column = "nCount_RNA", group.column = "orig.ident", threshold.low = NULL, threshold.high = NULL ) { ## Check that barcode.column exists in meta.data if (!(barcode.column %in% colnames(x = object[[]]))) { stop("`barcode.column` specified not present in Seurat object provided") } # Calculation of barcode distribution ## Append rank by grouping x umi column # barcode_dist <- as.data.frame(object@meta.data)[, c(group.column, barcode.column)] barcode_dist <- object[[c(group.column, barcode.column)]] barcode_dist <- barcode_dist[do.call(what = order, args = barcode_dist), ] # order by columns left to right barcode_dist$rank <- ave( x = barcode_dist[, barcode.column], barcode_dist[, group.column], FUN = function(x) { return(rev(x = order(x))) } ) barcode_dist <- barcode_dist[order(barcode_dist[, group.column], barcode_dist[, 'rank']), ] ## calculate rawdiff and append per group top <- aggregate( x = barcode_dist[, barcode.column], by = list(barcode_dist[, group.column]), FUN = function(x) { return(c(0, diff(x = log10(x = x + 1)))) })$x bot <- aggregate( x = barcode_dist[, 'rank'], by = list(barcode_dist[, group.column]), FUN = function(x) { return(c(0, diff(x = x))) } )$x barcode_dist$rawdiff <- unlist(x = mapply( FUN = function(x, y) { return(ifelse(test = is.na(x = x / y), yes = 0, no = x / y)) }, x = top, y = bot )) # Calculation of inflection points ## Set thresholds for rank of barcodes to ignore threshold.low <- threshold.low %||% 1 threshold.high <- threshold.high %||% max(barcode_dist$rank) ## Subset the barcode distribution by thresholds barcode_dist_sub <- barcode_dist[barcode_dist$rank > threshold.low & barcode_dist$rank < threshold.high, ] ## Calculate inflection points ## note: if thresholds are s.t. it produces the same length across both groups, ## aggregate will create a data.frame with x.* columns, where * is the length ## using the same combine approach will yield non-symmetrical results! 
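## For example: when the thresholds leave equal group sizes, aggregate() returns
## a matrix in its 'x' component, while unequal group sizes yield a list; both
## shapes are handled immediately after the call below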
whichmin_list <- aggregate( x = barcode_dist_sub[, 'rawdiff'], by = list(barcode_dist_sub[, group.column]), FUN = function(x) { return(x == min(x)) } )$x ## workaround for aggregate behavior noted above if (is.list(x = whichmin_list)) { # uneven lengths is_inflection <- unlist(x = whichmin_list) } else if (is.matrix(x = whichmin_list)) { # even lengths is_inflection <- as.vector(x = t(x = whichmin_list)) } tmp <- cbind(barcode_dist_sub, is_inflection) # inflections <- tmp[tmp$is_inflection == TRUE, c(group.column, barcode.column, "rank")] inflections <- tmp[which(x = tmp$is_inflection), c(group.column, barcode.column, 'rank')] # Use inflection point for what cells to keep ## use the inflection points to cut the subsetted dist to what to keep ## keep only the barcodes above the inflection points keep <- unlist(x = lapply( X = whichmin_list, FUN = function(x) { keep <- !x if (sum(keep) == length(x = keep)) { return(keep) # prevents bug in case of keeping all cells } # toss <- which(keep == FALSE):length(x = keep) # the end cells below knee toss <- which(x = !keep):length(x = keep) keep[toss] <- FALSE return(keep) } )) barcode_dist_sub_keep <- barcode_dist_sub[keep, ] cells_keep <- rownames(x = barcode_dist_sub_keep) # Bind thresholds to keep track of where they are placed thresholds <- data.frame( threshold = c('threshold.low', 'threshold.high'), rank = c(threshold.low, threshold.high) ) # Combine relevant info together ## Combine Barcode dist, inflection point, and cells to keep into list info <- list( barcode_distribution = barcode_dist, inflection_points = inflections, threshold_values = thresholds, cells_pass = cells_keep ) # save results into object Tool(object = object) <- info return(object) } #' Demultiplex samples based on data from cell 'hashing' #' #' Assign sample-of-origin for each cell, annotate doublets. #' #' @param object Seurat object. Assumes that the hash tag oligo (HTO) data has been added and normalized. #' @param assay Name of the Hashtag assay (HTO by default) #' @param positive.quantile The quantile of inferred 'negative' distribution for each hashtag - over which the cell is considered 'positive'. Default is 0.99 #' @param init Initial number of clusters for hashtags. Default is the # of hashtag oligo names + 1 (to account for negatives) #' @param kfunc Clustering function for initial hashtag grouping. Default is "clara" for fast k-medoids clustering on large applications, also support "kmeans" for kmeans clustering #' @param nsamples Number of samples to be drawn from the dataset used for clustering, for kfunc = "clara" #' @param nstarts nstarts value for k-means clustering (for kfunc = "kmeans"). 100 by default #' @param seed Sets the random seed. 
If NULL, seed is not set #' @param verbose Prints the output #' #' @return The Seurat object with the following demultiplexed information stored in the meta data: #' \describe{ #' \item{hash.maxID}{Name of hashtag with the highest signal} #' \item{hash.secondID}{Name of hashtag with the second highest signal} #' \item{hash.margin}{The difference between signals for hash.maxID and hash.secondID} #' \item{classification}{Classification result, with doublets/multiplets named by the top two highest hashtags} #' \item{classification.global}{Global classification result (singlet, doublet or negative)} #' \item{hash.ID}{Classification result where doublet IDs are collapsed} #' } #' #' @importFrom cluster clara #' @importFrom Matrix colSums #' @importFrom fitdistrplus fitdist #' @importFrom stats pnbinom kmeans #' #' @export #' @concept preprocessing #' #' @seealso \code{\link{HTOHeatmap}} #' #' @examples #' \dontrun{ #' object <- HTODemux(object) #' } #' HTODemux <- function( object, assay = "HTO", positive.quantile = 0.99, init = NULL, nstarts = 100, kfunc = "clara", nsamples = 100, seed = 42, verbose = TRUE ) { if (!is.null(x = seed)) { set.seed(seed = seed) } #initial clustering assay <- assay %||% DefaultAssay(object = object) data <- GetAssayData(object = object, assay = assay) counts <- GetAssayData( object = object, assay = assay, slot = 'counts' )[, colnames(x = object)] counts <- as.matrix(x = counts) ncenters <- init %||% (nrow(x = data) + 1) switch( EXPR = kfunc, 'kmeans' = { init.clusters <- kmeans( x = t(x = GetAssayData(object = object, assay = assay)), centers = ncenters, nstart = nstarts ) #identify positive and negative signals for all HTO Idents(object = object, cells = names(x = init.clusters$cluster)) <- init.clusters$cluster }, 'clara' = { #use fast k-medoid clustering init.clusters <- clara( x = t(x = GetAssayData(object = object, assay = assay)), k = ncenters, samples = nsamples ) #identify positive and negative signals for all HTO Idents(object = object, cells = names(x = init.clusters$clustering), drop = TRUE) <- init.clusters$clustering }, stop("Unknown k-means function ", kfunc, ", please choose from 'kmeans' or 'clara'") ) #average hto signals per cluster #work around so we don't average all the RNA levels which takes time average.expression <- suppressWarnings( AverageExpression( object = object, assays = assay, verbose = FALSE )[[assay]] ) #checking for any cluster with all zero counts for any barcode if (sum(average.expression == 0) > 0) { stop("Cells with zero counts exist as a cluster.") } #create a matrix to store classification result discrete <- GetAssayData(object = object, assay = assay) discrete[discrete > 0] <- 0 # for each HTO, we will use the minimum cluster for fitting for (iter in rownames(x = data)) { values <- counts[iter, colnames(object)] #commented out if we take all but the top cluster as background #values_negative=values[setdiff(object@cell.names,WhichCells(object,which.max(average.expression[iter,])))] values.use <- values[WhichCells( object = object, idents = levels(x = Idents(object = object))[[which.min(x = average.expression[iter, ])]] )] fit <- suppressWarnings(expr = fitdist(data = values.use, distr = "nbinom")) cutoff <- as.numeric(x = quantile(x = fit, probs = positive.quantile)$quantiles[1]) discrete[iter, names(x = which(x = values > cutoff))] <- 1 if (verbose) { message(paste0("Cutoff for ", iter, " : ", cutoff, " reads")) } } # now assign cells to HTO based on discretized values npositive <- colSums(x = discrete) 
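# classify each cell by how many HTOs exceeded their fitted negative-distribution
# cutoff: 0 positive HTOs -> Negative, exactly 1 -> Singlet, more than 1 -> Doublet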
classification.global <- npositive classification.global[npositive == 0] <- "Negative" classification.global[npositive == 1] <- "Singlet" classification.global[npositive > 1] <- "Doublet" donor.id = rownames(x = data) hash.max <- apply(X = data, MARGIN = 2, FUN = max) hash.maxID <- apply(X = data, MARGIN = 2, FUN = which.max) hash.second <- apply(X = data, MARGIN = 2, FUN = MaxN, N = 2) hash.maxID <- as.character(x = donor.id[sapply( X = 1:ncol(x = data), FUN = function(x) { return(which(x = data[, x] == hash.max[x])[1]) } )]) hash.secondID <- as.character(x = donor.id[sapply( X = 1:ncol(x = data), FUN = function(x) { return(which(x = data[, x] == hash.second[x])[1]) } )]) hash.margin <- hash.max - hash.second doublet_id <- sapply( X = 1:length(x = hash.maxID), FUN = function(x) { return(paste(sort(x = c(hash.maxID[x], hash.secondID[x])), collapse = "_")) } ) # doublet_names <- names(x = table(doublet_id))[-1] # Not used classification <- classification.global classification[classification.global == "Negative"] <- "Negative" classification[classification.global == "Singlet"] <- hash.maxID[which(x = classification.global == "Singlet")] classification[classification.global == "Doublet"] <- doublet_id[which(x = classification.global == "Doublet")] classification.metadata <- data.frame( hash.maxID, hash.secondID, hash.margin, classification, classification.global ) colnames(x = classification.metadata) <- paste( assay, c('maxID', 'secondID', 'margin', 'classification', 'classification.global'), sep = '_' ) object <- AddMetaData(object = object, metadata = classification.metadata) Idents(object) <- paste0(assay, '_classification') # Idents(object, cells = rownames(object@meta.data[object@meta.data$classification.global == "Doublet", ])) <- "Doublet" doublets <- rownames(x = object[[]])[which(object[[paste0(assay, "_classification.global")]] == "Doublet")] Idents(object = object, cells = doublets) <- 'Doublet' # object@meta.data$hash.ID <- Idents(object) object$hash.ID <- Idents(object = object) return(object) } #' Calculate pearson residuals of features not in the scale.data #' #' This function calls sctransform::get_residuals. #' #' @param object A seurat object #' @param features Name of features to add into the scale.data #' @param assay Name of the assay of the seurat object generated by SCTransform #' @param umi.assay Name of the assay of the seurat object containing UMI matrix #' and the default is RNA #' @param clip.range Numeric of length two specifying the min and max values the #' Pearson residual will be clipped to #' @param replace.value Recalculate residuals for all features, even if they are #' already present. Useful if you want to change the clip.range. #' @param na.rm For features where there is no feature model stored, return NA #' for residual value in scale.data when na.rm = FALSE. When na.rm is TRUE, only #' return residuals for features with a model stored for all cells. 
#' @param verbose Whether to print messages and progress bars #' #' @return Returns a Seurat object containing Pearson residuals of added #' features in its scale.data #' #' @importFrom sctransform get_residuals #' @importFrom matrixStats rowAnyNAs #' #' @export #' @concept preprocessing #' #' @seealso \code{\link[sctransform]{get_residuals}} #' #' @examples #' \dontrun{ #' data("pbmc_small") #' pbmc_small <- SCTransform(object = pbmc_small, variable.features.n = 20) #' pbmc_small <- GetResidual(object = pbmc_small, features = c('MS4A1', 'TCL1A')) #' } #' GetResidual <- function( object, features, assay = NULL, umi.assay = "RNA", clip.range = NULL, replace.value = FALSE, na.rm = TRUE, verbose = TRUE ) { assay <- assay %||% DefaultAssay(object = object) if (IsSCT(assay = object[[assay]])) { object[[assay]] <- as(object[[assay]], 'SCTAssay') } if (!inherits(x = object[[assay]], what = "SCTAssay")) { stop(assay, " assay was not generated by SCTransform") } sct.models <- levels(x = object[[assay]]) if (length(x = sct.models) == 0) { warning("SCT model not present in assay", call. = FALSE, immediate. = TRUE) return(object) } possible.features <- unique(x = unlist(x = lapply(X = sct.models, FUN = function(x) { rownames(x = SCTResults(object = object[[assay]], slot = "feature.attributes", model = x)) } ))) bad.features <- setdiff(x = features, y = possible.features) if (length(x = bad.features) > 0) { warning("The following requested features are not present in any models: ", paste(bad.features, collapse = ", "), call. = FALSE) features <- intersect(x = features, y = possible.features) } features.orig <- features if (na.rm) { # only compute residuals when feature model info is present in all features <- names(x = which(x = table(unlist(x = lapply( X = sct.models, FUN = function(x) { rownames(x = SCTResults(object = object[[assay]], slot = "feature.attributes", model = x)) } ))) == length(x = sct.models))) if (length(x = features) == 0) { return(object) } } features <- intersect(x = features.orig, y = features) if (length(x = sct.models) > 1 && verbose) { message( "This SCTAssay contains multiple SCT models. 
Computing residuals for cells using different models" ) } if (!umi.assay %in% Assays(object = object) || length(x = Layers(object = object[[umi.assay]], search = 'counts')) == 0) { return(object) } if (inherits(x = object[[umi.assay]], what = 'Assay')) { new.residuals <- lapply( X = sct.models, FUN = function(x) { GetResidualSCTModel( object = object, assay = assay, SCTModel = x, new_features = features, replace.value = replace.value, clip.range = clip.range, verbose = verbose ) } ) } else if (inherits(x = object[[umi.assay]], what = 'Assay5')) { new.residuals <- lapply( X = sct.models, FUN = function(x) { FetchResidualSCTModel(object = object, assay = assay, umi.assay = umi.assay, SCTModel = x, new_features = features, replace.value = replace.value, clip.range = clip.range, verbose = verbose) } ) } existing.data <- GetAssayData(object = object, slot = 'scale.data', assay = assay) all.features <- union(x = rownames(x = existing.data), y = features) new.scale <- matrix( data = NA, nrow = length(x = all.features), ncol = ncol(x = object), dimnames = list(all.features, Cells(x = object)) ) if (nrow(x = existing.data) > 0){ new.scale[1:nrow(x = existing.data), ] <- existing.data } if (length(x = new.residuals) == 1 & is.list(x = new.residuals)) { new.residuals <- new.residuals[[1]] } else { new.residuals <- Reduce(cbind, new.residuals) } new.scale[rownames(x = new.residuals), colnames(x = new.residuals)] <- new.residuals if (na.rm) { new.scale <- new.scale[!rowAnyNAs(x = new.scale), ] } object <- SetAssayData( object = object, assay = assay, slot = "scale.data", new.data = new.scale ) if (any(!features.orig %in% rownames(x = new.scale))) { bad.features <- features.orig[which(!features.orig %in% rownames(x = new.scale))] warning("Residuals not computed for the following requested features: ", paste(bad.features, collapse = ", "), call. = FALSE) } return(object) } #' Load a 10x Genomics Visium Spatial Experiment into a \code{Seurat} object #' #' @inheritParams Read10X #' @inheritParams SeuratObject::CreateSeuratObject #' @param data.dir Directory containing the H5 file specified by \code{filename} #' and the image data in a subdirectory called \code{spatial} #' @param filename Name of H5 file containing the feature barcode matrix #' @param slice Name for the stored image of the tissue slice #' @param filter.matrix Only keep spots that have been determined to be over #' tissue #' @param to.upper Converts all feature names to upper case. Can be useful when #' analyses require comparisons between human and mouse gene names for example. #' @param ... Arguments passed to \code{\link{Read10X_h5}} #' @param image Name of image to pull the coordinates from #' #' @return A \code{Seurat} object #' #' @importFrom png readPNG #' @importFrom grid rasterGrob #' @importFrom jsonlite fromJSON #' @importFrom purrr imap #' #' @export #' @concept preprocessing #' #' @examples #' \dontrun{ #' data_dir <- 'path/to/data/directory' #' list.files(data_dir) # Should show filtered_feature_bc_matrix.h5 #' Load10X_Spatial(data.dir = data_dir) #' } #' Load10X_Spatial <- function( data.dir, filename = 'filtered_feature_bc_matrix.h5', assay = 'Spatial', slice = 'slice1', filter.matrix = TRUE, to.upper = FALSE, image = NULL, ... ) { if (length(x = data.dir) > 1) { warning("'Load10X_Spatial' accepts only one 'data.dir'", immediate. = TRUE) data.dir <- data.dir[1] } data <- Read10X_h5(filename = file.path(data.dir, filename), ...) 
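# 'data' is either a single counts matrix or, when the run contains multiple
# data types, a named list of matrices (e.g. Gene Expression and Antibody
# Capture); both cases are handled below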
if (to.upper) { data <- imap(data, ~{ rownames(.x) <- toupper(x = rownames(.x)) .x }) } if (is.list(data) & "Antibody Capture" %in% names(data)) { matrix_gex <- data$`Gene Expression` matrix_protein <- data$`Antibody Capture` object <- CreateSeuratObject(counts = matrix_gex, assay = assay) object_protein <- CreateAssayObject(counts = matrix_protein) object[["Protein"]] <- object_protein } else { object <- CreateSeuratObject(counts = data, assay = assay) } if (is.null(x = image)) { image <- Read10X_Image(image.dir = file.path(data.dir,"spatial"), filter.matrix = filter.matrix) } else { if (!inherits(x = image, what = "VisiumV1")) stop("Image must be an object of class 'VisiumV1'.") } image <- image[Cells(x = object)] DefaultAssay(object = image) <- assay object[[slice]] <- image # if using the meta-data available for probes add to @misc slot file_path <- file.path(data.dir, filename) infile <- hdf5r::H5File$new(filename = file_path, mode = 'r') if("matrix/features/probe_region" %in% hdf5r::list.objects(infile)) { probe.metadata <- Read10X_probe_metadata(data.dir, filename) Misc(object = object[['Spatial']], slot = "probe_metadata") <- probe.metadata } return(object) } #' Read10x Probe Metadata #' #' This function reads the probe metadata from a 10x Genomics probe barcode matrix file in HDF5 format. #' #' @param data.dir The directory where the file is located. #' @param filename The name of the file containing the raw probe barcode matrix in HDF5 format. The default filename is 'raw_probe_bc_matrix.h5'. #' #' @return Returns a data.frame containing the probe metadata. #' #' @export Read10X_probe_metadata <- function( data.dir, filename = 'raw_probe_bc_matrix.h5' ) { if (!requireNamespace('hdf5r', quietly = TRUE)) { stop("Please install hdf5r to read HDF5 files") } file.path = paste0(data.dir,"/", filename) if (!file.exists(file.path)) { stop("File not found") } infile <- hdf5r::H5File$new(filename = file.path, mode = 'r') if("matrix/features/probe_region" %in% hdf5r::list.objects(infile)) { probe.name <- infile[['matrix/features/name']][] probe.region<- infile[['matrix/features/probe_region']][] meta.data <- data.frame(probe.name, probe.region) return(meta.data) } } #' Load STARmap data #' #' @param data.dir location of data directory that contains the counts matrix, #' gene name, qhull, and centroid files. #' @param counts.file name of file containing the counts matrix (csv) #' @param gene.file name of file containing the gene names (csv) #' @param qhull.file name of file containing the hull coordinates (tsv) #' @param centroid.file name of file containing the centroid positions (tsv) #' @param assay Name of assay to associate spatial data to #' @param image Name of "image" object storing spatial coordinates #' #' @return A \code{\link{Seurat}} object #' #' @importFrom methods new #' @importFrom utils read.csv read.table #' #' @seealso \code{\link{STARmap}} #' #' @export #' @concept preprocessing #' LoadSTARmap <- function( data.dir, counts.file = "cell_barcode_count.csv", gene.file = "genes.csv", qhull.file = "qhulls.tsv", centroid.file = "centroids.tsv", assay = "Spatial", image = "image" ) { if (!dir.exists(paths = data.dir)) { stop("Cannot find directory ", data.dir, call. 
= FALSE) } counts <- read.csv( file = file.path(data.dir, counts.file), as.is = TRUE, header = FALSE ) gene.names <- read.csv( file = file.path(data.dir, gene.file), as.is = TRUE, header = FALSE ) qhulls <- read.table( file = file.path(data.dir, qhull.file), sep = '\t', col.names = c('cell', 'y', 'x'), as.is = TRUE ) centroids <- read.table( file = file.path(data.dir, centroid.file), sep = '\t', as.is = TRUE, col.names = c('y', 'x') ) colnames(x = counts) <- gene.names[, 1] rownames(x = counts) <- paste0('starmap', seq(1:nrow(x = counts))) counts <- as.matrix(x = counts) rownames(x = centroids) <- rownames(x = counts) qhulls$cell <- paste0('starmap', qhulls$cell) centroids <- as.matrix(x = centroids) starmap <- CreateSeuratObject(counts = t(x = counts), assay = assay) starmap[[image]] <- new( Class = 'STARmap', assay = assay, coordinates = as.data.frame(x = centroids), qhulls = qhulls ) return(starmap) } #' Load Curio Seeker data #' #' @param data.dir location of data directory that contains the counts matrix, #' gene names, barcodes/beads, and barcodes/bead location files. #' @param assay Name of assay to associate spatial data to #' #' @return A \code{\link{Seurat}} object #' #' @importFrom Matrix readMM #' #' @export #' @concept preprocessing #' LoadCurioSeeker <- function(data.dir, assay = "Spatial") { # check and find input files if (length(x = data.dir) > 1) { warning("'LoadCurioSeeker' accepts only one 'data.dir'", immediate. = TRUE) data.dir <- data.dir[1] } mtx.file <- list.files( data.dir, pattern = "*MoleculesPerMatchedBead.mtx", full.names = TRUE) if (length(x = mtx.file) > 1) { warning("Multiple files matched the pattern '*MoleculesPerMatchedBead.mtx'", immediate. = TRUE) } else if (length(x = mtx.file) == 0) { stop("No file matched the pattern '*MoleculesPerMatchedBead.mtx'", call. = FALSE) } mtx.file <- mtx.file[1] barcodes.file <- list.files( data.dir, pattern = "*barcodes.tsv", full.names = TRUE) if (length(x = barcodes.file) > 1) { warning("Multiple files matched the pattern '*barcodes.tsv'", immediate. = TRUE) } else if (length(x = barcodes.file) == 0) { stop("No file matched the pattern '*barcodes.tsv'", call. = FALSE) } barcodes.file <- barcodes.file[1] genes.file <- list.files( data.dir, pattern = "*genes.tsv", full.names = TRUE) if (length(x = genes.file) > 1) { warning("Multiple files matched the pattern '*genes.tsv'", immediate. = TRUE) } else if (length(x = genes.file) == 0) { stop("No file matched the pattern '*genes.tsv'", call. = FALSE) } genes.file <- genes.file[1] coordinates.file <- list.files( data.dir, pattern = "*MatchedBeadLocation.csv", full.names = TRUE) if (length(x = coordinates.file) > 1) { warning("Multiple files matched the pattern '*MatchedBeadLocation.csv'", immediate. = TRUE) } else if (length(x = coordinates.file) == 0) { stop("No file matched the pattern '*MatchedBeadLocation.csv'", call. 
= FALSE) } coordinates.file <- coordinates.file[1] # load counts matrix and create seurat object mtx <- readMM(mtx.file) mtx <- as.sparse(mtx) barcodes <- read.csv(barcodes.file, header = FALSE) genes <- read.csv(genes.file, header = FALSE) colnames(mtx) <- barcodes$V1 rownames(mtx) <- genes$V1 object <- CreateSeuratObject(counts = mtx, assay = assay) # load positions of each bead and store in a SlideSeq object in images slot coords <- read.csv(coordinates.file) colnames(coords) <- c("cell", "x", "y") coords$y <- -coords$y rownames(coords) <- coords$cell coords$cell <- NULL image <- new(Class = 'SlideSeq', assay = assay, coordinates = coords) object[["Slice"]] <- image return(object) } #' Demultiplex samples based on classification method from MULTI-seq (McGinnis et al., bioRxiv 2018) #' #' Identify singlets, doublets and negative cells from multiplexing experiments. Annotate singlets by tags. #' #' @param object Seurat object. Assumes that the specified assay data has been added #' @param assay Name of the multiplexing assay (HTO by default) #' @param quantile The quantile to use for classification #' @param autoThresh Whether to perform automated threshold finding to define the best quantile. Default is FALSE #' @param maxiter Maximum number of iterations if autoThresh = TRUE. Default is 5 #' @param qrange A range of possible quantile values to try if autoThresh = TRUE #' @param verbose Prints the output #' #' @return A Seurat object with demultiplexing results stored at \code{object$MULTI_ID} #' #' @export #' @concept preprocessing #' #' @references \url{https://www.biorxiv.org/content/10.1101/387241v1} #' #' @examples #' \dontrun{ #' object <- MULTIseqDemux(object) #' } #' MULTIseqDemux <- function( object, assay = "HTO", quantile = 0.7, autoThresh = FALSE, maxiter = 5, qrange = seq(from = 0.1, to = 0.9, by = 0.05), verbose = TRUE ) { assay <- assay %||% DefaultAssay(object = object) multi_data_norm <- t(x = GetAssayData( object = object, slot = "data", assay = assay )) if (autoThresh) { iter <- 1 negatives <- c() neg.vector <- c() while (iter <= maxiter) { # Iterate over q values to find ideal barcode thresholding results by maximizing singlet classifications bar.table_sweep.list <- list() n <- 0 for (q in qrange) { n <- n + 1 # Generate list of singlet/doublet/negative classifications across q sweep bar.table_sweep.list[[n]] <- ClassifyCells(data = multi_data_norm, q = q) names(x = bar.table_sweep.list)[n] <- paste0("q=" , q) } # Determine which q values results in the highest pSinglet res_round <- FindThresh(call.list = bar.table_sweep.list)$res res.use <- res_round[res_round$Subset == "pSinglet", ] q.use <- res.use[which.max(res.use$Proportion),"q"] if (verbose) { message("Iteration ", iter) message("Using quantile ", q.use) } round.calls <- ClassifyCells(data = multi_data_norm, q = q.use) #remove negative cells neg.cells <- names(x = round.calls)[which(x = round.calls == "Negative")] neg.vector <- c(neg.vector, rep(x = "Negative", length(x = neg.cells))) negatives <- c(negatives, neg.cells) if (length(x = neg.cells) == 0) { break } multi_data_norm <- multi_data_norm[-which(x = rownames(x = multi_data_norm) %in% neg.cells), ] iter <- iter + 1 } names(x = neg.vector) <- negatives demux_result <- c(round.calls,neg.vector) demux_result <- demux_result[rownames(x = object[[]])] } else{ demux_result <- ClassifyCells(data = multi_data_norm, q = quantile) } demux_result <- demux_result[rownames(x = object[[]])] object[['MULTI_ID']] <- factor(x = demux_result) Idents(object = object) <- 
"MULTI_ID" bcs <- colnames(x = multi_data_norm) bc.max <- bcs[apply(X = multi_data_norm, MARGIN = 1, FUN = which.max)] bc.second <- bcs[unlist(x = apply( X = multi_data_norm, MARGIN = 1, FUN = function(x) { return(which(x == MaxN(x))) } ))] doublet.names <- unlist(x = lapply( X = 1:length(x = bc.max), FUN = function(x) { return(paste(sort(x = c(bc.max[x], bc.second[x])), collapse = "_")) } )) doublet.id <- which(x = demux_result == "Doublet") MULTI_classification <- as.character(object$MULTI_ID) MULTI_classification[doublet.id] <- doublet.names[doublet.id] object$MULTI_classification <- factor(x = MULTI_classification) return(object) } #' Load in data from 10X #' #' Enables easy loading of sparse data matrices provided by 10X genomics. #' #' @param data.dir Directory containing the matrix.mtx, genes.tsv (or features.tsv), and barcodes.tsv #' files provided by 10X. A vector or named vector can be given in order to load #' several data directories. If a named vector is given, the cell barcode names #' will be prefixed with the name. #' @param gene.column Specify which column of genes.tsv or features.tsv to use for gene names; default is 2 #' @param cell.column Specify which column of barcodes.tsv to use for cell names; default is 1 #' @param unique.features Make feature names unique (default TRUE) #' @param strip.suffix Remove trailing "-1" if present in all cell barcodes. #' #' @return If features.csv indicates the data has multiple data types, a list #' containing a sparse matrix of the data from each type will be returned. #' Otherwise a sparse matrix containing the expression data will be returned. #' #' @importFrom Matrix readMM #' @importFrom utils read.delim #' #' @export #' @concept preprocessing #' #' @examples #' \dontrun{ #' # For output from CellRanger < 3.0 #' data_dir <- 'path/to/data/directory' #' list.files(data_dir) # Should show barcodes.tsv, genes.tsv, and matrix.mtx #' expression_matrix <- Read10X(data.dir = data_dir) #' seurat_object = CreateSeuratObject(counts = expression_matrix) #' #' # For output from CellRanger >= 3.0 with multiple data types #' data_dir <- 'path/to/data/directory' #' list.files(data_dir) # Should show barcodes.tsv.gz, features.tsv.gz, and matrix.mtx.gz #' data <- Read10X(data.dir = data_dir) #' seurat_object = CreateSeuratObject(counts = data$`Gene Expression`) #' seurat_object[['Protein']] = CreateAssayObject(counts = data$`Antibody Capture`) #' } #' Read10X <- function( data.dir, gene.column = 2, cell.column = 1, unique.features = TRUE, strip.suffix = FALSE ) { full.data <- list() has_dt <- requireNamespace("data.table", quietly = TRUE) && requireNamespace("R.utils", quietly = TRUE) for (i in seq_along(along.with = data.dir)) { run <- data.dir[i] if (!dir.exists(paths = run)) { stop("Directory provided does not exist") } barcode.loc <- file.path(run, 'barcodes.tsv') gene.loc <- file.path(run, 'genes.tsv') features.loc <- file.path(run, 'features.tsv.gz') matrix.loc <- file.path(run, 'matrix.mtx') # Flag to indicate if this data is from CellRanger >= 3.0 pre_ver_3 <- file.exists(gene.loc) if (!pre_ver_3) { addgz <- function(s) { return(paste0(s, ".gz")) } barcode.loc <- addgz(s = barcode.loc) matrix.loc <- addgz(s = matrix.loc) } if (!file.exists(barcode.loc)) { stop("Barcode file missing. Expecting ", basename(path = barcode.loc)) } if (!pre_ver_3 && !file.exists(features.loc) ) { stop("Gene name or features file missing. Expecting ", basename(path = features.loc)) } if (!file.exists(matrix.loc)) { stop("Expression matrix file missing. 
Expecting ", basename(path = matrix.loc)) } data <- readMM(file = matrix.loc) if (has_dt) { cell.barcodes <- as.data.frame(data.table::fread(barcode.loc, header = FALSE)) } else { cell.barcodes <- read.table(file = barcode.loc, header = FALSE, sep = '\t', row.names = NULL) } if (ncol(x = cell.barcodes) > 1) { cell.names <- cell.barcodes[, cell.column] } else { cell.names <- readLines(con = barcode.loc) } if (all(grepl(pattern = "\\-1$", x = cell.names)) & strip.suffix) { cell.names <- as.vector(x = as.character(x = sapply( X = cell.names, FUN = ExtractField, field = 1, delim = "-" ))) } if (is.null(x = names(x = data.dir))) { if (length(x = data.dir) < 2) { colnames(x = data) <- cell.names } else { colnames(x = data) <- paste0(i, "_", cell.names) } } else { colnames(x = data) <- paste0(names(x = data.dir)[i], "_", cell.names) } if (has_dt) { feature.names <- as.data.frame(data.table::fread(ifelse(test = pre_ver_3, yes = gene.loc, no = features.loc), header = FALSE)) } else { feature.names <- read.delim( file = ifelse(test = pre_ver_3, yes = gene.loc, no = features.loc), header = FALSE, stringsAsFactors = FALSE ) } if (any(is.na(x = feature.names[, gene.column]))) { warning( 'Some features names are NA. Replacing NA names with ID from the opposite column requested', call. = FALSE, immediate. = TRUE ) na.features <- which(x = is.na(x = feature.names[, gene.column])) replacement.column <- ifelse(test = gene.column == 2, yes = 1, no = 2) feature.names[na.features, gene.column] <- feature.names[na.features, replacement.column] } if (unique.features) { fcols = ncol(x = feature.names) if (fcols < gene.column) { stop(paste0("gene.column was set to ", gene.column, " but feature.tsv.gz (or genes.tsv) only has ", fcols, " columns.", " Try setting the gene.column argument to a value <= to ", fcols, ".")) } rownames(x = data) <- make.unique(names = feature.names[, gene.column]) } # In cell ranger 3.0, a third column specifying the type of data was added # and we will return each type of data as a separate matrix if (ncol(x = feature.names) > 2) { data_types <- factor(x = feature.names$V3) lvls <- levels(x = data_types) if (length(x = lvls) > 1 && length(x = full.data) == 0) { message("10X data contains more than one type and is being returned as a list containing matrices of each type.") } expr_name <- "Gene Expression" if (expr_name %in% lvls) { # Return Gene Expression first lvls <- c(expr_name, lvls[-which(x = lvls == expr_name)]) } data <- lapply( X = lvls, FUN = function(l) { return(data[data_types == l, , drop = FALSE]) } ) names(x = data) <- lvls } else{ data <- list(data) } full.data[[length(x = full.data) + 1]] <- data } # Combine all the data from different directories into one big matrix, note this # assumes that all data directories essentially have the same features files list_of_data <- list() for (j in 1:length(x = full.data[[1]])) { list_of_data[[j]] <- do.call(cbind, lapply(X = full.data, FUN = `[[`, j)) # Fix for Issue #913 list_of_data[[j]] <- as.sparse(x = list_of_data[[j]]) } names(x = list_of_data) <- names(x = full.data[[1]]) # If multiple features, will return a list, otherwise # a matrix. if (length(x = list_of_data) == 1) { return(list_of_data[[1]]) } else { return(list_of_data) } } #' Read 10X hdf5 file #' #' Read count matrix from 10X CellRanger hdf5 file. #' This can be used to read both scATAC-seq and scRNA-seq matrices. #' #' @param filename Path to h5 file #' @param use.names Label row names with feature names rather than ID numbers. 
#' @param unique.features Make feature names unique (default TRUE) #' #' @return Returns a sparse matrix with rows and columns labeled. If multiple #' genomes are present, returns a list of sparse matrices (one per genome). #' #' @export #' @concept preprocessing #' Read10X_h5 <- function(filename, use.names = TRUE, unique.features = TRUE) { if (!requireNamespace('hdf5r', quietly = TRUE)) { stop("Please install hdf5r to read HDF5 files") } if (!file.exists(filename)) { stop("File not found") } infile <- hdf5r::H5File$new(filename = filename, mode = 'r') genomes <- names(x = infile) output <- list() if (hdf5r::existsGroup(infile, 'matrix')) { # cellranger version 3 if (use.names) { feature_slot <- 'features/name' } else { feature_slot <- 'features/id' } } else { if (use.names) { feature_slot <- 'gene_names' } else { feature_slot <- 'genes' } } for (genome in genomes) { counts <- infile[[paste0(genome, '/data')]] indices <- infile[[paste0(genome, '/indices')]] indptr <- infile[[paste0(genome, '/indptr')]] shp <- infile[[paste0(genome, '/shape')]] features <- infile[[paste0(genome, '/', feature_slot)]][] barcodes <- infile[[paste0(genome, '/barcodes')]] sparse.mat <- sparseMatrix( i = indices[] + 1, p = indptr[], x = as.numeric(x = counts[]), dims = shp[], repr = "T" ) if (unique.features) { features <- make.unique(names = features) } rownames(x = sparse.mat) <- features colnames(x = sparse.mat) <- barcodes[] sparse.mat <- as.sparse(x = sparse.mat) # Split v3 multimodal if (infile$exists(name = paste0(genome, '/features'))) { types <- infile[[paste0(genome, '/features/feature_type')]][] types.unique <- unique(x = types) if (length(x = types.unique) > 1) { message( "Genome ", genome, " has multiple modalities, returning a list of matrices for this genome" ) sparse.mat <- sapply( X = types.unique, FUN = function(x) { return(sparse.mat[which(x = types == x), ]) }, simplify = FALSE, USE.NAMES = TRUE ) } } output[[genome]] <- sparse.mat } infile$close_all() if (length(x = output) == 1) { return(output[[genome]]) } else{ return(output) } } #' Load a 10X Genomics Visium Image #' #' @param image.dir Path to directory with 10X Genomics Visium image data; #' should include files \code{tissue_lowres_image.png}, #' \code{scalefactors_json.json}, and \code{tissue_positions_list.csv} (or #' \code{tissue_positions.csv} for newer Space Ranger output) #' @param filter.matrix Filter spot/feature matrix to only include spots that #' have been determined to be over tissue. #' @param ... Ignored for now #' #' @return A \code{\link{VisiumV1}} object #' #' @importFrom png readPNG #' @importFrom jsonlite fromJSON #' #' @seealso \code{\link{VisiumV1}} \code{\link{Load10X_Spatial}} #' #' @export #' @concept preprocessing #' Read10X_Image <- function(image.dir, filter.matrix = TRUE, ...)
{ image <- readPNG(source = file.path(image.dir, 'tissue_lowres_image.png')) scale.factors <- fromJSON(txt = file.path(image.dir, 'scalefactors_json.json')) tissue.positions.path <- Sys.glob(paths = file.path(image.dir, 'tissue_positions*')) tissue.positions <- read.csv( file = tissue.positions.path, col.names = c('barcodes', 'tissue', 'row', 'col', 'imagerow', 'imagecol'), header = ifelse( test = basename(tissue.positions.path) == "tissue_positions.csv", yes = TRUE, no = FALSE ), as.is = TRUE, row.names = 1 ) if (filter.matrix) { tissue.positions <- tissue.positions[which(x = tissue.positions$tissue == 1), , drop = FALSE] } unnormalized.radius <- scale.factors$fiducial_diameter_fullres * scale.factors$tissue_lowres_scalef spot.radius <- unnormalized.radius / max(dim(x = image)) return(new( Class = 'VisiumV1', image = image, scale.factors = scalefactors( spot = scale.factors$spot_diameter_fullres, fiducial = scale.factors$fiducial_diameter_fullres, hires = scale.factors$tissue_hires_scalef, scale.factors$tissue_lowres_scalef ), coordinates = tissue.positions, spot.radius = spot.radius )) } #' Read and Load Akoya CODEX data #' #' @param filename Path to matrix generated by upstream processing. #' @param type Specify which type matrix is being provided. #' \itemize{ #' \item \dQuote{\code{processor}}: matrix generated by CODEX Processor #' \item \dQuote{\code{inform}}: matrix generated by inForm #' \item \dQuote{\code{qupath}}: matrix generated by QuPath #' } #' @param filter A pattern to filter features by; pass \code{NA} to #' skip feature filtering #' @param inform.quant When \code{type} is \dQuote{\code{inform}}, the #' quantification level to read in #' #' @return \code{ReadAkoya}: A list with some combination of the following values #' \itemize{ #' \item \dQuote{\code{matrix}}: a #' \link[Matrix:dgCMatrix-class]{sparse matrix} with expression data; cells #' are columns and features are rows #' \item \dQuote{\code{centroids}}: a data frame with cell centroid #' coordinates in three columns: \dQuote{x}, \dQuote{y}, and \dQuote{cell} #' \item \dQuote{\code{metadata}}: a data frame with cell-level meta data; #' includes all columns in \code{filename} that aren't in #' \dQuote{\code{matrix}} or \dQuote{\code{centroids}} #' } #' When \code{type} is \dQuote{\code{inform}}, additional expression matrices #' are returned and named using their segmentation type (eg. #' \dQuote{nucleus}, \dQuote{membrane}). 
The \dQuote{Entire Cell} segmentation #' type is returned in the \dQuote{\code{matrix}} entry of the list #' #' @export #' #' @order 1 #' #' @concept preprocessing #' #' @template section-progressr #' #' @templateVar pkg data.table #' @template note-reqdpkg #' ReadAkoya <- function( filename, type = c('inform', 'processor', 'qupath'), filter = 'DAPI|Blank|Empty', inform.quant = c('mean', 'total', 'min', 'max', 'std') ) { if (!requireNamespace("data.table", quietly = TRUE)) { stop("Please install 'data.table' for this function") } # Check arguments if (!file.exists(filename)) { stop(paste("Can't file file:", filename)) } type <- tolower(x = type[1L]) type <- match.arg(arg = type) # outs <- list(matrix = NULL, centroids = NULL) ratio <- getOption(x = 'Seurat.input.sparse_ratio', default = 0.4) p <- progressor() # Preload matrix p(message = "Preloading Akoya matrix", class = 'sticky', amount = 0) sep <- switch(EXPR = type, 'inform' = '\t', ',') mtx <- data.table::fread( file = filename, sep = sep, data.table = FALSE, verbose = FALSE ) # Assemble outputs p( message = paste0("Parsing matrix in '", type, "' format"), class = 'sticky', amount = 0 ) outs <- switch( EXPR = type, 'processor' = { # Create centroids data frame p( message = 'Creating centroids coordinates', class = 'sticky', amount = 0 ) centroids <- data.frame( x = mtx[['x:x']], y = mtx[['y:y']], cell = as.character(x = mtx[['cell_id:cell_id']]), stringsAsFactors = FALSE ) rownames(x = mtx) <- as.character(x = mtx[['cell_id:cell_id']]) # Create metadata data frame p(message = 'Creating meta data', class = 'sticky', amount = 0) md <- mtx[, !grepl(pattern = '^cyc', x = colnames(x = mtx)), drop = FALSE] colnames(x = md) <- vapply( X = strsplit(x = colnames(x = md), split = ':'), FUN = '[[', FUN.VALUE = character(length = 1L), 2L ) # Create expression matrix p(message = 'Creating expression matrix', class = 'sticky', amount = 0) mtx <- mtx[, grepl(pattern = '^cyc', x = colnames(x = mtx)), drop = FALSE] colnames(x = mtx) <- vapply( X = strsplit(x = colnames(x = mtx), split = ':'), FUN = '[[', FUN.VALUE = character(length = 1L), 2L ) if (!is.na(x = filter)) { p( message = paste0("Filtering features with pattern '", filter, "'"), class = 'sticky', amount = 0 ) mtx <- mtx[, !grepl(pattern = filter, x = colnames(x = mtx)), drop = FALSE] } mtx <- t(x = mtx) if ((sum(mtx == 0) / length(x = mtx)) > ratio) { p( message = 'Converting expression to sparse matrix', class = 'sticky', amount = 0 ) mtx <- as.sparse(x = mtx) } list(matrix = mtx, centroids = centroids, metadata = md) }, 'inform' = { inform.quant <- tolower(x = inform.quant[1L]) inform.quant <- match.arg(arg = inform.quant) expr.key <- c( mean = 'Mean', total = 'Total', min = 'Min', max = 'Max', std = 'Std Dev' )[inform.quant] expr.pattern <- '\\(Normalized Counts, Total Weighting\\)' rownames(x = mtx) <- mtx[['Cell ID']] mtx <- mtx[, setdiff(x = colnames(x = mtx), y = 'Cell ID'), drop = FALSE] # Create centroids p( message = 'Creating centroids coordinates', class = 'sticky', amount = 0 ) centroids <- data.frame( x = mtx[['Cell X Position']], y = mtx[['Cell Y Position']], cell = rownames(x = mtx), stringsAsFactors = FALSE ) # Create metadata p(message = 'Creating meta data', class = 'sticky', amount = 0) cols <- setdiff( x = grep( pattern = expr.pattern, x = colnames(x = mtx), value = TRUE, invert = TRUE ), y = paste('Cell', c('X', 'Y'), 'Position') ) md <- mtx[, cols, drop = FALSE] # Create expression matrices exprs <- data.frame( cols = grep( pattern = paste(expr.key, expr.pattern), x = 
colnames(x = mtx), value = TRUE ) ) exprs$feature <- vapply( X = trimws(x = gsub( pattern = paste(expr.key, expr.pattern), replacement = '', x = exprs$cols )), FUN = function(x) { x <- unlist(x = strsplit(x = x, split = ' ')) x <- x[length(x = x)] return(gsub(pattern = '\\(|\\)', replacement = '', x = x)) }, FUN.VALUE = character(length = 1L) ) exprs$class <- tolower(x = vapply( X = strsplit(x = exprs$cols, split = ' '), FUN = '[[', FUN.VALUE = character(length = 1L), 1L )) classes <- unique(x = exprs$class) outs <- vector( mode = 'list', length = length(x = classes) + 2L ) names(x = outs) <- c( 'matrix', 'centroids', 'metadata', setdiff(x = classes, y = 'entire') ) outs$centroids <- centroids outs$metadata <- md # browser() for (i in classes) { p( message = paste( 'Creating', switch(EXPR = i, 'entire' = 'entire cell', i), 'expression matrix' ), class = 'sticky', amount = 0 ) df <- exprs[exprs$class == i, , drop = FALSE] expr <- mtx[, df$cols] colnames(x = expr) <- df$feature if (!is.na(x = filter)) { p( message = paste0("Filtering features with pattern '", filter, "'"), class = 'sticky', amount = 0 ) expr <- expr[, !grepl(pattern = filter, x = colnames(x = expr)), drop = FALSE] } expr <- t(x = expr) if ((sum(expr == 0, na.rm = TRUE) / length(x = expr)) > ratio) { p( message = paste( 'Converting', switch(EXPR = i, 'entire' = 'entire cell', i), 'expression to sparse matrix' ), class = 'sticky', amount = 0 ) expr <- as.sparse(x = expr) } outs[[switch(EXPR = i, 'entire' = 'matrix', i)]] <- expr } outs }, 'qupath' = { rownames(x = mtx) <- as.character(x = seq_len(length.out = nrow(x = mtx))) # Create centroids p( message = 'Creating centroids coordinates', class = 'sticky', amount = 0 ) xpos <- sort( x = grep(pattern = 'Centroid X', x = colnames(x = mtx), value = TRUE), decreasing = TRUE )[1L] ypos <- sort( x = grep(pattern = 'Centroid Y', x = colnames(x = mtx), value = TRUE), decreasing = TRUE )[1L] centroids <- data.frame( x = mtx[[xpos]], y = mtx[[ypos]], cell = rownames(x = mtx), stringsAsFactors = FALSE ) # Create metadata p(message = 'Creating meta data', class = 'sticky', amount = 0) cols <- setdiff( x = grep( pattern = 'Cell: Mean', x = colnames(x = mtx), ignore.case = TRUE, value = TRUE, invert = TRUE ), y = c(xpos, ypos) ) md <- mtx[, cols, drop = FALSE] # Create expression matrix p(message = 'Creating expression matrix', class = 'sticky', amount = 0) idx <- which(x = grepl( pattern = 'Cell: Mean', x = colnames(x = mtx), ignore.case = TRUE )) mtx <- mtx[, idx, drop = FALSE] colnames(x = mtx) <- vapply( X = strsplit(x = colnames(x = mtx), split = ':'), FUN = '[[', FUN.VALUE = character(length = 1L), 1L ) if (!is.na(x = filter)) { p( message = paste0("Filtering features with pattern '", filter, "'"), class = 'sticky', amount = 0 ) mtx <- mtx[, !grepl(pattern = filter, x = colnames(x = mtx)), drop = FALSE] } mtx <- t(x = mtx) if ((sum(mtx == 0) / length(x = mtx)) > ratio) { p( message = 'Converting expression to sparse matrix', class = 'sticky', amount = 0 ) mtx <- as.sparse(x = mtx) } list(matrix = mtx, centroids = centroids, metadata = md) }, stop("Unknown matrix type: ", type) ) return(outs) } #' Load in data from remote or local mtx files #' #' Enables easy loading of sparse data matrices #' #' @param mtx Name or remote URL of the mtx file #' @param cells Name or remote URL of the cells/barcodes file #' @param features Name or remote URL of the features/genes file #' @param cell.column Specify which column of cells file to use for cell names; default is 1 #' @param feature.column 
Specify which column of features files to use for feature/gene names; default is 2 #' @param cell.sep Specify the delimiter in the cell name file #' @param feature.sep Specify the delimiter in the feature name file #' @param skip.cell Number of lines to skip in the cells file before beginning to read cell names #' @param skip.feature Number of lines to skip in the features file before beginning to read gene names #' @param mtx.transpose Transpose the matrix after reading in #' @param unique.features Make feature names unique (default TRUE) #' @param strip.suffix Remove trailing "-1" if present in all cell barcodes. #' #' @return A sparse matrix containing the expression data. #' #' @importFrom Matrix readMM #' @importFrom utils read.delim #' @importFrom httr build_url parse_url #' @importFrom tools file_ext #' #' #' @export #' @concept preprocessing #' #' @examples #' \dontrun{ #' # For local files: #' #' expression_matrix <- ReadMtx( #' mtx = "count_matrix.mtx.gz", features = "features.tsv.gz", #' cells = "barcodes.tsv.gz" #' ) #' seurat_object <- CreateSeuratObject(counts = expression_matrix) #' #' # For remote files: #' #' expression_matrix <- ReadMtx(mtx = "http://localhost/matrix.mtx", #' cells = "http://localhost/barcodes.tsv", #' features = "http://localhost/genes.tsv") #' seurat_object <- CreateSeuratObject(counts = expression_matrix) #' } #' ReadMtx <- function( mtx, cells, features, cell.column = 1, feature.column = 2, cell.sep = "\t", feature.sep = "\t", skip.cell = 0, skip.feature = 0, mtx.transpose = FALSE, unique.features = TRUE, strip.suffix = FALSE ) { all.files <- list( "expression matrix" = mtx, "barcode list" = cells, "feature list" = features ) for (i in seq_along(along.with = all.files)) { uri <- tryCatch( expr = { con <- url(description = all.files[[i]]) close(con = con) all.files[[i]] }, error = function(...) { return(normalizePath(path = all.files[[i]], winslash = '/')) } ) err <- paste("Cannot find", names(x = all.files)[i], "at", uri) uri <- build_url(url = parse_url(url = uri)) if (grepl(pattern = '^[A-Z]?:///', x = uri)) { uri <- gsub(pattern = '^://', replacement = '', x = uri) if (!file.exists(uri)) { stop(err, call. = FALSE) } } else { if (!Online(url = uri, seconds = 2L)) { stop(err, call. = FALSE) } if (file_ext(uri) == 'gz') { con <- url(description = uri) uri <- gzcon(con = con, text = TRUE) } } all.files[[i]] <- uri } cell.barcodes <- read.table( file = all.files[['barcode list']], header = FALSE, sep = cell.sep, row.names = NULL, skip = skip.cell ) feature.names <- read.table( file = all.files[['feature list']], header = FALSE, sep = feature.sep, row.names = NULL, skip = skip.feature ) # read barcodes bcols <- ncol(x = cell.barcodes) if (bcols < cell.column) { stop( "cell.column was set to ", cell.column, " but ", cells, " only has ", bcols, " columns.", " Try setting the cell.column argument to a value <= to ", bcols, "." ) } cell.names <- cell.barcodes[, cell.column] if (all(grepl(pattern = "\\-1$", x = cell.names)) & strip.suffix) { cell.names <- as.vector(x = as.character(x = sapply( X = cell.names, FUN = ExtractField, field = 1, delim = "-" ))) } # read features fcols <- ncol(x = feature.names) if (fcols < feature.column) { stop( "feature.column was set to ", feature.column, " but ", features, " only has ", fcols, " column(s).", " Try setting the feature.column argument to a value <= to ", fcols, "."
) } if (any(is.na(x = feature.names[, feature.column]))) { na.features <- which(x = is.na(x = feature.names[, feature.column])) replacement.column <- ifelse(test = feature.column == 2, yes = 1, no = 2) if (replacement.column > fcols) { stop( "Some feature names are NA in column ", feature.column, ". Try specifying a different column.", call. = FALSE ) } else { warning( "Some feature names are NA in column ", feature.column, ". Replacing NA names with ID from column ", replacement.column, ".", call. = FALSE ) } feature.names[na.features, feature.column] <- feature.names[na.features, replacement.column] } feature.names <- feature.names[, feature.column] if (unique.features) { feature.names <- make.unique(names = feature.names) } data <- readMM(file = all.files[['expression matrix']]) if (mtx.transpose) { data <- t(x = data) } if (length(x = cell.names) != ncol(x = data)) { stop( "Matrix has ", ncol(data), " columns but found ", length(cell.names), " barcodes. ", ifelse( test = length(x = cell.names) > ncol(x = data), yes = "Try increasing `skip.cell`. ", no = "" ), call. = FALSE ) } if (length(x = feature.names) != nrow(x = data)) { stop( "Matrix has ", nrow(data), " rows but found ", length(feature.names), " features. ", ifelse( test = length(x = feature.names) > nrow(x = data), yes = "Try increasing `skip.feature`. ", no = "" ), call. = FALSE ) } colnames(x = data) <- cell.names rownames(x = data) <- feature.names data <- as.sparse(x = data) return(data) } #' Read and Load Nanostring SMI data #' #' @param data.dir Directory containing all Nanostring SMI files with #' default filenames #' @param mtx.file Path to Nanostring cell x gene matrix CSV #' @param metadata.file Contains metadata including cell center, area, #' and stain intensities #' @param molecules.file Path to molecules file #' @param segmentations.file Path to segmentations CSV #' @param type Type of cell spatial coordinate matrices to read; choose one #' or more of: #' \itemize{ #' \item \dQuote{centroids}: cell centroids in pixel coordinate space #' \item \dQuote{segmentations}: cell segmentations in pixel coordinate space #' } #' @param mol.type Type of molecule spatial coordinate matrices to read; #' choose one or more of: #' \itemize{ #' \item \dQuote{pixels}: molecule coordinates in pixel space #' } #' @param metadata Type of available metadata to read; #' choose zero or more of: #' \itemize{ #' \item \dQuote{Area}: number of pixels in cell segmentation #' \item \dQuote{fov}: cell's fov #' \item \dQuote{Mean.MembraneStain}: mean membrane stain intensity #' \item \dQuote{Mean.DAPI}: mean DAPI stain intensity #' \item \dQuote{Mean.G}: mean green channel stain intensity #' \item \dQuote{Mean.Y}: mean yellow channel stain intensity #' \item \dQuote{Mean.R}: mean red channel stain intensity #' \item \dQuote{Max.MembraneStain}: max membrane stain intensity #' \item \dQuote{Max.DAPI}: max DAPI stain intensity #' \item \dQuote{Max.G}: max green channel stain intensity #' \item \dQuote{Max.Y}: max yellow stain intensity #' \item \dQuote{Max.R}: max red stain intensity #' } #' @param mols.filter Filter molecules that match provided string #' @param genes.filter Filter genes from cell x gene matrix that match #' provided string #' @param fov.filter Only load in select FOVs. Nanostring SMI data contains #' 30 total FOVs.
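#' @examples
#' \dontrun{
#' # Illustrative sketch only; "path/to/smi/outs" is a hypothetical directory
#' # holding the default Nanostring SMI exports (exprMat, metadata, tx, and
#' # polygon CSVs); adjust fov.filter to the FOVs present in your run
#' nano <- ReadNanostring(
#'   data.dir = "path/to/smi/outs",
#'   type = c("centroids", "segmentations"),
#'   fov.filter = 1:10
#' )
#' names(nano) # "matrix", "pixels", "centroids", "segmentations"
#' }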
#' @param subset.counts.matrix If the counts matrix should be built from #' molecule coordinates for a specific segmentation; One of: #' \itemize{ #' \item \dQuote{Nuclear}: nuclear segmentations #' \item \dQuote{Cytoplasm}: cell cytoplasm segmentations #' \item \dQuote{Membrane}: cell membrane segmentations #' } #' @param cell.mols.only If TRUE, only load molecules within a cell #' #' @return \code{ReadNanostring}: A list with some combination of the #' following values: #' \itemize{ #' \item \dQuote{\code{matrix}}: a #' \link[Matrix:dgCMatrix-class]{sparse matrix} with expression data; cells #' are columns and features are rows #' \item \dQuote{\code{centroids}}: a data frame with cell centroid #' coordinates in three columns: \dQuote{x}, \dQuote{y}, and \dQuote{cell} #' \item \dQuote{\code{pixels}}: a data frame with molecule pixel coordinates #' in three columns: \dQuote{x}, \dQuote{y}, and \dQuote{gene} #' } #' #' @importFrom future.apply future_lapply #' #' @export #' #' @order 1 #' #' @concept preprocessing #' #' @template section-progressr #' @template section-future #' #' @templateVar pkg data.table #' @template note-reqdpkg #' ReadNanostring <- function( data.dir, mtx.file = NULL, metadata.file = NULL, molecules.file = NULL, segmentations.file = NULL, type = 'centroids', mol.type = 'pixels', metadata = NULL, mols.filter = NA_character_, genes.filter = NA_character_, fov.filter = NULL, subset.counts.matrix = NULL, cell.mols.only = TRUE ) { if (!requireNamespace("data.table", quietly = TRUE)) { stop("Please install 'data.table' for this function") } # Argument checking type <- match.arg( arg = type, choices = c('centroids', 'segmentations'), several.ok = TRUE ) mol.type <- match.arg( arg = mol.type, choices = c('pixels'), several.ok = TRUE ) if (!is.null(metadata)) { metadata <- match.arg( arg = metadata, choices = c( "Area", "fov", "Mean.MembraneStain", "Mean.DAPI", "Mean.G", "Mean.Y", "Mean.R", "Max.MembraneStain", "Max.DAPI", "Max.G", "Max.Y", "Max.R" ), several.ok = TRUE ) } use.dir <- all(vapply( X = c(mtx.file, metadata.file, molecules.file), FUN = function(x) { return(is.null(x = x) || is.na(x = x)) }, FUN.VALUE = logical(length = 1L) )) if (use.dir && !dir.exists(paths = data.dir)) { stop("Cannot find Nanostring directory ", data.dir) } # Identify input files files <- c( matrix = mtx.file %||% '[_a-zA-Z0-9]*_exprMat_file.csv', metadata.file = metadata.file %||% '[_a-zA-Z0-9]*_metadata_file.csv', molecules.file = molecules.file %||% '[_a-zA-Z0-9]*_tx_file.csv', segmentations.file = segmentations.file %||% '[_a-zA-Z0-9]*-polygons.csv' ) files <- vapply( X = files, FUN = function(x) { x <- as.character(x = x) if (isTRUE(x = dirname(path = x) == '.')) { fnames <- list.files( path = data.dir, pattern = x, recursive = FALSE, full.names = TRUE ) return(sort(x = fnames, decreasing = TRUE)[1L]) } else { return(x) } }, FUN.VALUE = character(length = 1L), USE.NAMES = TRUE ) files[!file.exists(files)] <- NA_character_ if (all(is.na(x = files))) { stop("Cannot find Nanostring input files in ", data.dir) } # Checking for loading spatial coordinates if (!is.na(x = files[['metadata.file']])) { pprecoord <- progressor() pprecoord( message = "Preloading cell spatial coordinates", class = 'sticky', amount = 0 ) md <- data.table::fread( file = files[['metadata.file']], sep = ',', data.table = FALSE, verbose = FALSE ) # filter metadata file by FOVs if (!is.null(x = fov.filter)) { md <- md[md$fov %in% fov.filter,] } pprecoord(type = 'finish') } if (!is.na(x = files[['segmentations.file']])) { 
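# Preload cell segmentation vertices from the *-polygons.csv export; when
# fov.filter is supplied, only vertices belonging to the requested FOVs are kept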
ppresegs <- progressor() ppresegs( message = "Preloading cell segmentation vertices", class = 'sticky', amount = 0 ) segs <- data.table::fread( file = files[['segmentations.file']], sep = ',', data.table = FALSE, verbose = FALSE ) # filter metadata file by FOVs if (!is.null(x = fov.filter)) { segs <- segs[segs$fov %in% fov.filter,] } ppresegs(type = 'finish') } # Check for loading of molecule coordinates if (!is.na(x = files[['molecules.file']])) { ppremol <- progressor() ppremol( message = "Preloading molecule coordinates", class = 'sticky', amount = 0 ) mx <- data.table::fread( file = files[['molecules.file']], sep = ',', verbose = FALSE ) # filter molecules file by FOVs if (!is.null(x = fov.filter)) { mx <- mx[mx$fov %in% fov.filter,] } # Molecules outside of a cell have a cell_ID of 0 if (cell.mols.only) { mx <- mx[mx$cell_ID != 0,] } if (!is.na(x = mols.filter)) { ppremol( message = paste("Filtering molecules with pattern", mols.filter), class = 'sticky', amount = 0 ) mx <- mx[!grepl(pattern = mols.filter, x = mx$target), , drop = FALSE] } ppremol(type = 'finish') mols <- rep_len(x = files[['molecules.file']], length.out = length(x = mol.type)) names(x = mols) <- mol.type files <- c(files, mols) files <- files[setdiff(x = names(x = files), y = 'molecules.file')] } files <- files[!is.na(x = files)] outs <- list("matrix"=NULL, "pixels"=NULL, "centroids"=NULL) if (!is.null(metadata)) { outs <- append(outs, list("metadata" = NULL)) } if ("segmentations" %in% type) { outs <- append(outs, list("segmentations" = NULL)) } for (otype in names(x = outs)) { outs[[otype]] <- switch( EXPR = otype, 'matrix' = { ptx <- progressor() ptx(message = 'Reading counts matrix', class = 'sticky', amount = 0) if (!is.null(subset.counts.matrix)) { tx <- build.cellcomp.matrix(mols.df=mx, class=subset.counts.matrix) } else { tx <- data.table::fread( file = files[[otype]], sep = ',', data.table = FALSE, verbose = FALSE ) # Combination of Cell ID (for non-zero cell_IDs) and FOV are assumed to be unique. Used to create barcodes / rownames. 
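# (e.g. cell_ID 42 in FOV 3 becomes the barcode "42_3"); the same cell_ID/fov
# scheme is reused below for the centroid, segmentation, and metadata outputs
# so that every returned piece shares the same cell names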
bcs <- paste0(as.character(tx$cell_ID), "_", tx$fov) rownames(x = tx) <- bcs # remove all rows which represent counts of mols not assigned to a cell for each FOV tx <- tx[!tx$cell_ID == 0,] # filter fovs from counts matrix if (!is.null(x = fov.filter)) { tx <- tx[tx$fov %in% fov.filter,] } tx <- subset(tx, select = -c(fov, cell_ID)) } tx <- as.data.frame(t(x = as.matrix(x = tx))) if (!is.na(x = genes.filter)) { ptx( message = paste("Filtering genes with pattern", genes.filter), class = 'sticky', amount = 0 ) tx <- tx[!grepl(pattern = genes.filter, x = rownames(x = tx)), , drop = FALSE] } # only keep cells with counts greater than 0 tx <- tx[, which(colSums(tx) != 0)] ratio <- getOption(x = 'Seurat.input.sparse_ratio', default = 0.4) if ((sum(tx == 0) / length(x = tx)) > ratio) { ptx( message = 'Converting counts to sparse matrix', class = 'sticky', amount = 0 ) tx <- as.sparse(x = tx) } ptx(type = 'finish') tx }, 'centroids' = { pcents <- progressor() pcents( message = 'Creating centroid coordinates', class = 'sticky', amount = 0 ) pcents(type = 'finish') data.frame( x = md$CenterX_global_px, y = md$CenterY_global_px, cell = paste0(as.character(md$cell_ID), "_", md$fov), stringsAsFactors = FALSE ) }, 'segmentations' = { pcents <- progressor() pcents( message = 'Creating segmentation coordinates', class = 'sticky', amount = 0 ) pcents(type = 'finish') data.frame( x = segs$x_global_px, y = segs$y_global_px, cell = paste0(as.character(segs$cellID), "_", segs$fov), # cell_ID column in this file doesn't have an underscore stringsAsFactors = FALSE ) }, 'metadata' = { pmeta <- progressor() pmeta( message = 'Loading metadata', class = 'sticky', amount = 0 ) pmeta(type = 'finish') df <- md[,metadata] df$cell <- paste0(as.character(md$cell_ID), "_", md$fov) df }, 'pixels' = { ppixels <- progressor() ppixels( message = 'Creating pixel-level molecule coordinates', class = 'sticky', amount = 0 ) df <- data.frame( x = mx$x_global_px, y = mx$y_global_px, gene = mx$target, stringsAsFactors = FALSE ) ppixels(type = 'finish') df }, # 'microns' = { # pmicrons <- progressor() # pmicrons( # message = "Creating micron-level molecule coordinates", # class = 'sticky', # amount = 0 # ) # df <- data.frame( # x = mx$global_x, # y = mx$global_y, # gene = mx$gene, # stringsAsFactors = FALSE # ) # pmicrons(type = 'finish') # df # }, stop("Unknown Nanostring input type: ", outs[[otype]]) ) } return(outs) } #' Read and Load 10x Genomics Xenium in-situ data #' #' @param data.dir Directory containing all Xenium output files with #' default filenames #' @param outs Types of molecular outputs to read; choose one or more of: #' \itemize{ #' \item \dQuote{matrix}: the counts matrix #' \item \dQuote{microns}: molecule coordinates #' } #' @param type Type of cell spatial coordinate matrices to read; choose one #' or more of: #' \itemize{ #' \item \dQuote{centroids}: cell centroids in pixel coordinate space #' \item \dQuote{segmentations}: cell segmentations in pixel coordinate space #' } #' @param mols.qv.threshold Remove transcript molecules with #' a QV less than this threshold. QV >= 20 is the standard threshold #' used to construct the cell x gene count matrix. 
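#' @examples
#' \dontrun{
#' # Minimal sketch; "path/to/xenium/outs" is hypothetical and should be a
#' # Xenium output bundle containing cell_feature_matrix/, cells.csv.gz,
#' # cell_boundaries.csv.gz, and transcripts.csv.gz
#' xenium <- ReadXenium(
#'   data.dir = "path/to/xenium/outs",
#'   outs = c("matrix", "microns"),
#'   type = "centroids"
#' )
#' names(xenium) # "matrix", "microns", "centroids"
#' }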
#' #' @return \code{ReadXenium}: A list with some combination of the #' following values: #' \itemize{ #' \item \dQuote{\code{matrix}}: a #' \link[Matrix:dgCMatrix-class]{sparse matrix} with expression data; cells #' are columns and features are rows #' \item \dQuote{\code{centroids}}: a data frame with cell centroid #' coordinates in three columns: \dQuote{x}, \dQuote{y}, and \dQuote{cell} #' \item \dQuote{\code{pixels}}: a data frame with molecule pixel coordinates #' in three columns: \dQuote{x}, \dQuote{y}, and \dQuote{gene} #' } #' #' #' @export #' @concept preprocessing #' ReadXenium <- function( data.dir, outs = c("matrix", "microns"), type = "centroids", mols.qv.threshold = 20 ) { # Argument checking type <- match.arg( arg = type, choices = c("centroids", "segmentations"), several.ok = TRUE ) outs <- match.arg( arg = outs, choices = c("matrix", "microns"), several.ok = TRUE ) outs <- c(outs, type) has_dt <- requireNamespace("data.table", quietly = TRUE) && requireNamespace("R.utils", quietly = TRUE) data <- sapply(outs, function(otype) { switch( EXPR = otype, 'matrix' = { pmtx <- progressor() pmtx(message = 'Reading counts matrix', class = 'sticky', amount = 0) matrix <- suppressWarnings(Read10X(data.dir = file.path(data.dir, "cell_feature_matrix/"))) pmtx(type = "finish") matrix }, 'centroids' = { pcents <- progressor() pcents( message = 'Loading cell centroids', class = 'sticky', amount = 0 ) if (has_dt) { cell_info <- as.data.frame(data.table::fread(file.path(data.dir, "cells.csv.gz"))) } else { cell_info <- read.csv(file.path(data.dir, "cells.csv.gz")) } cell_centroid_df <- data.frame( x = cell_info$x_centroid, y = cell_info$y_centroid, cell = cell_info$cell_id, stringsAsFactors = FALSE ) pcents(type = 'finish') cell_centroid_df }, 'segmentations' = { psegs <- progressor() psegs( message = 'Loading cell segmentations', class = 'sticky', amount = 0 ) # load cell boundaries if (has_dt) { cell_boundaries_df <- as.data.frame(data.table::fread(file.path(data.dir, "cell_boundaries.csv.gz"))) } else { cell_boundaries_df <- read.csv(file.path(data.dir, "cell_boundaries.csv.gz"), stringsAsFactors = FALSE) } names(cell_boundaries_df) <- c("cell", "x", "y") psegs(type = "finish") cell_boundaries_df }, 'microns' = { pmicrons <- progressor() pmicrons( message = "Loading molecule coordinates", class = 'sticky', amount = 0 ) # molecules if (has_dt) { tx_dt <- as.data.frame(data.table::fread(file.path(data.dir, "transcripts.csv.gz"))) transcripts <- subset(tx_dt, qv >= mols.qv.threshold) } else { transcripts <- read.csv(file.path(data.dir, "transcripts.csv.gz")) transcripts <- subset(transcripts, qv >= mols.qv.threshold) } df <- data.frame( x = transcripts$x_location, y = transcripts$y_location, gene = transcripts$feature_name, stringsAsFactors = FALSE ) pmicrons(type = 'finish') df }, stop("Unknown Xenium input type: ", otype) ) }, USE.NAMES = TRUE) return(data) } #' Load Slide-seq spatial data #' #' @param coord.file Path to csv file containing bead coordinate positions #' @param assay Name of assay to associate image to #' #' @return A \code{\link{SlideSeq}} object #' #' @importFrom utils read.csv #' #' @seealso \code{\link{SlideSeq}} #' #' @export #' @concept preprocessing #' ReadSlideSeq <- function(coord.file, assay = 'Spatial') { if (!file.exists(paths = coord.file)) { stop("Cannot find coord file ", coord.file, call. 
= FALSE) } slide.seq <- new( Class = 'SlideSeq', assay = assay, coordinates = read.csv( file = coord.file, header = TRUE, as.is = TRUE, row.names = 1 ) ) return(slide.seq) } #' Read Data From Vitessce #' #' Read in data from Vitessce-formatted JSON files #' #' @param counts Path or URL to a Vitessce-formatted JSON file with #' expression data; should end in \dQuote{\code{.genes.json}} or #' \dQuote{\code{.clusters.json}}; pass \code{NULL} to skip #' @param coords Path or URL to a Vitessce-formatted JSON file with cell/spot #' spatial coordinates; should end in \dQuote{\code{.cells.json}}; #' pass \code{NULL} to skip #' @param molecules Path or URL to a Vitessce-formatted JSON file with molecule #' spatial coordinates; should end in \dQuote{\code{.molecules.json}}; #' pass \code{NULL} to skip #' @param type Type of cell/spot spatial coordinates to return; #' choose one or more from: #' \itemize{ #' \item \dQuote{segmentations} cell/spot segmentations #' \item \dQuote{centroids} cell/spot centroids #' } #' @param filter A character to filter molecules by; pass \code{NA} to skip #' molecule filtering #' #' @return \code{ReadVitessce}: A list with some combination of the #' following values: #' \itemize{ #' \item \dQuote{\code{counts}}: if \code{counts} is not \code{NULL}, an #' expression matrix with cells as columns and features as rows #' \item \dQuote{\code{centroids}}: if \code{coords} is not \code{NULL} and #' \code{type} contains \dQuote{centroids}, a data frame with cell centroids #' in three columns: \dQuote{x}, \dQuote{y}, and \dQuote{cell} #' \item \dQuote{\code{segmentations}}: if \code{coords} is not \code{NULL} and #' \code{type} contains \dQuote{segmentations}, a data frame with cell #' segmentations in three columns: \dQuote{x}, \dQuote{y} and \dQuote{cell} #' \item \dQuote{\code{molecules}}: if \code{molecules} is not \code{NULL}, a #' data frame with molecule spatial coordinates in three columns: \dQuote{x}, #' \dQuote{y}, and \dQuote{gene} #' } #' #' @importFrom jsonlite read_json #' @importFrom tools file_ext file_path_sans_ext #' #' @export #' #' @order 1 #' #' @concept preprocessing #' #' @template section-progressr #' #' @templateVar pkg jsonlite #' @template note-reqdpkg #' #' @examples #' \dontrun{ #' coords <- ReadVitessce( #' counts = #' "https://s3.amazonaws.com/vitessce-data/0.0.31/master_release/wang/wang.genes.json", #' coords = #' "https://s3.amazonaws.com/vitessce-data/0.0.31/master_release/wang/wang.cells.json", #' molecules = #' "https://s3.amazonaws.com/vitessce-data/0.0.31/master_release/wang/wang.molecules.json" #' ) #' names(coords) #' coords$counts[1:10, 1:10] #' head(coords$centroids) #' head(coords$segmentations) #' head(coords$molecules) #' } #' ReadVitessce <- function( counts = NULL, coords = NULL, molecules = NULL, type = c('segmentations', 'centroids'), filter = NA_character_ ) { if (!requireNamespace('jsonlite', quietly = TRUE)) { stop("Please install 'jsonlite' for this function") } type <- match.arg(arg = type, several.ok = TRUE) nouts <- c( counts %iff% 'counts', coords %iff% type, molecules %iff% 'molecules' ) outs <- vector(mode = 'list', length = length(x = nouts)) names(x = outs) <- nouts if (!is.null(x = coords)) { ppreload <- progressor() ppreload(message = "Preloading coordinates", class = 'sticky', amount = 0) cells <- read_json(path = coords) ppreload(type = 'finish') } for (i in nouts) { outs[[i]] <- switch( EXPR = i, 'counts' = { counts.type <- file_ext(x = basename(path = file_path_sans_ext( x = counts ))) cts <- switch( EXPR =
counts.type, 'clusters' = .ReadVitessceClusters(counts = counts), 'genes' = .ReadVitessceGenes(counts = counts), stop("Unknown Vitessce counts filetype: '", counts.type, "'") ) pcts <- progressor() if (!is.na(x = filter)) { pcts( message = paste("Filtering genes with pattern", filter), class = 'sticky', amount = 0 ) cts <- cts[!grepl(pattern = filter, x = rownames(x = cts)), , drop = FALSE] } ratio <- getOption(x = 'Seurat.input.sparse_ratio', default = 0.4) if ((sum(cts == 0) / length(x = cts)) > ratio) { pcts( message = 'Converting counts to sparse matrix', class = 'sticky', amount = 0 ) cts <- as.sparse(x = cts) } pcts(type = 'finish') cts }, 'centroids' = { pcents <- progressor(steps = length(x = cells)) pcents(message = "Reading centroids", class = 'sticky', amount = 0) centroids <- lapply( X = names(x = cells), FUN = function(x) { cents <- cells[[x]]$xy names(x = cents) <- c('x', 'y') cents <- as.data.frame(x = cents) cents$cell <- x pcents() return(cents) } ) pcents(type = 'finish') do.call(what = 'rbind', args = centroids) }, 'segmentations' = { psegs <- progressor(steps = length(x = cells)) psegs(message = "Reading segmentations", class = 'sticky', amount = 0) segmentations <- lapply( X = names(x = cells), FUN = function(x) { poly <- cells[[x]]$poly poly <- lapply(X = poly, FUN = unlist) poly <- as.data.frame(x = do.call(what = 'rbind', args = poly)) colnames(x = poly) <- c('x', 'y') poly$cell <- x psegs() return(poly) } ) psegs(type = 'finish') do.call(what = 'rbind', args = segmentations) }, 'molecules' = { pmols1 <- progressor() pmols1(message = "Reading molecules", class = 'sticky', amount = 0) pmols1(type = 'finish') mols <- read_json(path = molecules) pmols2 <- progressor(steps = length(x = mols)) mols <- lapply( X = names(x = mols), FUN = function(m) { x <- mols[[m]] x <- lapply(X = x, FUN = unlist) x <- as.data.frame(x = do.call(what = 'rbind', args = x)) colnames(x = x) <- c('x', 'y') x$gene <- m pmols2() return(x) } ) mols <- do.call(what = 'rbind', args = mols) pmols2(type = 'finish') if (!is.na(x = filter)) { pmols3 <- progressor() pmols3( message = paste("Filtering molecules with pattern", filter), class = 'sticky', amount = 0 ) pmols3(type = 'finish') mols <- mols[!grepl(pattern = filter, x = mols$gene), , drop = FALSE] } mols }, stop("Unknown data type: ", i) ) } return(outs) } #' Read and Load MERFISH Input from Vizgen #' #' Read and load in MERFISH data from Vizgen-formatted files #' #' @inheritParams ReadVitessce #' @param data.dir Path to the directory with Vizgen MERFISH files; requires at #' least one of the following files present: #' \itemize{ #' \item \dQuote{\code{cell_by_gene.csv}}: used for reading count matrix #' \item \dQuote{\code{cell_metadata.csv}}: used for reading cell spatial #' coordinate matrices #' \item \dQuote{\code{detected_transcripts.csv}}: used for reading molecule #' spatial coordinate matrices #' } #' @param transcripts Optional file path for counts matrix; pass \code{NA} to #' suppress reading counts matrix #' @param spatial Optional file path for spatial metadata; pass \code{NA} to #' suppress reading spatial coordinates. 
If \code{spatial} is provided and #' \code{type} is \dQuote{segmentations}, uses \code{dirname(spatial)} instead of #' \code{data.dir} to find HDF5 files #' @param molecules Optional file path for molecule coordinates file; pass #' \code{NA} to suppress reading spatial molecule information #' @param type Type of cell spatial coordinate matrices to read; choose one #' or more of: #' \itemize{ #' \item \dQuote{segmentations}: cell segmentation vertices; requires #' \href{https://cran.r-project.org/package=hdf5r}{\pkg{hdf5r}} to be #' installed and requires a directory \dQuote{\code{cell_boundaries}} within #' \code{data.dir}. Within \dQuote{\code{cell_boundaries}}, there must be #' one or more HDF5 file named \dQuote{\code{feature_data_##.hdf5}} #' \item \dQuote{centroids}: cell centroids in micron coordinate space #' \item \dQuote{boxes}: cell box outlines in micron coordinate space #' } #' @param mol.type Type of molecule spatial coordinate matrices to read; #' choose one or more of: #' \itemize{ #' \item \dQuote{pixels}: molecule coordinates in pixel space #' \item \dQuote{microns}: molecule coordinates in micron space #' } #' @param metadata Type of available metadata to read; #' choose zero or more of: #' \itemize{ #' \item \dQuote{volume}: estimated cell volume #' \item \dQuote{fov}: cell's fov #' } #' @param z Z-index to load; must be between 0 and 6, inclusive #' #' @return \code{ReadVizgen}: A list with some combination of the #' following values: #' \itemize{ #' \item \dQuote{\code{transcripts}}: a #' \link[Matrix:dgCMatrix-class]{sparse matrix} with expression data; cells #' are columns and features are rows #' \item \dQuote{\code{segmentations}}: a data frame with cell polygon outlines in #' three columns: \dQuote{x}, \dQuote{y}, and \dQuote{cell} #' \item \dQuote{\code{centroids}}: a data frame with cell centroid #' coordinates in three columns: \dQuote{x}, \dQuote{y}, and \dQuote{cell} #' \item \dQuote{\code{boxes}}: a data frame with cell box outlines in three #' columns: \dQuote{x}, \dQuote{y}, and \dQuote{cell} #' \item \dQuote{\code{microns}}: a data frame with molecule micron #' coordinates in three columns: \dQuote{x}, \dQuote{y}, and \dQuote{gene} #' \item \dQuote{\code{pixels}}: a data frame with molecule pixel coordinates #' in three columns: \dQuote{x}, \dQuote{y}, and \dQuote{gene} #' \item \dQuote{\code{metadata}}: a data frame with the cell-level metadata #' requested by \code{metadata} #' } #' #' @importFrom future.apply future_lapply #' #' @export #' #' @order 1 #' #' @concept preprocessing #' #' @template section-progressr #' @template section-future #' #' @templateVar pkg data.table #' @template note-reqdpkg #' ReadVizgen <- function( data.dir, transcripts = NULL, spatial = NULL, molecules = NULL, type = 'segmentations', mol.type = 'microns', metadata = NULL, filter = NA_character_, z = 3L ) { # TODO: handle multiple segmentations per z-plane if (!requireNamespace("data.table", quietly = TRUE)) { stop("Please install 'data.table' for this function") } # hdf5r is only used for loading polygon boundaries # Not needed for all Vizgen input hdf5 <- requireNamespace("hdf5r", quietly = TRUE) # Argument checking type <- match.arg( arg = type, choices = c('segmentations', 'centroids', 'boxes'), several.ok = TRUE ) mol.type <- match.arg( arg = mol.type, choices = c('pixels', 'microns'), several.ok = TRUE ) if (!is.null(x = metadata)) { metadata <- match.arg( arg = metadata, choices = c("volume", "fov"), several.ok = TRUE ) } if (!z %in% seq.int(from = 0L, to = 6L)) { 
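# Vizgen outputs index their z-planes 0 through 6; only the single requested
# plane is loaded below, so reject any other value here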
stop("The z-index must be in the range [0, 6]") } use.dir <- all(vapply( X = c(transcripts, spatial, molecules), FUN = function(x) { return(is.null(x = x) || is.na(x = x)) }, FUN.VALUE = logical(length = 1L) )) if (use.dir && !dir.exists(paths = data.dir)) { stop("Cannot find Vizgen directory ", data.dir) } # Identify input files files <- c( transcripts = transcripts %||% 'cell_by_gene[_a-zA-Z0-9]*.csv', spatial = spatial %||% 'cell_metadata[_a-zA-Z0-9]*.csv', molecules = molecules %||% 'detected_transcripts[_a-zA-Z0-9]*.csv' ) files[is.na(x = files)] <- NA_character_ h5dir <- file.path( ifelse( test = dirname(path = files['spatial']) == '.', yes = data.dir, no = dirname(path = files['spatial']) ), 'cell_boundaries' ) zidx <- paste0('zIndex_', z) files <- vapply( X = files, FUN = function(x) { x <- as.character(x = x) if (isTRUE(x = dirname(path = x) == '.')) { fnames <- list.files( path = data.dir, pattern = x, recursive = FALSE, full.names = TRUE ) return(sort(x = fnames, decreasing = TRUE)[1L]) } else { return(x) } }, FUN.VALUE = character(length = 1L), USE.NAMES = TRUE ) files[!file.exists(files)] <- NA_character_ if (all(is.na(x = files))) { stop("Cannot find Vizgen input files in ", data.dir) } # Checking for loading spatial coordinates if (!is.na(x = files[['spatial']])) { pprecoord <- progressor() pprecoord( message = "Preloading cell spatial coordinates", class = 'sticky', amount = 0 ) sp <- data.table::fread( file = files[['spatial']], sep = ',', data.table = FALSE, verbose = FALSE # showProgress = progressr:::progressr_in_globalenv(action = 'query') # showProgress = verbose ) pprecoord(type = 'finish') rownames(x = sp) <- as.character(x = sp[, 1]) sp <- sp[, -1, drop = FALSE] # Check to see if we should load segmentations if ('segmentations' %in% type) { poly <- if (isFALSE(x = hdf5)) { warning( "Cannot find hdf5r; unable to load segmentation vertices", immediate. = TRUE ) FALSE } else if (!dir.exists(paths = h5dir)) { warning("Cannot find cell boundary H5 files", immediate. = TRUE) FALSE } else { TRUE } if (isFALSE(x = poly)) { type <- setdiff(x = type, y = 'segmentations') } } spatials <- rep_len(x = files[['spatial']], length.out = length(x = type)) names(x = spatials) <- type files <- c(files, spatials) files <- files[setdiff(x = names(x = files), y = 'spatial')] } else if (!is.null(x = metadata)) { warning( "metadata can only be loaded when spatial coordinates are loaded", immediate. 
= TRUE ) metadata <- NULL } # Check for loading of molecule coordinates if (!is.na(x = files[['molecules']])) { ppremol <- progressor() ppremol( message = "Preloading molecule coordinates", class = 'sticky', amount = 0 ) mx <- data.table::fread( file = files[['molecules']], sep = ',', verbose = FALSE # showProgress = verbose ) mx <- mx[mx$global_z == z, , drop = FALSE] if (!is.na(x = filter)) { ppremol( message = paste("Filtering molecules with pattern", filter), class = 'sticky', amount = 0 ) mx <- mx[!grepl(pattern = filter, x = mx$gene), , drop = FALSE] } ppremol(type = 'finish') mols <- rep_len(x = files[['molecules']], length.out = length(x = mol.type)) names(x = mols) <- mol.type files <- c(files, mols) files <- files[setdiff(x = names(x = files), y = 'molecules')] } files <- files[!is.na(x = files)] # Read input data outs <- vector(mode = 'list', length = length(x = files)) names(x = outs) <- names(x = files) if (!is.null(metadata)) { outs <- c(outs, list(metadata = NULL)) } for (otype in names(x = outs)) { outs[[otype]] <- switch( EXPR = otype, 'transcripts' = { ptx <- progressor() ptx(message = 'Reading counts matrix', class = 'sticky', amount = 0) tx <- data.table::fread( file = files[[otype]], sep = ',', data.table = FALSE, verbose = FALSE ) rownames(x = tx) <- as.character(x = tx[, 1]) tx <- t(x = as.matrix(x = tx[, -1, drop = FALSE])) if (!is.na(x = filter)) { ptx( message = paste("Filtering genes with pattern", filter), class = 'sticky', amount = 0 ) tx <- tx[!grepl(pattern = filter, x = rownames(x = tx)), , drop = FALSE] } ratio <- getOption(x = 'Seurat.input.sparse_ratio', default = 0.4) if ((sum(tx == 0) / length(x = tx)) > ratio) { ptx( message = 'Converting counts to sparse matrix', class = 'sticky', amount = 0 ) tx <- as.sparse(x = tx) } ptx(type = 'finish') tx }, 'centroids' = { pcents <- progressor() pcents( message = 'Creating centroid coordinates', class = 'sticky', amount = 0 ) pcents(type = 'finish') data.frame( x = sp$center_x, y = sp$center_y, cell = rownames(x = sp), stringsAsFactors = FALSE ) }, 'segmentations' = { ppoly <- progressor(steps = length(x = unique(x = sp$fov))) ppoly( message = "Creating polygon coordinates", class = 'sticky', amount = 0 ) pg <- future_lapply( X = unique(x = sp$fov), FUN = function(f, ...) { fname <- file.path(h5dir, paste0('feature_data_', f, '.hdf5')) if (!file.exists(fname)) { warning( "Cannot find HDF5 file for field of view ", f, immediate. = TRUE ) return(NULL) } hfile <- hdf5r::H5File$new(filename = fname, mode = 'r') on.exit(expr = hfile$close_all()) cells <- rownames(x = subset(x = sp, subset = fov == f)) df <- lapply( X = cells, FUN = function(x) { return(tryCatch( expr = { cc <- hfile[['featuredata']][[x]][[zidx]][['p_0']][['coordinates']]$read() cc <- as.data.frame(x = t(x = cc)) colnames(x = cc) <- c('x', 'y') cc$cell <- x cc }, error = function(...) { return(NULL) } )) } ) ppoly() return(do.call(what = 'rbind', args = df)) } ) ppoly(type = 'finish') pg <- do.call(what = 'rbind', args = pg) npg <- length(x = unique(x = pg$cell)) if (npg < nrow(x = sp)) { warning( nrow(x = sp) - npg, " cells missing polygon information", immediate. 
= TRUE ) } pg }, 'boxes' = { pbox <- progressor(steps = nrow(x = sp)) pbox(message = "Creating box coordinates", class = 'sticky', amount = 0) bx <- future_lapply( X = rownames(x = sp), FUN = function(cell) { row <- sp[cell, ] df <- expand.grid( x = c(row$min_x, row$max_x), y = c(row$min_y, row$max_y), cell = cell, KEEP.OUT.ATTRS = FALSE, stringsAsFactors = FALSE ) df <- df[c(1, 3, 4, 2), , drop = FALSE] pbox() return(df) } ) pbox(type = 'finish') do.call(what = 'rbind', args = bx) }, 'metadata' = { pmeta <- progressor() pmeta( message = 'Loading metadata', class = 'sticky', amount = 0 ) pmeta(type = 'finish') sp[, metadata, drop = FALSE] }, 'pixels' = { ppixels <- progressor() ppixels( message = 'Creating pixel-level molecule coordinates', class = 'sticky', amount = 0 ) df <- data.frame( x = mx$x, y = mx$y, gene = mx$gene, stringsAsFactors = FALSE ) # if (!is.na(x = filter)) { # ppixels( # message = paste("Filtering molecules with pattern", filter), # class = 'sticky', # amount = 0 # ) # df <- df[!grepl(pattern = filter, x = df$gene), , drop = FALSE] # } ppixels(type = 'finish') df }, 'microns' = { pmicrons <- progressor() pmicrons( message = "Creating micron-level molecule coordinates", class = 'sticky', amount = 0 ) df <- data.frame( x = mx$global_x, y = mx$global_y, gene = mx$gene, stringsAsFactors = FALSE ) # if (!is.na(x = filter)) { # pmicrons( # message = paste("Filtering molecules with pattern", filter), # class = 'sticky', # amount = 0 # ) # df <- df[!grepl(pattern = filter, x = df$gene), , drop = FALSE] # } pmicrons(type = 'finish') df }, stop("Unknown MERFISH input type: ", type) ) } return(outs) } #' Normalize raw data to fractions #' #' Normalize count data to relative counts per cell by dividing by the total #' per cell. Optionally use a scale factor, e.g. for counts per million (CPM) #' use \code{scale.factor = 1e6}. #' #' @param data Matrix with the raw count data #' @param scale.factor Scale the result. Default is 1 #' @param verbose Print progress #' @return Returns a matrix with the relative counts #' #' @importFrom methods as #' @importFrom Matrix colSums #' #' @export #' @concept preprocessing #' #' @examples #' mat <- matrix(data = rbinom(n = 25, size = 5, prob = 0.2), nrow = 5) #' mat #' mat_norm <- RelativeCounts(data = mat) #' mat_norm #' RelativeCounts <- function(data, scale.factor = 1, verbose = TRUE) { if (is.data.frame(x = data)) { data <- as.matrix(x = data) } if (!inherits(x = data, what = 'dgCMatrix')) { data <- as.sparse(x = data) } if (verbose) { cat("Performing relative-counts-normalization\n", file = stderr()) } norm.data <- data norm.data@x <- norm.data@x / rep.int(Matrix::colSums(norm.data), diff(norm.data@p)) * scale.factor return(norm.data) } #' Run the mark variogram computation on a given position matrix and expression #' matrix. #' #' Wraps the functionality of markvario from the spatstat package. #' #' @param spatial.location A 2 column matrix giving the spatial locations of #' each of the data points also in data #' @param data Matrix containing the data used as "marks" (e.g. gene expression) #' @param ... Arguments passed to markvario #' #' @importFrom spatstat.explore markvario #' @importFrom spatstat.geom ppp #' #' @export #' @concept preprocessing #' RunMarkVario <- function( spatial.location, data, ... 
) {
  pp <- ppp(
    x = spatial.location[, 1],
    y = spatial.location[, 2],
    xrange = range(spatial.location[, 1]),
    yrange = range(spatial.location[, 2])
  )
  if (nbrOfWorkers() > 1) {
    chunks <- nbrOfWorkers()
    features <- rownames(x = data)
    features <- split(
      x = features,
      f = ceiling(x = seq_along(along.with = features) / (length(x = features) / chunks))
    )
    mv <- future_lapply(X = features, FUN = function(x) {
      pp[["marks"]] <- as.data.frame(x = t(x = data[x, ]))
      markvario(X = pp, normalise = TRUE, ...)
    })
    mv <- unlist(x = mv, recursive = FALSE)
    names(x = mv) <- rownames(x = data)
  } else {
    pp[["marks"]] <- as.data.frame(x = t(x = data))
    mv <- markvario(X = pp, normalise = TRUE, ...)
  }
  return(mv)
}

#' Compute Moran's I value.
#'
#' Wraps the functionality of the Moran.I function from the ape package.
#' Weights are computed as 1/distance^2.
#'
#' @param data Expression matrix
#' @param pos Position matrix
#' @param verbose Display messages/progress
#'
#' @importFrom stats dist
#'
#' @export
#' @concept preprocessing
#'
RunMoransI <- function(data, pos, verbose = TRUE) {
  mysapply <- sapply
  if (verbose) {
    message("Computing Moran's I")
    mysapply <- pbsapply
  }
  Rfast2.installed <- PackageCheck("Rfast2", error = FALSE)
  if (Rfast2.installed) {
    MyMoran <- Rfast2::moranI
  } else if (!PackageCheck('ape', error = FALSE)) {
    stop(
      "'RunMoransI' requires either Rfast2 or ape to be installed",
      call. = FALSE
    )
  } else {
    MyMoran <- ape::Moran.I
    if (getOption('Seurat.Rfast2.msg', TRUE)) {
      message(
        "For a more efficient implementation of the Moran's I calculation,",
        "\n(selection.method = 'moransi') please install the Rfast2 package",
        "\n--------------------------------------------",
        "\ninstall.packages('Rfast2')",
        "\n--------------------------------------------",
        "\nAfter installation of Rfast2, Seurat will automatically use the more ",
        "\nefficient implementation (no further action necessary).",
        "\nThis message will be shown once per session"
      )
      options(Seurat.Rfast2.msg = FALSE)
    }
  }
  pos.dist <- dist(x = pos)
  pos.dist.mat <- as.matrix(x = pos.dist)
  # weights as 1/dist^2
  weights <- 1/pos.dist.mat^2
  diag(x = weights) <- 0
  results <- mysapply(X = 1:nrow(x = data), FUN = function(x) {
    tryCatch(
      expr = MyMoran(data[x, ], weights),
      error = function(x) c(1, 1, 1, 1)
    )
  })
  pcol <- ifelse(test = Rfast2.installed, yes = 2, no = 4)
  results <- data.frame(
    observed = unlist(x = results[1, ]),
    p.value = unlist(x = results[pcol, ])
  )
  rownames(x = results) <- rownames(x = data)
  return(results)
}

#' Sample UMI
#'
#' Downsample each cell to a specified number of UMIs. Includes
#' an option to upsample cells below the specified UMI count as well.
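#' \code{max.umi} may be supplied either as a single value applied to every cell
#' or as a per-cell vector of length \code{ncol(data)}.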
#' #' @param data Matrix with the raw count data #' @param max.umi Number of UMIs to sample to #' @param upsample Upsamples all cells with fewer than max.umi #' @param verbose Display the progress bar #' #' @importFrom methods as #' #' @return Matrix with downsampled data #' #' @export #' @concept preprocessing #' #' @examples #' data("pbmc_small") #' counts = as.matrix(x = GetAssayData(object = pbmc_small, assay = "RNA", slot = "counts")) #' downsampled = SampleUMI(data = counts) #' head(x = downsampled) #' SampleUMI <- function( data, max.umi = 1000, upsample = FALSE, verbose = FALSE ) { data <- as.sparse(x = data) if (length(x = max.umi) == 1) { new_data <- RunUMISampling( data = data, sample_val = max.umi, upsample = upsample, display_progress = verbose ) } else if (length(x = max.umi) != ncol(x = data)) { stop("max.umi vector not equal to number of cells") } else { new_data <- RunUMISamplingPerCell( data = data, sample_val = max.umi, upsample = upsample, display_progress = verbose ) } dimnames(x = new_data) <- dimnames(x = data) return(new_data) } #' Use regularized negative binomial regression to normalize UMI count data #' #' This function calls sctransform::vst. The sctransform package is available at #' https://github.com/satijalab/sctransform. #' Use this function as an alternative to the NormalizeData, #' FindVariableFeatures, ScaleData workflow. Results are saved in a new assay #' (named SCT by default) with counts being (corrected) counts, data being log1p(counts), #' scale.data being pearson residuals; sctransform::vst intermediate results are saved #' in misc slot of new assay. #' #' @param object UMI counts matrix #' @param cell.attr A metadata with cell attributes #' @param reference.SCT.model If not NULL, compute residuals for the object #' using the provided SCT model; supports only log_umi as the latent variable. #' If residual.features are not specified, compute for the top variable.features.n #' specified in the model which are also present in the object. If #' residual.features are specified, the variable features of the resulting SCT #' assay are set to the top variable.features.n in the model. #' @param do.correct.umi Place corrected UMI matrix in assay counts slot; default is TRUE #' @param ncells Number of subsampling cells used to build NB regression; default is 5000 #' @param residual.features Genes to calculate residual features for; default is NULL (all genes). #' If specified, will be set to VariableFeatures of the returned object. #' @param variable.features.n Use this many features as variable features after #' ranking by residual variance; default is 3000. Only applied if residual.features is not set. #' @param variable.features.rv.th Instead of setting a fixed number of variable features, #' use this residual variance cutoff; this is only used when \code{variable.features.n} #' is set to NULL; default is 1.3. Only applied if residual.features is not set. #' @param vars.to.regress Variables to regress out in a second non-regularized linear #' regression. For example, percent.mito. 
Default is NULL #' @param do.scale Whether to scale residuals to have unit variance; default is FALSE #' @param do.center Whether to center residuals to have mean zero; default is TRUE #' @param clip.range Range to clip the residuals to; default is \code{c(-sqrt(n/30), sqrt(n/30))}, #' where n is the number of cells #' @param vst.flavor When set to 'v2' sets method = glmGamPoi_offset, n_cells=2000, #' and exclude_poisson = TRUE which causes the model to learn theta and intercept #' only besides excluding poisson genes from learning and regularization #' @param conserve.memory If set to TRUE the residual matrix for all genes is never #' created in full; useful for large data sets, but will take longer to run; #' this will also set return.only.var.genes to TRUE; default is FALSE #' @param return.only.var.genes If set to TRUE the scale.data matrices in output assay are #' subset to contain only the variable genes; default is TRUE #' @param seed.use Set a random seed. By default, sets the seed to 1448145. Setting #' NULL will not set a seed. #' @param verbose Whether to print messages and progress bars #' @param ... Additional parameters passed to \code{sctransform::vst} #' #' @return Returns a Seurat object with a new assay (named SCT by default) with #' counts being (corrected) counts, data being log1p(counts), scale.data being #' pearson residuals; sctransform::vst intermediate results are saved in misc #' slot of the new assay. #' #' @importFrom stats setNames #' @importFrom Matrix colSums #' @importFrom SeuratObject as.sparse #' @importFrom sctransform vst get_residual_var get_residuals correct_counts #' #' @seealso \code{\link[sctransform]{correct_counts}} \code{\link[sctransform]{get_residuals}} #' #' @rdname SCTransform #' @concept preprocessing #' @export #' SCTransform.default <- function( object, cell.attr, reference.SCT.model = NULL, do.correct.umi = TRUE, ncells = 5000, residual.features = NULL, variable.features.n = 3000, variable.features.rv.th = 1.3, vars.to.regress = NULL, do.scale = FALSE, do.center = TRUE, clip.range = c(-sqrt(x = ncol(x = umi) / 30), sqrt(x = ncol(x = umi) / 30)), vst.flavor = 'v2', conserve.memory = FALSE, return.only.var.genes = TRUE, seed.use = 1448145, verbose = TRUE, ... ) { if (!is.null(x = seed.use)) { set.seed(seed = seed.use) } vst.args <- list(...) 
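  # Illustrative only (kept as a comment so it is never executed): this default
  # method is normally reached through the Seurat-level wrapper, e.g. on the toy
  # 'pbmc_small' object shipped with Seurat; the parameter values shown here are
  # arbitrary examples rather than requirements.
  #   data("pbmc_small")
  #   pbmc_small <- SCTransform(
  #     object = pbmc_small,
  #     vst.flavor = "v2",          # default flavor; "v1" uses the original model
  #     variable.features.n = 3000  # keep the top features ranked by residual variance
  #   )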
object <- as.sparse(x = object) umi <- object # check for batch_var in meta data if ('batch_var' %in% names(x = vst.args)) { if (!(vst.args[['batch_var']] %in% colnames(x = cell.attr))) { stop('batch_var not found in seurat object meta data') } } # parameter checking when reference.SCT.model is set if (!is.null(x = reference.SCT.model) ) { if (inherits(x = reference.SCT.model, what = "SCTModel")) { reference.SCT.model <- SCTModel_to_vst(SCTModel = reference.SCT.model) } if (is.list(x = reference.SCT.model) & inherits(x = reference.SCT.model[[1]], what = "SCTModel")) { stop("reference.SCT.model must be one SCTModel rather than a list of SCTModel") } if ('latent_var' %in% names(x = vst.args)) { stop('custom latent variables are not supported when reference.SCT.model is given') } if (reference.SCT.model$model_str != 'y ~ log_umi') { stop('reference.SCT.model must be derived using default SCT regression formula, `y ~ log_umi`') } } # check for latent_var in meta data if ('latent_var' %in% names(x = vst.args)) { known.attr <- c('umi', 'gene', 'log_umi', 'log_gene', 'umi_per_gene', 'log_umi_per_gene') if (!all(vst.args[['latent_var']] %in% c(colnames(x = cell.attr), known.attr))) { stop('latent_var values are not from the set of cell attributes sctransform calculates by default and cannot be found in seurat object meta data') } } # check for vars.to.regress in meta data if (any(!vars.to.regress %in% colnames(x = cell.attr))) { stop('problem with second non-regularized linear regression; not all variables found in seurat object meta data; check vars.to.regress parameter') } if (any(c('cell_attr', 'verbosity', 'return_cell_attr', 'return_gene_attr', 'return_corrected_umi') %in% names(x = vst.args))) { warning( 'the following arguments will be ignored because they are set within this function:', paste( c( 'cell_attr', 'verbosity', 'return_cell_attr', 'return_gene_attr', 'return_corrected_umi' ), collapse = ', ' ), call. = FALSE, immediate. = TRUE ) } if (!is.null(x = vst.flavor) && !vst.flavor %in% c("v1", "v2")){ stop("vst.flavor can be 'v1' or 'v2'. 
Default is 'v2'") } if (!is.null(x = vst.flavor) && vst.flavor == "v1"){ vst.flavor <- NULL } vst.args[['vst.flavor']] <- vst.flavor vst.args[['umi']] <- umi vst.args[['cell_attr']] <- cell.attr vst.args[['verbosity']] <- as.numeric(x = verbose) * 1 vst.args[['return_cell_attr']] <- TRUE vst.args[['return_gene_attr']] <- TRUE vst.args[['return_corrected_umi']] <- do.correct.umi vst.args[['n_cells']] <- min(ncells, ncol(x = umi)) residual.type <- vst.args[['residual_type']] %||% 'pearson' res.clip.range <- vst.args[['res_clip_range']] %||% c(-sqrt(x = ncol(x = umi)), sqrt(x = ncol(x = umi))) # set sct normalization method if (!is.null( reference.SCT.model)) { sct.method <- "reference.model" } else if (!is.null(x = residual.features)) { sct.method <- "residual.features" } else if (conserve.memory) { sct.method <- "conserve.memory" } else { sct.method <- "default" } # set vst model vst.out <- switch( EXPR = sct.method, 'reference.model' = { if (verbose) { message("Using reference SCTModel to calculate pearson residuals") } do.center <- FALSE do.correct.umi <- FALSE vst.out <- reference.SCT.model clip.range <- vst.out$arguments$sct.clip.range cell_attr <- data.frame(log_umi = log10(x = colSums(umi))) rownames(cell_attr) <- colnames(x = umi) vst.out$cell_attr <- cell_attr all.features <- intersect( x = rownames(x = vst.out$gene_attr), y = rownames(x = umi) ) vst.out$gene_attr <- vst.out$gene_attr[all.features ,] vst.out$model_pars_fit <- vst.out$model_pars_fit[all.features,] vst.out }, 'residual.features' = { if (verbose) { message("Computing residuals for the ", length(x = residual.features), " specified features") } return.only.var.genes <- TRUE do.correct.umi <- FALSE vst.args[['return_corrected_umi']] <- FALSE vst.args[['residual_type']] <- 'none' vst.out <- do.call(what = 'vst', args = vst.args) vst.out$gene_attr$residual_variance <- NA_real_ vst.out }, 'conserve.memory' = { return.only.var.genes <- TRUE vst.args[['residual_type']] <- 'none' vst.out <- do.call(what = 'vst', args = vst.args) feature.variance <- get_residual_var( vst_out = vst.out, umi = umi, residual_type = residual.type, res_clip_range = res.clip.range ) vst.out$gene_attr$residual_variance <- NA_real_ vst.out$gene_attr[names(x = feature.variance), 'residual_variance'] <- feature.variance vst.out }, 'default' = { vst.out <- do.call(what = 'vst', args = vst.args) vst.out }) feature.variance <- vst.out$gene_attr[,"residual_variance"] names(x = feature.variance) <- rownames(x = vst.out$gene_attr) if (verbose) { message('Determine variable features') } feature.variance <- sort(x = feature.variance, decreasing = TRUE) if (!is.null(x = variable.features.n)) { top.features <- names(x = feature.variance)[1:min(variable.features.n, length(x = feature.variance))] } else { top.features <- names(x = feature.variance)[feature.variance >= variable.features.rv.th] } # get residuals vst.out <- switch( EXPR = sct.method, 'reference.model' = { if (is.null(x = residual.features)) { residual.features <- top.features } residual.features <- Reduce( f = intersect, x = list(residual.features, rownames(x = umi), rownames(x = vst.out$model_pars_fit)) ) residual.feature.mat <- get_residuals( vst_out = vst.out, umi = umi[residual.features, , drop = FALSE], verbosity = as.numeric(x = verbose)*2 ) vst.out$gene_attr <- vst.out$gene_attr[residual.features ,] ref.residuals.mean <- vst.out$gene_attr[,"residual_mean"] vst.out$y <- sweep( x = residual.feature.mat, MARGIN = 1, STATS = ref.residuals.mean, FUN = "-" ) vst.out }, 'residual.features' = { 
residual.features <- intersect( x = residual.features, y = rownames(x = vst.out$gene_attr) ) residual.feature.mat <- get_residuals( vst_out = vst.out, umi = umi[residual.features, , drop = FALSE], verbosity = as.numeric(x = verbose)*2 ) vst.out$y <- residual.feature.mat vst.out$gene_attr$residual_mean <- NA_real_ vst.out$gene_attr$residual_variance <- NA_real_ vst.out$gene_attr[residual.features, "residual_mean"] <- rowMeans2(x = vst.out$y) vst.out$gene_attr[residual.features, "residual_variance"] <- RowVar(x = vst.out$y) vst.out }, 'conserve.memory' = { vst.out$y <- get_residuals( vst_out = vst.out, umi = umi[top.features, ], residual_type = residual.type, res_clip_range = res.clip.range, verbosity = as.numeric(x = verbose)*2 ) vst.out$gene_attr$residual_mean <- NA_real_ vst.out$gene_attr[top.features, "residual_mean"] = rowMeans2(x = vst.out$y) if (do.correct.umi & residual.type == 'pearson') { vst.out$umi_corrected <- correct_counts( x = vst.out, umi = umi, verbosity = as.numeric(x = verbose) * 1 ) } vst.out }, 'default' = { if (return.only.var.genes) { vst.out$y <- vst.out$y[top.features, ] } vst.out }) scale.data <- vst.out$y # clip the residuals scale.data[scale.data < clip.range[1]] <- clip.range[1] scale.data[scale.data > clip.range[2]] <- clip.range[2] # 2nd regression scale.data <- ScaleData( scale.data, features = NULL, vars.to.regress = vars.to.regress, latent.data = cell.attr[, vars.to.regress, drop = FALSE], model.use = 'linear', use.umi = FALSE, do.scale = do.scale, do.center = do.center, scale.max = Inf, block.size = 750, min.cells.to.block = 3000, verbose = verbose ) vst.out$y <- scale.data vst.out$variable_features <- residual.features %||% top.features if (!do.correct.umi) { vst.out$umi_corrected <- umi } min_var <- vst.out$arguments$min_variance return(vst.out) } #' @rdname SCTransform #' @concept preprocessing #' @export #' @method SCTransform Assay #' SCTransform.Assay <- function( object, cell.attr, reference.SCT.model = NULL, do.correct.umi = TRUE, ncells = 5000, residual.features = NULL, variable.features.n = 3000, variable.features.rv.th = 1.3, vars.to.regress = NULL, do.scale = FALSE, do.center = TRUE, clip.range = c(-sqrt(x = ncol(x = object) / 30), sqrt(x = ncol(x = object) / 30)), vst.flavor = 'v2', conserve.memory = FALSE, return.only.var.genes = TRUE, seed.use = 1448145, verbose = TRUE, ... ) { if (!is.null(x = seed.use)) { set.seed(seed = seed.use) } if (!is.null(reference.SCT.model)){ do.correct.umi <- FALSE do.center <- FALSE } umi <- GetAssayData(object = object, slot = 'counts') vst.out <- SCTransform(object = umi, cell.attr = cell.attr, reference.SCT.model = reference.SCT.model, do.correct.umi = do.correct.umi, ncells = ncells, residual.features = residual.features, variable.features.n = variable.features.n, variable.features.rv.th = variable.features.rv.th, vars.to.regress = vars.to.regress, do.scale = do.scale, do.center = do.center, clip.range = clip.range, vst.flavor = vst.flavor, conserve.memory = conserve.memory, return.only.var.genes = return.only.var.genes, seed.use = seed.use, verbose = verbose, ...) 
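  # The vst output computed above is unpacked below into a new SCT assay:
  # (corrected) counts go to the 'counts' slot, log1p(counts) to 'data', clipped
  # residuals to 'scale.data', and the fitted model is stored with the assay.
  # Illustrative (not executed) accessors on a Seurat object 'seu' that has been
  # run through SCTransform(), assuming the default assay name "SCT":
  #   GetAssayData(seu, assay = "SCT", slot = "scale.data")    # pearson residuals
  #   SCTResults(seu[["SCT"]], slot = "feature.attributes")    # per-gene model stats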
residual.type <- vst.out[['residual_type']] %||% 'pearson' sct.method <- vst.out[["sct.method"]] # create output assay and put (corrected) umi counts in count slot if (do.correct.umi & residual.type == 'pearson') { if (verbose) { message('Place corrected count matrix in counts slot') } assay.out <- CreateAssayObject(counts = vst.out$umi_corrected) vst.out$umi_corrected <- NULL } else { # TODO: restore once check.matrix is in SeuratObject # assay.out <- CreateAssayObject(counts = umi, check.matrix = FALSE) assay.out <- CreateAssayObject(counts = umi) } # set the variable genes VariableFeatures(object = assay.out) <- vst.out$variable_features # put log1p transformed counts in data assay.out <- SetAssayData( object = assay.out, slot = 'data', new.data = log1p(x = GetAssayData(object = assay.out, slot = 'counts')) ) scale.data <- vst.out$y assay.out <- SetAssayData( object = assay.out, slot = 'scale.data', new.data = scale.data ) vst.out$y <- NULL # save clip.range into vst model vst.out$arguments$sct.clip.range <- clip.range vst.out$arguments$sct.method <- sct.method Misc(object = assay.out, slot = 'vst.out') <- vst.out assay.out <- as(object = assay.out, Class = "SCTAssay") return(assay.out) } #' @param assay Name of assay to pull the count data from; default is 'RNA' #' @param new.assay.name Name for the new assay containing the normalized data; default is 'SCT' #' #' @rdname SCTransform #' @concept preprocessing #' @export #' @method SCTransform Seurat #' SCTransform.Seurat <- function( object, assay = "RNA", new.assay.name = 'SCT', reference.SCT.model = NULL, do.correct.umi = TRUE, ncells = 5000, residual.features = NULL, variable.features.n = 3000, variable.features.rv.th = 1.3, vars.to.regress = NULL, do.scale = FALSE, do.center = TRUE, clip.range = c(-sqrt(x = ncol(x = object[[assay]]) / 30), sqrt(x = ncol(x = object[[assay]]) / 30)), vst.flavor = "v2", conserve.memory = FALSE, return.only.var.genes = TRUE, seed.use = 1448145, verbose = TRUE, ... ) { if (!is.null(x = seed.use)) { set.seed(seed = seed.use) } assay <- assay %||% DefaultAssay(object = object) if (assay == "SCT") { # if re-running SCTransform, use the RNA assay assay <- "RNA" warning("Running SCTransform on the RNA assay while default assay is SCT.") } if (verbose){ message("Running SCTransform on assay: ", assay) } cell.attr <- slot(object = object, name = 'meta.data')[colnames(object[[assay]]),] assay.data <- SCTransform(object = object[[assay]], cell.attr = cell.attr, reference.SCT.model = reference.SCT.model, do.correct.umi = do.correct.umi, ncells = ncells, residual.features = residual.features, variable.features.n = variable.features.n, variable.features.rv.th = variable.features.rv.th, vars.to.regress = vars.to.regress, do.scale = do.scale, do.center = do.center, clip.range = clip.range, vst.flavor = vst.flavor, conserve.memory = conserve.memory, return.only.var.genes = return.only.var.genes, seed.use = seed.use, verbose = verbose, ...) assay.data <- SCTAssay(assay.data, assay.orig = assay) slot(object = slot(object = assay.data, name = "SCTModel.list")[[1]], name = "umi.assay") <- assay object[[new.assay.name]] <- assay.data if (verbose) { message(paste("Set default assay to", new.assay.name)) } DefaultAssay(object = object) <- new.assay.name object <- LogSeuratCommand(object = object) return(object) } #' Subset a Seurat Object based on the Barcode Distribution Inflection Points #' #' This convenience function subsets a Seurat object based on calculated inflection points. 
#'
#' See [CalculateBarcodeInflections()] to calculate inflection points and
#' [BarcodeInflectionsPlot()] to visualize and test inflection point calculations.
#'
#' @param object Seurat object
#'
#' @return Returns a subsetted Seurat object.
#'
#' @export
#' @concept preprocessing
#'
#' @author Robert A. Amezquita, \email{robert.amezquita@fredhutch.org}
#' @seealso \code{\link{CalculateBarcodeInflections}} \code{\link{BarcodeInflectionsPlot}}
#'
#' @examples
#' data("pbmc_small")
#' pbmc_small <- CalculateBarcodeInflections(
#'   object = pbmc_small,
#'   group.column = 'groups',
#'   threshold.low = 20,
#'   threshold.high = 30
#' )
#' SubsetByBarcodeInflections(object = pbmc_small)
#'
SubsetByBarcodeInflections <- function(object) {
  cbi.data <- Tool(object = object, slot = 'CalculateBarcodeInflections')
  if (is.null(x = cbi.data)) {
    stop("Barcode inflections not calculated, please run CalculateBarcodeInflections")
  }
  return(object[, cbi.data$cells_pass])
}

#%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
# Methods for Seurat-defined generics
#%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%

#' @param selection.method How to choose top variable features. Choose one of:
#' \itemize{
#'   \item \dQuote{\code{vst}}: First, fits a line to the relationship of
#'   log(variance) and log(mean) using local polynomial regression (loess).
#'   Then standardizes the feature values using the observed mean and
#'   expected variance (given by the fitted line). Feature variance is then
#'   calculated on the standardized values
#'   after clipping to a maximum (see clip.max parameter).
#'   \item \dQuote{\code{mean.var.plot}} (mvp): First, uses a function to
#'   calculate average expression (mean.function) and dispersion
#'   (dispersion.function) for each feature. Next, divides features into
#'   \code{num.bin} (default 20) bins based on their average expression,
#'   and calculates z-scores for dispersion within each bin. The purpose of
#'   this is to identify variable features while controlling for the
#'   strong relationship between variability and average expression
#'   \item \dQuote{\code{dispersion}} (disp): selects the genes with the
#'   highest dispersion values
#' }
#' @param loess.span (vst method) Loess span parameter used when fitting the
#' variance-mean relationship
#' @param clip.max (vst method) After standardization values larger than
#' clip.max will be set to clip.max; default is 'auto' which sets this value to
#' the square root of the number of cells
#' @param mean.function Function to compute x-axis value (average expression).
#' Default is to take the mean of the detected (i.e. non-zero) values
#' @param dispersion.function Function to compute y-axis value (dispersion).
#' Default is to take the standard deviation of all values
#' @param num.bin Total number of bins to use in the scaled analysis (default
#' is 20)
#' @param binning.method Specifies how the bins should be computed. Available
#' methods are:
#' \itemize{
#'   \item \dQuote{\code{equal_width}}: each bin is of equal width along the
#'   x-axis (default)
#'   \item \dQuote{\code{equal_frequency}}: each bin contains an equal number
#'   of features (can increase statistical power to detect overdispersed
#'   features at high expression values, at the cost of reduced resolution
#'   along the x-axis)
#' }
#' @param verbose Show progress bar for calculations
#'
#' @rdname FindVariableFeatures
#' @concept preprocessing
#' @export
#'
FindVariableFeatures.V3Matrix <- function(
  object,
  selection.method = "vst",
  loess.span = 0.3,
  clip.max = 'auto',
  mean.function = FastExpMean,
  dispersion.function = FastLogVMR,
  num.bin = 20,
  binning.method = "equal_width",
  verbose = TRUE,
  ...
) {
  CheckDots(...)
  if (!inherits(x = object, 'Matrix')) {
    object <- as(object = as.matrix(x = object), Class = 'Matrix')
  }
  if (!inherits(x = object, what = 'dgCMatrix')) {
    object <- as.sparse(x = object)
  }
  if (selection.method == "vst") {
    if (clip.max == 'auto') {
      clip.max <- sqrt(x = ncol(x = object))
    }
    hvf.info <- data.frame(mean = rowMeans(x = object))
    hvf.info$variance <- SparseRowVar2(
      mat = object,
      mu = hvf.info$mean,
      display_progress = verbose
    )
    hvf.info$variance.expected <- 0
    hvf.info$variance.standardized <- 0
    not.const <- hvf.info$variance > 0
    fit <- loess(
      formula = log10(x = variance) ~ log10(x = mean),
      data = hvf.info[not.const, ],
      span = loess.span
    )
    hvf.info$variance.expected[not.const] <- 10 ^ fit$fitted
    # use the C++ function to get variance after feature standardization
    hvf.info$variance.standardized <- SparseRowVarStd(
      mat = object,
      mu = hvf.info$mean,
      sd = sqrt(hvf.info$variance.expected),
      vmax = clip.max,
      display_progress = verbose
    )
    colnames(x = hvf.info) <- paste0('vst.', colnames(x = hvf.info))
  } else {
    if (!inherits(x = mean.function, what = 'function')) {
      stop("'mean.function' must be a function")
    }
    if (!inherits(x = dispersion.function, what = 'function')) {
      stop("'dispersion.function' must be a function")
    }
    feature.mean <- mean.function(object, verbose)
    feature.dispersion <- dispersion.function(object, verbose)
    names(x = feature.mean) <- names(x = feature.dispersion) <- rownames(x = object)
    feature.dispersion[is.na(x = feature.dispersion)] <- 0
    feature.mean[is.na(x = feature.mean)] <- 0
    data.x.breaks <- switch(
      EXPR = binning.method,
      'equal_width' = num.bin,
      'equal_frequency' = c(
        -1,
        quantile(
          x = feature.mean[feature.mean > 0],
          probs = seq.int(from = 0, to = 1, length.out = num.bin)
        )
      ),
      stop("Unknown binning method: ", binning.method)
    )
    data.x.bin <- cut(x = feature.mean, breaks = data.x.breaks)
    names(x = data.x.bin) <- names(x = feature.mean)
    mean.y <- tapply(X = feature.dispersion, INDEX = data.x.bin, FUN = mean)
    sd.y <- tapply(X = feature.dispersion, INDEX = data.x.bin, FUN = sd)
    feature.dispersion.scaled <- (feature.dispersion - mean.y[as.numeric(x = data.x.bin)]) /
      sd.y[as.numeric(x = data.x.bin)]
    names(x = feature.dispersion.scaled) <- names(x = feature.mean)
    hvf.info <- data.frame(feature.mean, feature.dispersion, feature.dispersion.scaled)
    rownames(x = hvf.info) <- rownames(x = object)
    colnames(x = hvf.info) <- paste0('mvp.', c('mean', 'dispersion', 'dispersion.scaled'))
  }
  return(hvf.info)
}

#' @param nfeatures Number of features to select as top variable features;
#' only used when \code{selection.method} is set to \code{'dispersion'} or
#' \code{'vst'}
#' @param mean.cutoff A two-length numeric vector with low- and high-cutoffs for
#' feature means
#' @param dispersion.cutoff A two-length numeric vector with low- and high-cutoffs for
#' feature dispersions
#'
#' @rdname FindVariableFeatures
#' @concept preprocessing
#'
#' @importFrom utils head
#' @export
#' @method FindVariableFeatures Assay
#'
FindVariableFeatures.Assay <- function(
  object,
  selection.method = "vst",
  loess.span = 0.3,
  clip.max = 'auto',
  mean.function = FastExpMean,
  dispersion.function = FastLogVMR,
  num.bin = 20,
  binning.method = "equal_width",
  nfeatures = 2000,
  mean.cutoff = c(0.1, 8),
  dispersion.cutoff = c(1, Inf),
  verbose = TRUE,
  ...
) {
  if (length(x = mean.cutoff) != 2 || length(x = dispersion.cutoff) != 2) {
    stop("Both 'mean.cutoff' and 'dispersion.cutoff' must be two numbers")
  }
  if (selection.method == "vst") {
    data <- GetAssayData(object = object, slot = "counts")
    # if (ncol(x = data) < 1 || nrow(x = data) < 1) {
    if (IsMatrixEmpty(x = data)) {
      warning("selection.method set to 'vst' but count slot is empty; will use data slot instead")
      data <- GetAssayData(object = object, slot = "data")
    }
  } else {
    data <- GetAssayData(object = object, slot = "data")
  }
  hvf.info <- FindVariableFeatures(
    object = data,
    selection.method = selection.method,
    loess.span = loess.span,
    clip.max = clip.max,
    mean.function = mean.function,
    dispersion.function = dispersion.function,
    num.bin = num.bin,
    binning.method = binning.method,
    verbose = verbose,
    ...
  )
  object[[names(x = hvf.info)]] <- hvf.info
  hvf.info <- hvf.info[which(x = hvf.info[, 1, drop = TRUE] != 0), ]
  if (selection.method == "vst") {
    hvf.info <- hvf.info[order(hvf.info$vst.variance.standardized, decreasing = TRUE), , drop = FALSE]
  } else {
    hvf.info <- hvf.info[order(hvf.info$mvp.dispersion, decreasing = TRUE), , drop = FALSE]
  }
  selection.method <- switch(
    EXPR = selection.method,
    'mvp' = 'mean.var.plot',
    'disp' = 'dispersion',
    selection.method
  )
  top.features <- switch(
    EXPR = selection.method,
    'mean.var.plot' = {
      means.use <- (hvf.info[, 1] > mean.cutoff[1]) & (hvf.info[, 1] < mean.cutoff[2])
      dispersions.use <- (hvf.info[, 3] > dispersion.cutoff[1]) & (hvf.info[, 3] < dispersion.cutoff[2])
      rownames(x = hvf.info)[which(x = means.use & dispersions.use)]
    },
    'dispersion' = head(x = rownames(x = hvf.info), n = nfeatures),
    'vst' = head(x = rownames(x = hvf.info), n = nfeatures),
    stop("Unknown selection method: ", selection.method)
  )
  VariableFeatures(object = object) <- top.features
  vf.name <- ifelse(
    test = selection.method == 'vst',
    yes = 'vst',
    no = 'mvp'
  )
  vf.name <- paste0(vf.name, '.variable')
  object[[vf.name]] <- rownames(x = object[[]]) %in% top.features
  return(object)
}

#' @rdname FindVariableFeatures
#' @export
#' @method FindVariableFeatures SCTAssay
#'
FindVariableFeatures.SCTAssay <- function(
  object,
  nfeatures = 2000,
  ...
) {
  if (length(x = slot(object = object, name = "SCTModel.list")) > 1) {
    stop("SCT assay is comprised of multiple SCT models. To change the variable features, please set manually with VariableFeatures<-", call.
= FALSE) } feature.attr <- SCTResults(object = object, slot = "feature.attributes") nfeatures <- min(nfeatures, nrow(x = feature.attr)) top.features <- rownames(x = feature.attr)[order(feature.attr$residual_variance, decreasing = TRUE)[1:nfeatures]] VariableFeatures(object = object) <- top.features return(object) } #' @param assay Assay to use #' #' @rdname FindVariableFeatures #' @concept preprocessing #' @export #' @method FindVariableFeatures Seurat #' FindVariableFeatures.Seurat <- function( object, assay = NULL, selection.method = "vst", loess.span = 0.3, clip.max = 'auto', mean.function = FastExpMean, dispersion.function = FastLogVMR, num.bin = 20, binning.method = "equal_width", nfeatures = 2000, mean.cutoff = c(0.1, 8), dispersion.cutoff = c(1, Inf), verbose = TRUE, ... ) { assay <- assay[1L] %||% DefaultAssay(object = object) assay <- match.arg(arg = assay, Assays(object = object)) assay.data <- FindVariableFeatures( object = object[[assay]], selection.method = selection.method, loess.span = loess.span, clip.max = clip.max, mean.function = mean.function, dispersion.function = dispersion.function, num.bin = num.bin, binning.method = binning.method, nfeatures = nfeatures, mean.cutoff = mean.cutoff, dispersion.cutoff = dispersion.cutoff, verbose = verbose, ... ) object[[assay]] <- assay.data if (inherits(x = object[[assay]], what = "SCTAssay")) { object <- GetResidual( object = object, assay = assay, features = VariableFeatures(object = assay.data), verbose = FALSE ) } object <- LogSeuratCommand(object = object) return(object) } #' @param object A Seurat object, assay, or expression matrix #' @param spatial.location Coordinates for each cell/spot/bead #' @param selection.method Method for selecting spatially variable features. #' \itemize{ #' \item \code{markvariogram}: See \code{\link{RunMarkVario}} for details #' \item \code{moransi}: See \code{\link{RunMoransI}} for details. #' } #' #' @param r.metric r value at which to report the "trans" value of the mark #' variogram #' @param x.cuts Number of divisions to make in the x direction, helps define #' the grid over which binning is performed #' @param y.cuts Number of divisions to make in the y direction, helps define #' the grid over which binning is performed #' @param verbose Print messages and progress #' #' @method FindSpatiallyVariableFeatures default #' @rdname FindSpatiallyVariableFeatures #' @concept preprocessing #' @concept spatial #' @export #' #' FindSpatiallyVariableFeatures.default <- function( object, spatial.location, selection.method = c('markvariogram', 'moransi'), r.metric = 5, x.cuts = NULL, y.cuts = NULL, verbose = TRUE, ... ) { # error check dimensions if (ncol(x = object) != nrow(x = spatial.location)) { stop("Please provide the same number of observations as spatial locations.") } if (!is.null(x = x.cuts) & !is.null(x = y.cuts)) { binned.data <- BinData( data = object, pos = spatial.location, x.cuts = x.cuts, y.cuts = y.cuts, verbose = verbose ) object <- binned.data$data spatial.location <- binned.data$pos } svf.info <- switch( EXPR = selection.method, 'markvariogram' = RunMarkVario( spatial.location = spatial.location, data = object ), 'moransi' = RunMoransI( data = object, pos = spatial.location, verbose = verbose ), stop("Invalid selection method. Please choose one of: markvariogram, moransi.") ) return(svf.info) } #' @param slot Slot in the Assay to pull data from #' @param features If provided, only compute on given features. Otherwise, #' compute for all features. 
#'
#' @param nfeatures Number of features to mark as the top spatially variable.
#'
#' @method FindSpatiallyVariableFeatures Assay
#' @rdname FindSpatiallyVariableFeatures
#' @concept preprocessing
#' @concept spatial
#' @export
#'
FindSpatiallyVariableFeatures.Assay <- function(
  object,
  slot = "scale.data",
  spatial.location,
  selection.method = c('markvariogram', 'moransi'),
  features = NULL,
  r.metric = 5,
  x.cuts = NULL,
  y.cuts = NULL,
  nfeatures = 2000,
  verbose = TRUE,
  ...
) {
  features <- features %||% rownames(x = object)
  if (selection.method == "markvariogram" && "markvariogram" %in% names(x = Misc(object = object))) {
    features.computed <- names(x = Misc(object = object, slot = "markvariogram"))
    features <- features[!features %in% features.computed]
  }
  data <- GetAssayData(object = object, slot = slot)
  data <- as.matrix(x = data[features, ])
  data <- data[RowVar(x = data) > 0, ]
  if (nrow(x = data) != 0) {
    svf.info <- FindSpatiallyVariableFeatures(
      object = data,
      spatial.location = spatial.location,
      selection.method = selection.method,
      r.metric = r.metric,
      x.cuts = x.cuts,
      y.cuts = y.cuts,
      verbose = verbose,
      ...
    )
  } else {
    svf.info <- c()
  }
  if (selection.method == "markvariogram") {
    if ("markvariogram" %in% names(x = Misc(object = object))) {
      svf.info <- c(svf.info, Misc(object = object, slot = "markvariogram"))
    }
    suppressWarnings(expr = Misc(object = object, slot = "markvariogram") <- svf.info)
    svf.info <- ComputeRMetric(mv = svf.info, r.metric)
    svf.info <- svf.info[order(svf.info[, 1]), , drop = FALSE]
  }
  if (selection.method == "moransi") {
    colnames(x = svf.info) <- paste0("MoransI_", colnames(x = svf.info))
    svf.info <- svf.info[order(svf.info[, 2], -abs(svf.info[, 1])), , drop = FALSE]
  }
  var.name <- paste0(selection.method, ".spatially.variable")
  var.name.rank <- paste0(var.name, ".rank")
  svf.info[[var.name]] <- FALSE
  svf.info[[var.name]][1:(min(nrow(x = svf.info), nfeatures))] <- TRUE
  svf.info[[var.name.rank]] <- 1:nrow(x = svf.info)
  object[[names(x = svf.info)]] <- svf.info
  return(object)
}

#' @param assay Assay to pull the features (marks) from
#' @param image Name of image to pull the coordinates from
#'
#' @method FindSpatiallyVariableFeatures Seurat
#' @rdname FindSpatiallyVariableFeatures
#' @concept preprocessing
#' @concept spatial
#' @export
#'
FindSpatiallyVariableFeatures.Seurat <- function(
  object,
  assay = NULL,
  slot = "scale.data",
  features = NULL,
  image = NULL,
  selection.method = c('markvariogram', 'moransi'),
  r.metric = 5,
  x.cuts = NULL,
  y.cuts = NULL,
  nfeatures = 2000,
  verbose = TRUE,
  ...
) {
  assay <- assay %||% DefaultAssay(object = object)
  features <- features %||% rownames(x = object[[assay]])
  image <- image %||% DefaultImage(object = object)
  tc <- GetTissueCoordinates(object = object[[image]])
  # check if markvariogram has been run on necessary features
  # only run for new ones
  object[[assay]] <- FindSpatiallyVariableFeatures(
    object = object[[assay]],
    slot = slot,
    features = features,
    spatial.location = tc,
    selection.method = selection.method,
    r.metric = r.metric,
    x.cuts = x.cuts,
    y.cuts = y.cuts,
    nfeatures = nfeatures,
    verbose = verbose,
    ...
  )
  object <- LogSeuratCommand(object = object)
  return(object)
}

#' @rdname LogNormalize
#' @method LogNormalize data.frame
#' @export
#'
LogNormalize.data.frame <- function(
  data,
  scale.factor = 1e4,
  margin = 2L,
  verbose = TRUE,
  ...
) {
  return(LogNormalize(
    data = as.matrix(x = data),
    scale.factor = scale.factor,
    verbose = verbose,
    ...
)) } #' @rdname LogNormalize #' @method LogNormalize V3Matrix #' @export #' LogNormalize.V3Matrix <- function( data, scale.factor = 1e4, margin = 2L, verbose = TRUE, ... ) { # if (is.data.frame(x = data)) { # data <- as.matrix(x = data) # } if (!inherits(x = data, what = 'dgCMatrix')) { data <- as(object = data, Class = "dgCMatrix") } # call Rcpp function to normalize if (verbose) { cat("Performing log-normalization\n", file = stderr()) } norm.data <- LogNorm(data, scale_factor = scale.factor, display_progress = verbose) colnames(x = norm.data) <- colnames(x = data) rownames(x = norm.data) <- rownames(x = data) return(norm.data) } #' @importFrom future.apply future_lapply #' @importFrom future nbrOfWorkers #' #' @param normalization.method Method for normalization. #' \itemize{ #' \item \dQuote{\code{LogNormalize}}: Feature counts for each cell are #' divided by the total counts for that cell and multiplied by the #' \code{scale.factor}. This is then natural-log transformed using \code{log1p} #' \item \dQuote{\code{CLR}}: Applies a centered log ratio transformation #' \item \dQuote{\code{RC}}: Relative counts. Feature counts for each cell #' are divided by the total counts for that cell and multiplied by the #' \code{scale.factor}. No log-transformation is applied. For counts per #' million (CPM) set \code{scale.factor = 1e6} #' } #' @param scale.factor Sets the scale factor for cell-level normalization #' @param margin If performing CLR normalization, normalize across features (1) or cells (2) # @param across If performing CLR normalization, normalize across either "features" or "cells". #' @param block.size How many cells should be run in each chunk, will try to split evenly across threads #' @param verbose display progress bar for normalization procedure #' #' @rdname NormalizeData #' @concept preprocessing #' @export #' NormalizeData.V3Matrix <- function( object, normalization.method = "LogNormalize", scale.factor = 1e4, margin = 1, block.size = NULL, verbose = TRUE, ... ) { CheckDots(...) 
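  # Illustrative only (kept as comments): the three supported methods as they
  # would be invoked at the Seurat level; the scale factors shown are the
  # documented default (1e4) and, for counts per million, the value suggested
  # in the documentation above (1e6).
  #   NormalizeData(pbmc_small, normalization.method = "LogNormalize", scale.factor = 1e4)
  #   NormalizeData(pbmc_small, normalization.method = "CLR", margin = 2)          # CLR across cells
  #   NormalizeData(pbmc_small, normalization.method = "RC", scale.factor = 1e6)   # counts per million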
  if (is.null(x = normalization.method)) {
    return(object)
  }
  normalized.data <- if (nbrOfWorkers() > 1) {
    norm.function <- switch(
      EXPR = normalization.method,
      'LogNormalize' = LogNormalize,
      'CLR' = CustomNormalize,
      'RC' = RelativeCounts,
      stop("Unknown normalization method: ", normalization.method)
    )
    if (normalization.method != 'CLR') {
      margin <- 2
    }
    tryCatch(
      expr = Parenting(parent.find = 'Seurat', margin = margin),
      error = function(e) {
        invisible(x = NULL)
      }
    )
    dsize <- switch(
      EXPR = margin,
      '1' = nrow(x = object),
      '2' = ncol(x = object),
      stop("'margin' must be 1 or 2")
    )
    chunk.points <- ChunkPoints(
      dsize = dsize,
      csize = block.size %||% ceiling(x = dsize / nbrOfWorkers())
    )
    normalized.data <- future_lapply(
      X = 1:ncol(x = chunk.points),
      FUN = function(i) {
        block <- chunk.points[, i]
        data <- if (margin == 1) {
          object[block[1]:block[2], , drop = FALSE]
        } else {
          object[, block[1]:block[2], drop = FALSE]
        }
        clr_function <- function(x) {
          return(log1p(x = x / (exp(x = sum(log1p(x = x[x > 0]), na.rm = TRUE) / length(x = x)))))
        }
        args <- list(
          data = data,
          scale.factor = scale.factor,
          verbose = FALSE,
          custom_function = clr_function,
          margin = margin
        )
        args <- args[names(x = formals(fun = norm.function))]
        return(do.call(
          what = norm.function,
          args = args
        ))
      }
    )
    do.call(
      what = switch(
        EXPR = margin,
        '1' = 'rbind',
        '2' = 'cbind',
        stop("'margin' must be 1 or 2")
      ),
      args = normalized.data
    )
  } else {
    switch(
      EXPR = normalization.method,
      'LogNormalize' = LogNormalize(
        data = object,
        scale.factor = scale.factor,
        verbose = verbose
      ),
      'CLR' = CustomNormalize(
        data = object,
        custom_function = function(x) {
          return(log1p(x = x / (exp(x = sum(log1p(x = x[x > 0]), na.rm = TRUE) / length(x = x)))))
        },
        margin = margin,
        verbose = verbose
        # across = across
      ),
      'RC' = RelativeCounts(
        data = object,
        scale.factor = scale.factor,
        verbose = verbose
      ),
      stop("Unknown normalization method: ", normalization.method)
    )
  }
  return(normalized.data)
}

#' @rdname NormalizeData
#' @concept preprocessing
#' @export
#' @method NormalizeData Assay
#'
NormalizeData.Assay <- function(
  object,
  normalization.method = "LogNormalize",
  scale.factor = 1e4,
  margin = 1,
  verbose = TRUE,
  ...
) {
  object <- SetAssayData(
    object = object,
    slot = 'data',
    new.data = NormalizeData(
      object = GetAssayData(object = object, slot = 'counts'),
      normalization.method = normalization.method,
      scale.factor = scale.factor,
      verbose = verbose,
      margin = margin,
      ...
    )
  )
  return(object)
}

#' @param assay Name of assay to use
#'
#' @rdname NormalizeData
#' @concept preprocessing
#' @export
#' @method NormalizeData Seurat
#'
#' @examples
#' \dontrun{
#' data("pbmc_small")
#' pbmc_small
#' pbmc_small <- NormalizeData(object = pbmc_small)
#' }
#'
NormalizeData.Seurat <- function(
  object,
  assay = NULL,
  normalization.method = "LogNormalize",
  scale.factor = 1e4,
  margin = 1,
  verbose = TRUE,
  ...
) {
  assay <- assay %||% DefaultAssay(object = object)
  assay.data <- NormalizeData(
    object = object[[assay]],
    normalization.method = normalization.method,
    scale.factor = scale.factor,
    verbose = verbose,
    margin = margin,
    ...
  )
  object[[assay]] <- assay.data
  object <- LogSeuratCommand(object = object)
  return(object)
}

#' @importFrom future nbrOfWorkers
#'
#' @param features Vector of feature names to scale/center. Default is variable features.
#' @param vars.to.regress Variables to regress out (previously latent.vars in
#' RegressOut). For example, nUMI, or percent.mito.
#' @param latent.data Extra data to regress out, should be cells x latent data #' @param split.by Name of variable in object metadata or a vector or factor defining #' grouping of cells. See argument \code{f} in \code{\link[base]{split}} for more details #' @param model.use Use a linear model or generalized linear model #' (poisson, negative binomial) for the regression. Options are 'linear' #' (default), 'poisson', and 'negbinom' #' @param use.umi Regress on UMI count data. Default is FALSE for linear #' modeling, but automatically set to TRUE if model.use is 'negbinom' or 'poisson' #' @param do.scale Whether to scale the data. #' @param do.center Whether to center the data. #' @param scale.max Max value to return for scaled data. The default is 10. #' Setting this can help reduce the effects of features that are only expressed in #' a very small number of cells. If regressing out latent variables and using a #' non-linear model, the default is 50. #' @param block.size Default size for number of features to scale at in a single #' computation. Increasing block.size may speed up calculations but at an #' additional memory cost. #' @param min.cells.to.block If object contains fewer than this number of cells, #' don't block for scaling calculations. #' @param verbose Displays a progress bar for scaling procedure #' #' @importFrom future.apply future_lapply #' #' @rdname ScaleData #' @concept preprocessing #' @export #' ScaleData.default <- function( object, features = NULL, vars.to.regress = NULL, latent.data = NULL, split.by = NULL, model.use = 'linear', use.umi = FALSE, do.scale = TRUE, do.center = TRUE, scale.max = 10, block.size = 1000, min.cells.to.block = 3000, verbose = TRUE, ... ) { CheckDots(...) features <- features %||% rownames(x = object) features <- as.vector(x = intersect(x = features, y = rownames(x = object))) object <- object[features, , drop = FALSE] object.names <- dimnames(x = object) min.cells.to.block <- min(min.cells.to.block, ncol(x = object)) suppressWarnings(expr = Parenting( parent.find = "ScaleData.Assay", features = features, min.cells.to.block = min.cells.to.block )) split.by <- split.by %||% TRUE split.cells <- split(x = colnames(x = object), f = split.by) CheckGC() if (!is.null(x = vars.to.regress)) { if (is.null(x = latent.data)) { latent.data <- data.frame(row.names = colnames(x = object)) } else { latent.data <- latent.data[colnames(x = object), , drop = FALSE] rownames(x = latent.data) <- colnames(x = object) } if (any(vars.to.regress %in% rownames(x = object))) { latent.data <- cbind( latent.data, t(x = object[vars.to.regress[vars.to.regress %in% rownames(x = object)], , drop=FALSE]) ) } # Currently, RegressOutMatrix will do nothing if latent.data = NULL notfound <- setdiff(x = vars.to.regress, y = colnames(x = latent.data)) if (length(x = notfound) == length(x = vars.to.regress)) { stop( "None of the requested variables to regress are present in the object.", call. = FALSE ) } else if (length(x = notfound) > 0) { warning( "Requested variables to regress not in object: ", paste(notfound, collapse = ", "), call. = FALSE, immediate. 
= TRUE ) vars.to.regress <- colnames(x = latent.data) } if (verbose) { message("Regressing out ", paste(vars.to.regress, collapse = ', ')) } chunk.points <- ChunkPoints(dsize = nrow(x = object), csize = block.size) if (nbrOfWorkers() > 1) { # TODO: lapply chunks <- expand.grid( names(x = split.cells), 1:ncol(x = chunk.points), stringsAsFactors = FALSE ) object <- future_lapply( X = 1:nrow(x = chunks), FUN = function(i) { row <- chunks[i, ] group <- row[[1]] index <- as.numeric(x = row[[2]]) return(RegressOutMatrix( data.expr = object[chunk.points[1, index]:chunk.points[2, index], split.cells[[group]], drop = FALSE], latent.data = latent.data[split.cells[[group]], , drop = FALSE], features.regress = features, model.use = model.use, use.umi = use.umi, verbose = FALSE )) } ) if (length(x = split.cells) > 1) { merge.indices <- lapply( X = 1:length(x = split.cells), FUN = seq.int, to = length(x = object), by = length(x = split.cells) ) object <- lapply( X = merge.indices, FUN = function(x) { return(do.call(what = 'rbind', args = object[x])) } ) object <- do.call(what = 'cbind', args = object) } else { object <- do.call(what = 'rbind', args = object) } } else { object <- lapply( X = names(x = split.cells), FUN = function(x) { if (verbose && length(x = split.cells) > 1) { message("Regressing out variables from split ", x) } return(RegressOutMatrix( data.expr = object[, split.cells[[x]], drop = FALSE], latent.data = latent.data[split.cells[[x]], , drop = FALSE], features.regress = features, model.use = model.use, use.umi = use.umi, verbose = verbose )) } ) object <- do.call(what = 'cbind', args = object) } dimnames(x = object) <- object.names CheckGC() } if (verbose && (do.scale || do.center)) { msg <- paste( na.omit(object = c( ifelse(test = do.center, yes = 'centering', no = NA_character_), ifelse(test = do.scale, yes = 'scaling', no = NA_character_) )), collapse = ' and ' ) msg <- paste0( toupper(x = substr(x = msg, start = 1, stop = 1)), substr(x = msg, start = 2, stop = nchar(x = msg)), ' data matrix' ) message(msg) } if (inherits(x = object, what = c('dgCMatrix', 'dgTMatrix'))) { scale.function <- FastSparseRowScale } else { object <- as.matrix(x = object) scale.function <- FastRowScale } if (nbrOfWorkers() > 1) { blocks <- ChunkPoints(dsize = length(x = features), csize = block.size) chunks <- expand.grid( names(x = split.cells), 1:ncol(x = blocks), stringsAsFactors = FALSE ) scaled.data <- future_lapply( X = 1:nrow(x = chunks), FUN = function(index) { row <- chunks[index, ] group <- row[[1]] block <- as.vector(x = blocks[, as.numeric(x = row[[2]])]) arg.list <- list( mat = object[features[block[1]:block[2]], split.cells[[group]], drop = FALSE], scale = do.scale, center = do.center, scale_max = scale.max, display_progress = FALSE ) arg.list <- arg.list[intersect(x = names(x = arg.list), y = names(x = formals(fun = scale.function)))] data.scale <- do.call(what = scale.function, args = arg.list) dimnames(x = data.scale) <- dimnames(x = object[features[block[1]:block[2]], split.cells[[group]]]) suppressWarnings(expr = data.scale[is.na(x = data.scale)] <- 0) CheckGC() return(data.scale) } ) if (length(x = split.cells) > 1) { merge.indices <- lapply( X = 1:length(x = split.cells), FUN = seq.int, to = length(x = scaled.data), by = length(x = split.cells) ) scaled.data <- lapply( X = merge.indices, FUN = function(x) { return(suppressWarnings(expr = do.call(what = 'rbind', args = scaled.data[x]))) } ) scaled.data <- suppressWarnings(expr = do.call(what = 'cbind', args = scaled.data)) } else { 
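      # Single cell-split: the future_lapply() chunks produced above are blocks
      # of features covering the same set of cells, so row-binding them restores
      # the full features-by-cells scale.data matrix (no column interleaving needed).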
suppressWarnings(expr = scaled.data <- do.call(what = 'rbind', args = scaled.data)) } } else { scaled.data <- matrix( data = NA_real_, nrow = nrow(x = object), ncol = ncol(x = object), dimnames = object.names ) max.block <- ceiling(x = length(x = features) / block.size) for (x in names(x = split.cells)) { if (verbose) { if (length(x = split.cells) > 1 && (do.scale || do.center)) { message(gsub(pattern = 'matrix', replacement = 'from split ', x = msg), x) } pb <- txtProgressBar(min = 0, max = max.block, style = 3, file = stderr()) } for (i in 1:max.block) { my.inds <- ((block.size * (i - 1)):(block.size * i - 1)) + 1 my.inds <- my.inds[my.inds <= length(x = features)] arg.list <- list( mat = object[features[my.inds], split.cells[[x]], drop = FALSE], scale = do.scale, center = do.center, scale_max = scale.max, display_progress = FALSE ) arg.list <- arg.list[intersect(x = names(x = arg.list), y = names(x = formals(fun = scale.function)))] data.scale <- do.call(what = scale.function, args = arg.list) dimnames(x = data.scale) <- dimnames(x = object[features[my.inds], split.cells[[x]]]) scaled.data[features[my.inds], split.cells[[x]]] <- data.scale rm(data.scale) CheckGC() if (verbose) { setTxtProgressBar(pb = pb, value = i) } } if (verbose) { close(con = pb) } } } dimnames(x = scaled.data) <- object.names scaled.data[is.na(x = scaled.data)] <- 0 CheckGC() return(scaled.data) } #' @rdname ScaleData #' @concept preprocessing #' @export #' @method ScaleData IterableMatrix #' ScaleData.IterableMatrix <- function( object, features = NULL, do.scale = TRUE, do.center = TRUE, scale.max = 10, ... ) { features <- features %||% rownames(x = object) features <- as.vector(x = intersect(x = features, y = rownames(x = object))) object <- object[features, , drop = FALSE] if (do.center) { features.mean <- BPCells::matrix_stats( matrix = object, row_stats = 'mean')$row_stats['mean',] } else { features.mean <- 0 } if (do.scale) { features.sd <- sqrt(BPCells::matrix_stats( matrix = object, row_stats = 'variance')$row_stats['variance',]) features.sd[features.sd == 0] <- 0.01 } else { features.sd <- 1 } if (scale.max != Inf) { object <- BPCells::min_by_row(mat = object, vals = scale.max * features.sd + features.mean) } scaled.data <- (object - features.mean) / features.sd return(scaled.data) } #' @rdname ScaleData #' @concept preprocessing #' @export #' @method ScaleData Assay #' ScaleData.Assay <- function( object, features = NULL, vars.to.regress = NULL, latent.data = NULL, split.by = NULL, model.use = 'linear', use.umi = FALSE, do.scale = TRUE, do.center = TRUE, scale.max = 10, block.size = 1000, min.cells.to.block = 3000, verbose = TRUE, ... ) { use.umi <- ifelse(test = model.use != 'linear', yes = TRUE, no = use.umi) slot.use <- ifelse(test = use.umi, yes = 'counts', no = 'data') features <- features %||% VariableFeatures(object) if (length(x = features) == 0) { features <- rownames(x = GetAssayData(object = object, slot = slot.use)) } object <- SetAssayData( object = object, slot = 'scale.data', new.data = ScaleData( object = GetAssayData(object = object, slot = slot.use), features = features, vars.to.regress = vars.to.regress, latent.data = latent.data, split.by = split.by, model.use = model.use, use.umi = use.umi, do.scale = do.scale, do.center = do.center, scale.max = scale.max, block.size = block.size, min.cells.to.block = min.cells.to.block, verbose = verbose, ... 
) ) suppressWarnings(expr = Parenting( parent.find = "ScaleData.Seurat", features = features, min.cells.to.block = min.cells.to.block, use.umi = use.umi )) return(object) } #' @param assay Name of Assay to scale #' #' @rdname ScaleData #' @concept preprocessing #' @export #' @method ScaleData Seurat #' ScaleData.Seurat <- function( object, features = NULL, assay = NULL, vars.to.regress = NULL, split.by = NULL, model.use = 'linear', use.umi = FALSE, do.scale = TRUE, do.center = TRUE, scale.max = 10, block.size = 1000, min.cells.to.block = 3000, verbose = TRUE, ... ) { assay <- assay[1L] %||% DefaultAssay(object = object) assay <- match.arg(arg = assay, choices = Assays(object = object)) if (any(vars.to.regress %in% colnames(x = object[[]]))) { latent.data <- object[[vars.to.regress[vars.to.regress %in% colnames(x = object[[]])]]] } else { latent.data <- NULL } if (is.character(x = split.by) && length(x = split.by) == 1) { split.by <- object[[split.by]] } assay.data <- ScaleData( # object = assay.data, object = object[[assay]], features = features, vars.to.regress = vars.to.regress, latent.data = latent.data, split.by = split.by, model.use = model.use, use.umi = use.umi, do.scale = do.scale, do.center = do.center, scale.max = scale.max, block.size = block.size, min.cells.to.block = min.cells.to.block, verbose = verbose, ... ) object[[assay]] <- assay.data object <- LogSeuratCommand(object = object) return(object) } #%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% # Internal #%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% #' Read Vitessce Expression Data #' #' @inheritParams ReadVitessce #' #' @return An expression matrix with cells as columns and features as rows #' #' @name vitessce-helpers #' @rdname vitessce-helpers #' #' @importFrom jsonlite read_json #' #' @keywords internal #' #' @noRd #' .ReadVitessceGenes <- function(counts) { p1 <- progressor() p1( message = "Reading counts in Vitessce genes format", class = 'sticky', amount = 0 ) p1(type = 'finish') cts <- read_json(path = counts) p2 <- progressor(steps = length(x = cts)) cts <- lapply( X = names(x = cts), FUN = function(x) { expr <- cts[[x]]$cells expr <- as.matrix(x = expr) colnames(x = expr) <- x p2() return(expr) } ) p2(type = 'finish') cts <- Reduce( f = function(x, y) { a <- merge(x = x, y = y, by = 0, all = TRUE) rownames(x = a) <- a$Row.names a$Row.names <- NULL return(as.matrix(x = a)) }, x = cts ) cts[is.na(x = cts)] <- 0 return(t(x = cts)) } #' @name vitessce-helpers #' @rdname vitessce-helpers #' #' @importFrom jsonlite read_json #' #' @keywords internal #' #' @noRd #' .ReadVitessceClusters <- function(counts) { p1 <- progressor() p1( message = "Reading counts in Vitessce clusters format", class = 'sticky', amount = 0 ) p1(type = 'finish') cts <- read_json(path = counts) # p2 <- progressor(steps = length(x = cts)) cells <- unlist(x = cts$cols) features <- unlist(x = cts$rows) cts <- lapply(X = cts[['matrix']], FUN = unlist) cts <- t(x = as.data.frame(x = cts)) dimnames(x = cts) <- list(features, cells) return(cts) } #' @name nanostring-helpers #' @rdname nanostring-helpers #' #' @return data frame containing counts for cells based on a single class of segmentation (eg Nuclear) #' #' @keywords internal #' #' @noRd #' build.cellcomp.matrix <- function(mols.df, class=NULL) { if (!is.null(class)) { if (!(class %in% c("Nuclear", "Membrane", "Cytoplasm"))) { stop(paste("Cannot subset matrix based on segmentation:", class)) } mols.df <- mols.df[mols.df$CellComp 
== class,] # subset based on cell class } mols.df$bc <- paste0(as.character(mols.df$cell_ID), "_", as.character(mols.df$fov)) ncol <- length(unique(mols.df$target)) nrow <- length(unique(mols.df$bc)) # will mols.df already have a cell barcode column at this point mtx <- matrix(data=rep(0, nrow*ncol), nrow=nrow, ncol=ncol) colnames(mtx) <- unique(mols.df$target) rownames(mtx) <- unique(mols.df$bc) for (row in 1:nrow(mols.df)) { mol <- mols.df[row, "target"] bc <- mols.df[row, "bc"] mtx[bc, mol] <- mtx[bc, mol] + 1 } return(as.data.frame(mtx)) } # Bin spatial regions into grid and average expression values # # @param dat Expression data # @param pos Position information/coordinates for each sample # @param x.cuts Number of cuts to make in the x direction (defines grid along # with y.cuts) # @param y.cuts Number of cuts to make in the y direction # # @return returns a list with positions as centers of the bins and average # expression within the bins # #' @importFrom Matrix rowMeans # BinData <- function(data, pos, x.cuts = 10, y.cuts = x.cuts, verbose = TRUE) { if (verbose) { message("Binning spatial data") } pos$x.cuts <- cut(x = pos[, 1], breaks = x.cuts) pos$y.cuts <- cut(x = pos[, 2], breaks = y.cuts) pos$bin <- paste0(pos$x.cuts, "_", pos$y.cuts) all.bins <- unique(x = pos$bin) new.pos <- matrix(data = numeric(), nrow = length(x = all.bins), ncol = 2) new.dat <- matrix(data = numeric(), nrow = nrow(x = data), ncol = length(x = all.bins)) for(i in 1:length(x = all.bins)) { samples <- rownames(x = pos)[which(x = pos$bin == all.bins[i])] dat <- data[, samples] if (is.null(x = dim(x = dat))) { new.dat[, i] <- dat } else { new.dat[, i] <- rowMeans(data[, samples]) } new.pos[i, 1] <- mean(pos[samples, "x"]) new.pos[i, 2] <- mean(pos[samples, "y"]) } rownames(x = new.dat) <- rownames(x = data) colnames(x = new.dat) <- all.bins rownames(x = new.pos) <- all.bins colnames(x = new.pos) <- colnames(x = pos)[1:2] return(list(data = new.dat, pos = new.pos)) } # Sample classification from MULTI-seq # # Identify singlets, doublets and negative cells from multiplexing experiments. # # @param data Data frame with the raw count data (cell x tags) # @param q Scale the data. 
Default is 1e4 # # @return Returns a named vector with demultiplexed identities # #' @importFrom KernSmooth bkde #' @importFrom stats approxfun quantile # # @author Chris McGinnis, Gartner Lab, UCSF # # @examples # demux_result <- ClassifyCells(data = counts_data, q = 0.7) # ClassifyCells <- function(data, q) { ## Generate Thresholds: Gaussian KDE with bad barcode detection, outlier trimming ## local maxima estimation with bad barcode detection, threshold definition and adjustment # n_BC <- ncol(x = data) n_cells <- nrow(x = data) bc_calls <- vector(mode = "list", length = n_cells) n_bc_calls <- numeric(length = n_cells) for (i in 1:ncol(x = data)) { model <- tryCatch( expr = approxfun(x = bkde(x = data[, i], kernel = "normal")), error = function(e) { message("No threshold found for ", colnames(x = data)[i], "...") } ) if (is.null(x = model)) { next } x <- seq.int( from = quantile(x = data[, i], probs = 0.001), to = quantile(x = data[, i], probs = 0.999), length.out = 100 ) extrema <- LocalMaxima(x = model(x)) if (length(x = extrema) <= 1) { message("No threshold found for ", colnames(x = data)[i], "...") next } low.extremum <- min(extrema) high.extremum <- max(extrema) thresh <- (x[high.extremum] + x[low.extremum])/2 ## Account for GKDE noise by adjusting low threshold to most prominent peak low.extremae <- extrema[which(x = x[extrema] <= thresh)] new.low.extremum <- low.extremae[which.max(x = model(x)[low.extremae])] thresh <- quantile(x = c(x[high.extremum], x[new.low.extremum]), probs = q) ## Find which cells are above the ith threshold cell_i <- which(x = data[, i] >= thresh) n <- length(x = cell_i) if (n == 0) { ## Skips to next BC if no cells belong to the ith group next } bc <- colnames(x = data)[i] if (n == 1) { bc_calls[[cell_i]] <- c(bc_calls[[cell_i]], bc) n_bc_calls[cell_i] <- n_bc_calls[cell_i] + 1 } else { # have to iterate, lame for (cell in cell_i) { bc_calls[[cell]] <- c(bc_calls[[cell]], bc) n_bc_calls[cell] <- n_bc_calls[cell] + 1 } } } calls <- character(length = n_cells) for (i in 1:n_cells) { if (n_bc_calls[i] == 0) { calls[i] <- "Negative"; next } if (n_bc_calls[i] > 1) { calls[i] <- "Doublet"; next } if (n_bc_calls[i] == 1) { calls[i] <- bc_calls[[i]] } } names(x = calls) <- rownames(x = data) return(calls) } # Computes the metric at a given r (radius) value and stores in meta.features # # @param mv Results of running markvario # @param r.metric r value at which to report the "trans" value of the mark # variogram # # @return Returns a data.frame with r.metric values # # ComputeRMetric <- function(mv, r.metric = 5) { r.metric.results <- unlist(x = lapply( X = mv, FUN = function(x) { x$trans[which.min(x = abs(x = x$r - r.metric))] } )) r.metric.results <- as.data.frame(x = r.metric.results) colnames(r.metric.results) <- paste0("r.metric.", r.metric) return(r.metric.results) } # Normalize a given data matrix # # Normalize a given matrix with a custom function. Essentially just a wrapper # around apply. Used primarily in the context of CLR normalization. # # @param data Matrix with the raw count data # @param custom_function A custom normalization function # @param margin Which way to we normalize. Set 1 for rows (features) or 2 for columns (genes) # @parm across Which way to we normalize? 
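# A minimal usage sketch for ClassifyCells() above, assuming a hypothetical `bar.table`,
# a cells x tags data.frame of raw multiplexing-barcode counts (e.g. HTO or MULTI-seq tags):
#   calls <- ClassifyCells(data = bar.table, q = 0.7)
#   table(calls)   # "Negative", "Doublet", and per-barcode singlet calls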
Choose form 'cells' or 'features' # @param verbose Show progress bar # # @return Returns a matrix with the custom normalization # #' @importFrom Matrix t #' @importFrom methods as #' @importFrom pbapply pbapply # CustomNormalize <- function(data, custom_function, margin, verbose = TRUE) { if (is.data.frame(x = data)) { data <- as.matrix(x = data) } if (!inherits(x = data, what = 'dgCMatrix')) { data <- as.sparse(x = data) } myapply <- ifelse(test = verbose, yes = pbapply, no = apply) # margin <- switch( # EXPR = across, # 'cells' = 2, # 'features' = 1, # stop("'across' must be either 'cells' or 'features'") # ) if (verbose) { message("Normalizing across ", c('features', 'cells')[margin]) } norm.data <- myapply( X = data, MARGIN = margin, FUN = custom_function) if (margin == 1) { norm.data = Matrix::t(x = norm.data) } colnames(x = norm.data) <- colnames(x = data) rownames(x = norm.data) <- rownames(x = data) return(norm.data) } # Inter-maxima quantile sweep to find ideal barcode thresholds # # Finding ideal thresholds for positive-negative signal classification per multiplex barcode # # @param call.list A list of sample classification result from different quantiles using ClassifyCells # # @return A list with two values: \code{res} and \code{extrema}: # \describe{ # \item{res}{A data.frame named res_id documenting the quantile used, subset, number of cells and proportion} # \item{extrema}{...} # } # # @author Chris McGinnis, Gartner Lab, UCSF # # @examples # FindThresh(call.list = bar.table_sweep.list) # FindThresh <- function(call.list) { # require(reshape2) res <- as.data.frame(x = matrix( data = 0L, nrow = length(x = call.list), ncol = 4 )) colnames(x = res) <- c("q","pDoublet","pNegative","pSinglet") q.range <- unlist(x = strsplit(x = names(x = call.list), split = "q=")) res$q <- as.numeric(x = q.range[grep(pattern = "0", x = q.range)]) nCell <- length(x = call.list[[1]]) for (i in 1:nrow(x = res)) { temp <- table(call.list[[i]]) if ("Doublet" %in% names(x = temp) == TRUE) { res$pDoublet[i] <- temp[which(x = names(x = temp) == "Doublet")] } if ( "Negative" %in% names(temp) == TRUE ) { res$pNegative[i] <- temp[which(x = names(x = temp) == "Negative")] } res$pSinglet[i] <- sum(temp[which(x = !names(x = temp) %in% c("Doublet", "Negative"))]) } res.q <- res$q q.ind <- grep(pattern = 'q', x = colnames(x = res)) res <- Melt(x = res[, -q.ind]) res[, 1] <- rep.int(x = res.q, times = length(x = unique(res[, 2]))) colnames(x = res) <- c('q', 'variable', 'value') res[, 4] <- res$value/nCell colnames(x = res)[2:4] <- c("Subset", "nCells", "Proportion") extrema <- res$q[LocalMaxima(x = res$Proportion[which(x = res$Subset == "pSinglet")])] return(list(res = res, extrema = extrema)) } # Calculate pearson residuals of features not in the scale.data # This function is the secondary function under GetResidual # # @param object A seurat object # @param features Name of features to add into the scale.data # @param assay Name of the assay of the seurat object generated by SCTransform # @param vst_out The SCT parameter list # @param clip.range Numeric of length two specifying the min and max values the Pearson residual # will be clipped to # Useful if you want to change the clip.range. 
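# A minimal usage sketch for FindThresh() above, assuming the hypothetical `bar.table`
# from the ClassifyCells() example and a sweep over candidate quantiles:
#   q.values <- seq(from = 0.1, to = 0.9, by = 0.1)
#   call.list <- lapply(X = q.values, FUN = function(q) ClassifyCells(data = bar.table, q = q))
#   names(x = call.list) <- paste0("q=", q.values)
#   sweep.res <- FindThresh(call.list = call.list)
#   sweep.res$extrema   # q values at local maxima of the singlet proportion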
# @param verbose Whether to print messages and progress bars # # @return Returns a matrix containing not-centered pearson residuals of added features # #' @importFrom sctransform get_residuals # GetResidualSCTModel <- function( object, assay, SCTModel, new_features, clip.range, replace.value, verbose ) { clip.range <- clip.range %||% SCTResults(object = object[[assay]], slot = "clips", model = SCTModel)$sct model.features <- rownames(x = SCTResults(object = object[[assay]], slot = "feature.attributes", model = SCTModel)) umi.assay <- SCTResults(object = object[[assay]], slot = "umi.assay", model = SCTModel) model.cells <- Cells(x = slot(object = object[[assay]], name = "SCTModel.list")[[SCTModel]]) sct.method <- SCTResults(object = object[[assay]], slot = "arguments", model = SCTModel)$sct.method %||% "default" scale.data.cells <- colnames(x = GetAssayData(object = object, assay = assay, slot = "scale.data")) if (length(x = setdiff(x = model.cells, y = scale.data.cells)) == 0) { existing_features <- names(x = which(x = ! apply( X = GetAssayData(object = object, assay = assay, slot = "scale.data")[, model.cells], MARGIN = 1, FUN = anyNA) )) } else { existing_features <- character() } if (replace.value) { features_to_compute <- new_features } else { features_to_compute <- setdiff(x = new_features, y = existing_features) } if (sct.method == "reference.model") { if (verbose) { message("sct.model ", SCTModel, " is from reference, so no residuals will be recalculated") } features_to_compute <- character() } if (!umi.assay %in% Assays(object = object)) { warning("The umi assay (", umi.assay, ") is not present in the object. ", "Cannot compute additional residuals.", call. = FALSE, immediate. = TRUE) return(NULL) } diff_features <- setdiff(x = features_to_compute, y = model.features) intersect_features <- intersect(x = features_to_compute, y = model.features) if (length(x = diff_features) == 0) { umi <- GetAssayData(object = object, assay = umi.assay, slot = "counts" )[features_to_compute, model.cells, drop = FALSE] } else { warning( "In the SCTModel ", SCTModel, ", the following ", length(x = diff_features), " features do not exist in the counts slot: ", paste(diff_features, collapse = ", ") ) if (length(x = intersect_features) == 0) { return(matrix( data = NA, nrow = length(x = features_to_compute), ncol = length(x = model.cells), dimnames = list(features_to_compute, model.cells) )) } umi <- GetAssayData(object = object, assay = umi.assay, slot = "counts")[intersect_features, model.cells, drop = FALSE] } clip.max <- max(clip.range) clip.min <- min(clip.range) if (nrow(x = umi) > 0) { vst_out <- SCTModel_to_vst(SCTModel = slot(object = object[[assay]], name = "SCTModel.list")[[SCTModel]]) if (verbose) { message("sct.model: ", SCTModel) } new_residual <- get_residuals( vst_out = vst_out, umi = umi, residual_type = "pearson", res_clip_range = c(clip.min, clip.max), verbosity = as.numeric(x = verbose) * 2 ) new_residual <- as.matrix(x = new_residual) # centered data new_residual <- new_residual - rowMeans(x = new_residual) } else { new_residual <- matrix(data = NA, nrow = 0, ncol = length(x = model.cells), dimnames = list(c(), model.cells)) } old.features <- setdiff(x = new_features, y = features_to_compute) if (length(x = old.features) > 0) { old_residuals <- GetAssayData(object = object[[assay]], slot = "scale.data")[old.features, model.cells, drop = FALSE] new_residual <- rbind(new_residual, old_residuals)[new_features, ] } return(new_residual) } # Convert SCTModel class to vst_out used in the 
sctransform # @param SCTModel An SCTModel object # @return Returns a list containing the sct model # SCTModel_to_vst <- function(SCTModel) { feature.params <- c("theta", "(Intercept)", "log_umi") feature.attrs <- c("residual_mean", "residual_variance" ) vst_out <- list() vst_out$model_str <- slot(object = SCTModel, name = "model") vst_out$model_pars_fit <- as.matrix(x = slot(object = SCTModel, name = "feature.attributes")[, feature.params]) vst_out$gene_attr <- slot(object = SCTModel, name = "feature.attributes")[, feature.attrs] vst_out$cell_attr <- slot(object = SCTModel, name = "cell.attributes") vst_out$arguments <- slot(object = SCTModel, name = "arguments") return(vst_out) } # Local maxima estimator # # Finding local maxima given a numeric vector # # @param x A continuous vector # # @return Returns a (named) vector showing positions of local maxima # # @author Tommy # @references \url{https://stackoverflow.com/questions/6836409/finding-local-maxima-and-minima} # # @examples # x <- c(1, 2, 9, 9, 2, 1, 1, 5, 5, 1) # LocalMaxima(x = x) # LocalMaxima <- function(x) { # Use -Inf instead if x is numeric (non-integer) y <- diff(x = c(-.Machine$integer.max, x)) > 0L y <- cumsum(x = rle(x = y)$lengths) y <- y[seq.int(from = 1L, to = length(x = y), by = 2L)] if (x[[1]] == x[[2]]) { y <- y[-1] } return(y) } # #' @importFrom stats residuals # NBResiduals <- function(fmla, regression.mat, gene, return.mode = FALSE) { fit <- 0 try( fit <- glm.nb( formula = fmla, data = regression.mat ), silent = TRUE) if (is.numeric(x = fit)) { message(sprintf('glm.nb failed for gene %s; falling back to scale(log(y+1))', gene)) resid <- scale(x = log(x = regression.mat[, 'GENE'] + 1))[, 1] mode <- 'scale' } else { resid <- residuals(fit, type = 'pearson') mode = 'nbreg' } do.return <- list(resid = resid, mode = mode) if (return.mode) { return(do.return) } else { return(do.return$resid) } } # Regress out technical effects and cell cycle from a matrix # # Remove unwanted effects from a matrix # # @param data.expr An expression matrix to regress the effects of latent.data out # of; should be the complete expression matrix in genes x cells # @param latent.data A matrix or data.frame of latent variables, should be cells # x latent variables, the colnames should be the variables to regress # @param features.regress An integer vector representing the indices of the # genes to run regression on # @param model.use Model to use, one of 'linear', 'poisson', or 'negbinom'; pass # NULL to simply return data.expr # @param use.umi Regress on UMI count data # @param verbose Display a progress bar # #' @importFrom stats as.formula lm #' @importFrom utils txtProgressBar setTxtProgressBar # RegressOutMatrix <- function( data.expr, latent.data = NULL, features.regress = NULL, model.use = NULL, use.umi = FALSE, verbose = TRUE ) { # Do we bypass regression and simply return data.expr? bypass <- vapply( X = list(latent.data, model.use), FUN = is.null, FUN.VALUE = logical(length = 1L) ) if (any(bypass)) { return(data.expr) } # Check model.use possible.models <- c("linear", "poisson", "negbinom") if (!model.use %in% possible.models) { stop(paste( model.use, "is not a valid model. 
Please use one the following:", paste0(possible.models, collapse = ", ") )) } # Check features.regress if (is.null(x = features.regress)) { features.regress <- 1:nrow(x = data.expr) } if (is.character(x = features.regress)) { features.regress <- intersect(x = features.regress, y = rownames(x = data.expr)) if (length(x = features.regress) == 0) { stop("Cannot use features that are beyond the scope of data.expr") } } else if (max(features.regress) > nrow(x = data.expr)) { stop("Cannot use features that are beyond the scope of data.expr") } # Check dataset dimensions if (nrow(x = latent.data) != ncol(x = data.expr)) { stop("Uneven number of cells between latent data and expression data") } use.umi <- ifelse(test = model.use != 'linear', yes = TRUE, no = use.umi) # Create formula for regression vars.to.regress <- colnames(x = latent.data) fmla <- paste('GENE ~', paste(vars.to.regress, collapse = '+')) fmla <- as.formula(object = fmla) if (model.use == "linear") { # In this code, we'll repeatedly regress different Y against the same X # (latent.data) in order to calculate residuals. Rather that repeatedly # call lm to do this, we'll avoid recalculating the QR decomposition for the # latent.data matrix each time by reusing it after calculating it once regression.mat <- cbind(latent.data, data.expr[1,]) colnames(regression.mat) <- c(colnames(x = latent.data), "GENE") qr <- lm(fmla, data = regression.mat, qr = TRUE)$qr rm(regression.mat) } # Make results matrix data.resid <- matrix( nrow = nrow(x = data.expr), ncol = ncol(x = data.expr) ) if (verbose) { pb <- txtProgressBar(char = '=', style = 3, file = stderr()) } for (i in 1:length(x = features.regress)) { x <- features.regress[i] regression.mat <- cbind(latent.data, data.expr[x, ]) colnames(x = regression.mat) <- c(vars.to.regress, 'GENE') regression.mat <- switch( EXPR = model.use, 'linear' = qr.resid(qr = qr, y = data.expr[x,]), 'poisson' = residuals(object = glm( formula = fmla, family = 'poisson', data = regression.mat), type = 'pearson' ), 'negbinom' = NBResiduals( fmla = fmla, regression.mat = regression.mat, gene = x ) ) data.resid[i, ] <- regression.mat if (verbose) { setTxtProgressBar(pb = pb, value = i / length(x = features.regress)) } } if (verbose) { close(con = pb) } if (use.umi) { data.resid <- log1p(x = Sweep( x = data.resid, MARGIN = 1, STATS = apply(X = data.resid, MARGIN = 1, FUN = min), FUN = '-' )) } dimnames(x = data.resid) <- dimnames(x = data.expr) return(data.resid) } Seurat/R/differential_expression.R0000644000176200001440000024230714525500056016766 0ustar liggesusers#' @include generics.R #' NULL #%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% # Functions #%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% globalVariables( names = c('myAUC', 'p_val', 'avg_logFC'), package = 'Seurat', add = TRUE ) #' Gene expression markers for all identity classes #' #' Finds markers (differentially expressed genes) for each of the identity classes in a dataset #' #' @inheritParams FindMarkers #' @param node A node to find markers for and all its children; requires #' \code{\link{BuildClusterTree}} to have been run previously; replaces \code{FindAllMarkersNode} #' @param return.thresh Only return markers that have a p-value < return.thresh, or a power > return.thresh (if the test is ROC) #' #' @return Matrix containing a ranked list of putative markers, and associated #' statistics (p-values, ROC score, etc.) 
#' #' @importFrom stats setNames #' #' @export #' #' @aliases FindAllMarkersNode #' @concept differential_expression #' #' @examples #' data("pbmc_small") #' # Find markers for all clusters #' all.markers <- FindAllMarkers(object = pbmc_small) #' head(x = all.markers) #' \dontrun{ #' # Pass a value to node as a replacement for FindAllMarkersNode #' pbmc_small <- BuildClusterTree(object = pbmc_small) #' all.markers <- FindAllMarkers(object = pbmc_small, node = 4) #' head(x = all.markers) #' } #' FindAllMarkers <- function( object, assay = NULL, features = NULL, logfc.threshold = 0.1, test.use = 'wilcox', slot = 'data', min.pct = 0.01, min.diff.pct = -Inf, node = NULL, verbose = TRUE, only.pos = FALSE, max.cells.per.ident = Inf, random.seed = 1, latent.vars = NULL, min.cells.feature = 3, min.cells.group = 3, mean.fxn = NULL, fc.name = NULL, base = 2, return.thresh = 1e-2, densify = FALSE, ... ) { MapVals <- function(vec, from, to) { vec2 <- setNames(object = to, nm = from)[as.character(x = vec)] vec2[is.na(x = vec2)] <- vec[is.na(x = vec2)] return(unname(obj = vec2)) } if ((test.use == "roc") && (return.thresh == 1e-2)) { return.thresh <- 0.7 } if (is.null(x = node)) { idents.all <- sort(x = unique(x = Idents(object = object))) } else { if (!PackageCheck('ape', error = FALSE)) { stop(cluster.ape, call. = FALSE) } tree <- Tool(object = object, slot = 'BuildClusterTree') if (is.null(x = tree)) { stop("Please run 'BuildClusterTree' before finding markers on nodes") } descendants <- DFT(tree = tree, node = node, include.children = TRUE) all.children <- sort(x = tree$edge[, 2][!tree$edge[, 2] %in% tree$edge[, 1]]) descendants <- MapVals( vec = descendants, from = all.children, to = tree$tip.label ) drop.children <- setdiff(x = tree$tip.label, y = descendants) keep.children <- setdiff(x = tree$tip.label, y = drop.children) orig.nodes <- c( node, as.numeric(x = setdiff(x = descendants, y = keep.children)) ) tree <- ape::drop.tip(phy = tree, tip = drop.children) new.nodes <- unique(x = tree$edge[, 1, drop = TRUE]) idents.all <- (tree$Nnode + 2):max(tree$edge) } genes.de <- list() messages <- list() for (i in 1:length(x = idents.all)) { if (verbose) { message("Calculating cluster ", idents.all[i]) } genes.de[[i]] <- tryCatch( expr = { FindMarkers( object = object, assay = assay, ident.1 = if (is.null(x = node)) { idents.all[i] } else { tree }, ident.2 = if (is.null(x = node)) { NULL } else { idents.all[i] }, features = features, logfc.threshold = logfc.threshold, test.use = test.use, slot = slot, min.pct = min.pct, min.diff.pct = min.diff.pct, verbose = verbose, only.pos = only.pos, max.cells.per.ident = max.cells.per.ident, random.seed = random.seed, latent.vars = latent.vars, min.cells.feature = min.cells.feature, min.cells.group = min.cells.group, mean.fxn = mean.fxn, fc.name = fc.name, base = base, densify = densify, ... 
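# each identity class (or, when `node` is supplied, each node of the pruned cluster
# tree) is tested with FindMarkers(); errors raised here are caught by tryCatch() and
# reported as warnings after the loop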
) }, error = function(cond) { return(cond$message) } ) if (is.character(x = genes.de[[i]])) { messages[[i]] <- genes.de[[i]] genes.de[[i]] <- NULL } } gde.all <- data.frame() for (i in 1:length(x = idents.all)) { if (is.null(x = unlist(x = genes.de[i]))) { next } gde <- genes.de[[i]] if (nrow(x = gde) > 0) { if (test.use == "roc") { gde <- subset( x = gde, subset = (myAUC > return.thresh | myAUC < (1 - return.thresh)) ) } else if (is.null(x = node) || test.use %in% c('bimod', 't')) { gde <- gde[order(gde$p_val, -abs(gde$pct.1-gde$pct.2)), ] gde <- subset(x = gde, subset = p_val < return.thresh) } if (nrow(x = gde) > 0) { gde$cluster <- idents.all[i] gde$gene <- rownames(x = gde) } if (nrow(x = gde) > 0) { gde.all <- rbind(gde.all, gde) } } } if ((only.pos) && nrow(x = gde.all) > 0) { return(subset(x = gde.all, subset = gde.all[, 2] > 0)) } rownames(x = gde.all) <- make.unique(names = as.character(x = gde.all$gene)) if (nrow(x = gde.all) == 0) { warning("No DE genes identified", call. = FALSE, immediate. = TRUE) } if (length(x = messages) > 0) { warning("The following tests were not performed: ", call. = FALSE, immediate. = TRUE) for (i in 1:length(x = messages)) { if (!is.null(x = messages[[i]])) { warning("When testing ", idents.all[i], " versus all:\n\t", messages[[i]], call. = FALSE, immediate. = TRUE) } } } if (!is.null(x = node)) { gde.all$cluster <- MapVals( vec = gde.all$cluster, from = new.nodes, to = orig.nodes ) } return(gde.all) } #' Finds markers that are conserved between the groups #' #' @inheritParams FindMarkers #' @param ident.1 Identity class to define markers for #' @param ident.2 A second identity class for comparison. If NULL (default) - #' use all other cells for comparison. #' @param grouping.var grouping variable #' @param assay of assay to fetch data for (default is RNA) #' @param meta.method method for combining p-values. Should be a function from #' the metap package (NOTE: pass the function, not a string) #' @param \dots parameters to pass to FindMarkers #' #' @return data.frame containing a ranked list of putative conserved markers, and #' associated statistics (p-values within each group and a combined p-value #' (such as Fishers combined p-value or others from the metap package), #' percentage of cells expressing the marker, average differences). Name of group is appended to each #' associated output column (e.g. CTRL_p_val). If only one group is tested in the grouping.var, max #' and combined p-values are not returned. #' #' @export #' @concept differential_expression #' #' @examples #' \dontrun{ #' data("pbmc_small") #' pbmc_small #' # Create a simulated grouping variable #' pbmc_small[['groups']] <- sample(x = c('g1', 'g2'), size = ncol(x = pbmc_small), replace = TRUE) #' FindConservedMarkers(pbmc_small, ident.1 = 0, ident.2 = 1, grouping.var = "groups") #' } #' FindConservedMarkers <- function( object, ident.1, ident.2 = NULL, grouping.var, assay = 'RNA', slot = 'data', min.cells.group = 3, meta.method = metap::minimump, verbose = TRUE, ... ) { metap.installed <- PackageCheck("metap", error = FALSE) if (!metap.installed[1]) { stop( "Please install the metap package to use FindConservedMarkers.", "\nThis can be accomplished with the following commands: ", "\n----------------------------------------", "\ninstall.packages('BiocManager')", "\nBiocManager::install('multtest')", "\ninstall.packages('metap')", "\n----------------------------------------", call. 
= FALSE ) } if (!is.function(x = meta.method)) { stop("meta.method should be a function from the metap package. Please see https://cran.r-project.org/web/packages/metap/metap.pdf for a detailed description of the available functions.") } object.var <- FetchData(object = object, vars = grouping.var) object <- SetIdent( object = object, cells = colnames(x = object), value = paste(Idents(object = object), object.var[, 1], sep = "_") ) levels.split <- names(x = sort(x = table(object.var[, 1]))) num.groups <- length(levels.split) cells <- list() for (i in 1:num.groups) { cells[[i]] <- rownames( x = object.var[object.var[, 1] == levels.split[i], , drop = FALSE] ) } marker.test <- list() # do marker tests ident.2.save <- ident.2 for (i in 1:num.groups) { level.use <- levels.split[i] ident.use.1 <- paste(ident.1, level.use, sep = "_") ident.use.1.exists <- ident.use.1 %in% Idents(object = object) if (!all(ident.use.1.exists)) { bad.ids <- ident.1[!ident.use.1.exists] warning( "Identity: ", paste(bad.ids, collapse = ", "), " not present in group ", level.use, ". Skipping ", level.use, call. = FALSE, immediate. = TRUE ) next } ident.2 <- ident.2.save cells.1 <- WhichCells(object = object, idents = ident.use.1) if (length(cells.1) < min.cells.group) { warning( level.use, " has fewer than ", min.cells.group, " cells in Identity: ", paste(ident.1, collapse = ", "), ". Skipping ", level.use, call. = FALSE, immediate. = TRUE ) next } if (is.null(x = ident.2)) { cells.2 <- setdiff(x = cells[[i]], y = cells.1) ident.use.2 <- names(x = which(x = table(Idents(object = object)[cells.2]) > 0)) ident.2 <- gsub(pattern = paste0("_", level.use), replacement = "", x = ident.use.2) if (length(x = ident.use.2) == 0) { stop(paste("Only one identity class present:", ident.1)) } } else { ident.use.2 <- paste(ident.2, level.use, sep = "_") } if (verbose) { message( "Testing group ", level.use, ": (", paste(ident.1, collapse = ", "), ") vs (", paste(ident.2, collapse = ", "), ")" ) } ident.use.2.exists <- ident.use.2 %in% Idents(object = object) if (!all(ident.use.2.exists)) { bad.ids <- ident.2[!ident.use.2.exists] warning( "Identity: ", paste(bad.ids, collapse = ", "), " not present in group ", level.use, ". Skipping ", level.use, call. = FALSE, immediate. = TRUE ) next } marker.test[[i]] <- FindMarkers( object = object, assay = assay, slot = slot, ident.1 = ident.use.1, ident.2 = ident.use.2, verbose = verbose, ... 
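# markers are computed independently within each level of grouping.var; only genes
# returned for every group are retained below, and their p-values are combined with
# meta.method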
) names(x = marker.test)[i] <- levels.split[i] } marker.test <- Filter(f = Negate(f = is.null), x = marker.test) genes.conserved <- Reduce( f = intersect, x = lapply( X = marker.test, FUN = function(x) { return(rownames(x = x)) } ) ) markers.conserved <- list() for (i in 1:length(x = marker.test)) { markers.conserved[[i]] <- marker.test[[i]][genes.conserved, ] colnames(x = markers.conserved[[i]]) <- paste( names(x = marker.test)[i], colnames(x = markers.conserved[[i]]), sep = "_" ) } markers.combined <- Reduce(cbind, markers.conserved) pval.codes <- colnames(x = markers.combined)[grepl(pattern = "*_p_val$", x = colnames(x = markers.combined))] if (length(x = pval.codes) > 1) { markers.combined$max_pval <- apply( X = markers.combined[, pval.codes, drop = FALSE], MARGIN = 1, FUN = max ) combined.pval <- data.frame(cp = apply( X = markers.combined[, pval.codes, drop = FALSE], MARGIN = 1, FUN = function(x) { return(meta.method(x)$p) } )) meta.method.name <- as.character(x = formals()$meta.method) if (length(x = meta.method.name) == 3) { meta.method.name <- meta.method.name[3] } colnames(x = combined.pval) <- paste0(meta.method.name, "_p_val") markers.combined <- cbind(markers.combined, combined.pval) markers.combined <- markers.combined[order(markers.combined[, paste0(meta.method.name, "_p_val")]), ] } else { warning("Only a single group was tested", call. = FALSE, immediate. = TRUE) } return(markers.combined) } #%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% # Methods for Seurat-defined generics #%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% #' @param cells.1 Vector of cell names belonging to group 1 #' @param cells.2 Vector of cell names belonging to group 2 #' @param counts Count matrix if using scale.data for DE tests. This is used for #' computing pct.1 and pct.2 and for filtering features based on fraction #' expressing #' @param features Genes to test. Default is to use all genes #' @param logfc.threshold Limit testing to genes which show, on average, at least #' X-fold difference (log-scale) between the two groups of cells. Default is 0.1 #' Increasing logfc.threshold speeds up the function, but can miss weaker signals. #' @param test.use Denotes which test to use. Available options are: #' \itemize{ #' \item{"wilcox"} : Identifies differentially expressed genes between two #' groups of cells using a Wilcoxon Rank Sum test (default); will use a fast #' implementation by Presto if installed #' \item{"wilcox_limma"} : Identifies differentially expressed genes between two #' groups of cells using the limma implementation of the Wilcoxon Rank Sum test; #' set this option to reproduce results from Seurat v4 #' \item{"bimod"} : Likelihood-ratio test for single cell gene expression, #' (McDavid et al., Bioinformatics, 2013) #' \item{"roc"} : Identifies 'markers' of gene expression using ROC analysis. #' For each gene, evaluates (using AUC) a classifier built on that gene alone, #' to classify between two groups of cells. An AUC value of 1 means that #' expression values for this gene alone can perfectly classify the two #' groupings (i.e. Each of the cells in cells.1 exhibit a higher level than #' each of the cells in cells.2). An AUC value of 0 also means there is perfect #' classification, but in the other direction. A value of 0.5 implies that #' the gene has no predictive power to classify the two groups. Returns a #' 'predictive power' (abs(AUC-0.5) * 2) ranked matrix of putative differentially #' expressed genes. 
#' \item{"t"} : Identify differentially expressed genes between two groups of #' cells using the Student's t-test. #' \item{"negbinom"} : Identifies differentially expressed genes between two #' groups of cells using a negative binomial generalized linear model. #' Use only for UMI-based datasets #' \item{"poisson"} : Identifies differentially expressed genes between two #' groups of cells using a poisson generalized linear model. #' Use only for UMI-based datasets #' \item{"LR"} : Uses a logistic regression framework to determine differentially #' expressed genes. Constructs a logistic regression model predicting group #' membership based on each feature individually and compares this to a null #' model with a likelihood ratio test. #' \item{"MAST"} : Identifies differentially expressed genes between two groups #' of cells using a hurdle model tailored to scRNA-seq data. Utilizes the MAST #' package to run the DE testing. #' \item{"DESeq2"} : Identifies differentially expressed genes between two groups #' of cells based on a model using DESeq2 which uses a negative binomial #' distribution (Love et al, Genome Biology, 2014).This test does not support #' pre-filtering of genes based on average difference (or percent detection rate) #' between cell groups. However, genes may be pre-filtered based on their #' minimum detection rate (min.pct) across both cell groups. To use this method, #' please install DESeq2, using the instructions at #' https://bioconductor.org/packages/release/bioc/html/DESeq2.html #' } #' @param min.pct only test genes that are detected in a minimum fraction of #' min.pct cells in either of the two populations. Meant to speed up the function #' by not testing genes that are very infrequently expressed. Default is 0.01 #' @param min.diff.pct only test genes that show a minimum difference in the #' fraction of detection between the two groups. Set to -Inf by default #' @param only.pos Only return positive markers (FALSE by default) #' @param verbose Print a progress bar once expression testing begins #' @param max.cells.per.ident Down sample each identity class to a max number. #' Default is no downsampling. Not activated by default (set to Inf) #' @param random.seed Random seed for downsampling #' @param latent.vars Variables to test, used only when \code{test.use} is one of #' 'LR', 'negbinom', 'poisson', or 'MAST' #' @param min.cells.feature Minimum number of cells expressing the feature in at least one #' of the two groups, currently only used for poisson and negative binomial tests #' @param min.cells.group Minimum number of cells in one of the groups #' @param pseudocount.use Pseudocount to add to averaged expression values when #' calculating logFC. 1 by default. #' @param fc.results data.frame from FoldChange #' @param densify Convert the sparse matrix to a dense form before running the DE test. This can provide speedups but might require higher memory; default is FALSE #' #' #' @importFrom Matrix rowMeans #' @importFrom stats p.adjust #' #' @rdname FindMarkers #' @concept differential_expression #' @export #' @method FindMarkers default #' FindMarkers.default <- function( object, slot = "data", counts = numeric(), cells.1 = NULL, cells.2 = NULL, features = NULL, logfc.threshold = 0.1, test.use = 'wilcox', min.pct = 0.01, min.diff.pct = -Inf, verbose = TRUE, only.pos = FALSE, max.cells.per.ident = Inf, random.seed = 1, latent.vars = NULL, min.cells.feature = 3, min.cells.group = 3, pseudocount.use = 1, fc.results = NULL, densify = FALSE, ... 
) { ValidateCellGroups( object = object, cells.1 = cells.1, cells.2 = cells.2, min.cells.group = min.cells.group ) features <- features %||% rownames(x = object) # reset parameters so no feature filtering is performed if (test.use %in% DEmethods_noprefilter()) { features <- rownames(x = object) min.diff.pct <- -Inf logfc.threshold <- 0 } data <- switch( EXPR = slot, 'scale.data' = counts, object ) # feature selection (based on percentages) alpha.min <- pmax(fc.results$pct.1, fc.results$pct.2) names(x = alpha.min) <- rownames(x = fc.results) features <- names(x = which(x = alpha.min >= min.pct)) if (length(x = features) == 0) { warning("No features pass min.pct threshold; returning empty data.frame") return(fc.results[features, ]) } alpha.diff <- alpha.min - pmin(fc.results$pct.1, fc.results$pct.2) features <- names( x = which(x = alpha.min >= min.pct & alpha.diff >= min.diff.pct) ) if (length(x = features) == 0) { warning("No features pass min.diff.pct threshold; returning empty data.frame") return(fc.results[features, ]) } # feature selection (based on logFC) if (slot != "scale.data") { total.diff <- fc.results[, 1] #first column is logFC names(total.diff) <- rownames(fc.results) features.diff <- if (only.pos) { names(x = which(x = total.diff >= logfc.threshold)) } else { names(x = which(x = abs(x = total.diff) >= logfc.threshold)) } features <- intersect(x = features, y = features.diff) if (length(x = features) == 0) { warning("No features pass logfc.threshold threshold; returning empty data.frame") return(fc.results[features, ]) } } # subsample cell groups if they are too large if (max.cells.per.ident < Inf) { set.seed(seed = random.seed) if (length(x = cells.1) > max.cells.per.ident) { cells.1 <- sample(x = cells.1, size = max.cells.per.ident) } if (length(x = cells.2) > max.cells.per.ident) { cells.2 <- sample(x = cells.2, size = max.cells.per.ident) } if (!is.null(x = latent.vars)) { latent.vars <- latent.vars[c(cells.1, cells.2), , drop = FALSE] } } if (inherits(x = object, what = "IterableMatrix")){ if(test.use != "wilcox"){ stop("Differential expression with BPCells currently only supports the 'wilcox' method.", " Please rerun with test.use = 'wilcox'") } data.use <- object[features, c(cells.1, cells.2), drop = FALSE] groups <- c(rep("foreground", length(cells.1)), rep("background", length(cells.2))) de.results <- suppressMessages( BPCells::marker_features(data.use, group = groups, method = "wilcoxon") ) de.results <- subset(de.results, de.results$foreground == "foreground") de.results <- data.frame(feature = de.results$feature, p_val = de.results$p_val_raw) rownames(de.results) <- de.results$feature de.results$feature <- NULL } else { de.results <- PerformDE( object = object, cells.1 = cells.1, cells.2 = cells.2, features = features, test.use = test.use, verbose = verbose, min.cells.feature = min.cells.feature, latent.vars = latent.vars, densify = densify, ... 
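# PerformDE() dispatches to the statistical test named by test.use; the fold-change and
# percent-expressed columns from fc.results are bound onto the results below, and
# Bonferroni correction is applied over all features in the object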
) } de.results <- cbind(de.results, fc.results[rownames(x = de.results), , drop = FALSE]) if (only.pos) { de.results <- de.results[de.results[, 2] > 0, , drop = FALSE] } if (test.use %in% DEmethods_nocorrect()) { de.results <- de.results[order(-de.results$power, -de.results[, 1]), ] } else { de.results <- de.results[order(de.results$p_val, -abs(de.results$pct.1-de.results$pct.2)), ] de.results$p_val_adj = p.adjust( p = de.results$p_val, method = "bonferroni", n = nrow(x = object) ) } return(de.results) } #' @param norm.method Normalization method for fold change calculation when #' \code{slot} is \dQuote{\code{data}} #' #' @rdname FindMarkers #' @concept differential_expression #' @export #' @method FindMarkers Assay #' FindMarkers.Assay <- function( object, slot = "data", cells.1 = NULL, cells.2 = NULL, features = NULL, logfc.threshold = 0.1, test.use = 'wilcox', min.pct = 0.01, min.diff.pct = -Inf, verbose = TRUE, only.pos = FALSE, max.cells.per.ident = Inf, random.seed = 1, latent.vars = NULL, min.cells.feature = 3, min.cells.group = 3, pseudocount.use = 1, mean.fxn = NULL, fc.name = NULL, base = 2, densify = FALSE, norm.method = NULL, ... ) { data.slot <- ifelse( test = test.use %in% DEmethods_counts(), yes = 'counts', no = slot ) if (length(x = Layers(object = object, search = slot)) > 1) { stop(slot, ' layers are not joined. Please run JoinLayers') } data.use <- GetAssayData(object = object, slot = data.slot) counts <- switch( EXPR = data.slot, 'scale.data' = GetAssayData(object = object, slot = "counts"), numeric() ) fc.results <- FoldChange( object = object, slot = data.slot, cells.1 = cells.1, cells.2 = cells.2, features = features, pseudocount.use = pseudocount.use, mean.fxn = mean.fxn, fc.name = fc.name, base = base, norm.method = norm.method ) de.results <- FindMarkers( object = data.use, slot = data.slot, counts = counts, cells.1 = cells.1, cells.2 = cells.2, features = features, logfc.threshold = logfc.threshold, test.use = test.use, min.pct = min.pct, min.diff.pct = min.diff.pct, verbose = verbose, only.pos = only.pos, max.cells.per.ident = max.cells.per.ident, random.seed = random.seed, latent.vars = latent.vars, min.cells.feature = min.cells.feature, min.cells.group = min.cells.group, pseudocount.use = pseudocount.use, fc.results = fc.results, densify = densify, ... ) return(de.results) } #' @method FindMarkers StdAssay #' @export #' FindMarkers.StdAssay <- FindMarkers.Assay #' @param recorrect_umi Recalculate corrected UMI counts using minimum of the median UMIs when performing DE using multiple SCT objects; default is TRUE #' #' @rdname FindMarkers #' @concept differential_expression #' @export #' @method FindMarkers SCTAssay #' FindMarkers.SCTAssay <- function( object, slot = "data", cells.1 = NULL, cells.2 = NULL, features = NULL, logfc.threshold = 0.1, test.use = 'wilcox', min.pct = 0.01, min.diff.pct = -Inf, verbose = TRUE, only.pos = FALSE, max.cells.per.ident = Inf, random.seed = 1, latent.vars = NULL, min.cells.feature = 3, min.cells.group = 3, pseudocount.use = 1, mean.fxn = NULL, fc.name = NULL, base = 2, densify = FALSE, recorrect_umi = TRUE, ... 
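# note: recorrect_umi = TRUE assumes that, when multiple SCT models are present, the
# corrected counts have been re-standardized to a common median UMI; the checks below
# stop with a request to run PrepSCTFindMarkers() if that is not the case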
) { data.slot <- ifelse( test = test.use %in% DEmethods_counts(), yes = 'counts', no = slot ) if (test.use %in% DEmethods_counts()){ # set slot to counts if (slot !="counts") { message(paste0("Setting slot to counts for ", test.use, " (counts based test: ")) slot <- "counts" } } if (recorrect_umi && length(x = levels(x = object)) > 1) { cell_attributes <- SCTResults(object = object, slot = "cell.attributes") observed_median_umis <- lapply( X = cell_attributes, FUN = function(x) median(x[, "umi"]) ) model.list <- slot(object = object, "SCTModel.list") median_umi.status <- lapply(X = model.list, FUN = function(x) { return(tryCatch( expr = slot(object = x, name = 'median_umi'), error = function(...) {return(NULL)}) )}) if (any(is.null(unlist(median_umi.status)))){ stop("SCT assay does not contain median UMI information.", "Run `PrepSCTFindMarkers()` before running `FindMarkers()` or invoke `FindMarkers(recorrect_umi=FALSE)`.") } model_median_umis <- SCTResults(object = object, slot = "median_umi") min_median_umi <- min(unlist(x = observed_median_umis)) if (any(unlist(model_median_umis) != min_median_umi)){ stop("Object contains multiple models with unequal library sizes. Run `PrepSCTFindMarkers()` before running `FindMarkers()`.") } } data.use <- GetAssayData(object = object, slot = data.slot) counts <- switch( EXPR = data.slot, 'scale.data' = GetAssayData(object = object, slot = "counts"), numeric() ) # Default assumes the input is log1p(corrected counts) default.mean.fxn <- function(x) { # return(log(x = rowMeans(x = expm1(x = x)) + pseudocount.use, base = base)) return(log(x = (rowSums(x = expm1(x = x)) + pseudocount.use)/NCOL(x), base = base)) } mean.fxn <- mean.fxn %||% switch( EXPR = slot, 'counts' = function(x) { # return(log(x = rowMeans(x = x) + pseudocount.use, base = base)) return(log(x = (rowSums(x = x) + pseudocount.use)/NCOL(x), base = base)) }, 'scale.data' = rowMeans, default.mean.fxn ) fc.results <- FoldChange( object = object, slot = data.slot, cells.1 = cells.1, cells.2 = cells.2, features = features, pseudocount.use = pseudocount.use, mean.fxn = mean.fxn, fc.name = fc.name, base = base ) de.results <- FindMarkers( object = data.use, slot = data.slot, counts = counts, cells.1 = cells.1, cells.2 = cells.2, features = features, logfc.threshold = logfc.threshold, test.use = test.use, min.pct = min.pct, min.diff.pct = min.diff.pct, verbose = verbose, only.pos = only.pos, max.cells.per.ident = max.cells.per.ident, random.seed = random.seed, latent.vars = latent.vars, min.cells.feature = min.cells.feature, min.cells.group = min.cells.group, pseudocount.use = pseudocount.use, fc.results = fc.results, densify = densify, ... ) return(de.results) } #' @importFrom Matrix rowMeans #' @rdname FindMarkers #' @concept differential_expression #' @export #' @method FindMarkers DimReduc #' FindMarkers.DimReduc <- function( object, cells.1 = NULL, cells.2 = NULL, features = NULL, logfc.threshold = 0.1, test.use = "wilcox", min.pct = 0.01, min.diff.pct = -Inf, verbose = TRUE, only.pos = FALSE, max.cells.per.ident = Inf, random.seed = 1, latent.vars = NULL, min.cells.feature = 3, min.cells.group = 3, pseudocount.use = 1, mean.fxn = rowMeans, fc.name = NULL, densify = FALSE, ... 
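# note: count-based tests (negbinom, poisson, DESeq2) are rejected below because
# reduction embeddings are continuous values rather than counts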
) { if (test.use %in% DEmethods_counts()) { stop("The following tests cannot be used for differential expression on a reduction as they assume a count model: ", paste(DEmethods_counts(), collapse=", ")) } data <- t(x = Embeddings(object = object)) ValidateCellGroups( object = data, cells.1 = cells.1, cells.2 = cells.2, min.cells.group = min.cells.group ) features <- features %||% rownames(x = data) # reset parameters so no feature filtering is performed if (test.use %in% DEmethods_noprefilter()) { features <- rownames(x = data) min.diff.pct <- -Inf logfc.threshold <- 0 } fc.results <- FoldChange( object = object, cells.1 = cells.1, cells.2 = cells.2, features = features, mean.fxn = mean.fxn, fc.name = fc.name ) # subsample cell groups if they are too large if (max.cells.per.ident < Inf) { set.seed(seed = random.seed) if (length(x = cells.1) > max.cells.per.ident) { cells.1 <- sample(x = cells.1, size = max.cells.per.ident) } if (length(x = cells.2) > max.cells.per.ident) { cells.2 <- sample(x = cells.2, size = max.cells.per.ident) } if (!is.null(x = latent.vars)) { latent.vars <- latent.vars[c(cells.1, cells.2), , drop = FALSE] } } de.results <- PerformDE( object = data, cells.1 = cells.1, cells.2 = cells.2, features = features, test.use = test.use, verbose = verbose, min.cells.feature = min.cells.feature, latent.vars = latent.vars, densify = densify, ... ) de.results <- cbind(de.results, fc.results) if (only.pos) { de.results <- de.results[de.results$avg_diff > 0, , drop = FALSE] } if (test.use %in% DEmethods_nocorrect()) { de.results <- de.results[order(-de.results$power, -de.results$avg_diff), ] } else { de.results <- de.results[order(de.results$p_val, -de.results$avg_diff), ] de.results$p_val_adj = p.adjust( p = de.results$p_val, method = "bonferroni", n = ncol(x = object) ) } return(de.results) } #' @param ident.1 Identity class to define markers for; pass an object of class #' \code{phylo} or 'clustertree' to find markers for a node in a cluster tree; #' passing 'clustertree' requires \code{\link{BuildClusterTree}} to have been run #' @param ident.2 A second identity class for comparison; if \code{NULL}, #' use all other cells for comparison; if an object of class \code{phylo} or #' 'clustertree' is passed to \code{ident.1}, must pass a node to find markers for #' @param reduction Reduction to use in differential expression testing - will test for DE on cell embeddings #' @param group.by Regroup cells into a different identity class prior to performing differential expression (see example) #' @param subset.ident Subset a particular identity class prior to regrouping. Only relevant if group.by is set (see example) #' @param assay Assay to use in differential expression testing #' @param slot Slot to pull data from; note that if \code{test.use} is "negbinom", "poisson", or "DESeq2", #' \code{slot} will be set to "counts" #' @param mean.fxn Function to use for fold change or average difference calculation. #' If NULL, the appropriate function will be chose according to the slot used #' @param fc.name Name of the fold change, average difference, or custom function column #' in the output data.frame. If NULL, the fold change column will be named #' according to the logarithm base (eg, "avg_log2FC"), or if using the scale.data #' slot "avg_diff". #' @param base The base with respect to which logarithms are computed. 
#' #' @rdname FindMarkers #' @concept differential_expression #' @export #' @method FindMarkers Seurat #' FindMarkers.Seurat <- function( object, ident.1 = NULL, ident.2 = NULL, group.by = NULL, subset.ident = NULL, assay = NULL, slot = 'data', reduction = NULL, features = NULL, logfc.threshold = 0.1, pseudocount.use = 1, test.use = "wilcox", min.pct = 0.01, min.diff.pct = -Inf, verbose = TRUE, only.pos = FALSE, max.cells.per.ident = Inf, random.seed = 1, latent.vars = NULL, min.cells.feature = 3, min.cells.group = 3, mean.fxn = NULL, fc.name = NULL, base = 2, densify = FALSE, ... ) { if (!is.null(x = group.by)) { if (!is.null(x = subset.ident)) { object <- subset(x = object, idents = subset.ident) } Idents(object = object) <- group.by } if (!is.null(x = assay) && !is.null(x = reduction)) { stop("Please only specify either assay or reduction.") } if (length(x = ident.1) == 0) { stop("At least 1 ident must be specified in `ident.1`") } # select which data to use if (is.null(x = reduction)) { assay <- assay %||% DefaultAssay(object = object) data.use <- object[[assay]] cellnames.use <- colnames(x = data.use) } else { data.use <- object[[reduction]] cellnames.use <- rownames(x = data.use) } cells <- IdentsToCells( object = object, ident.1 = ident.1, ident.2 = ident.2, cellnames.use = cellnames.use ) cells <- sapply( X = cells, FUN = intersect, y = cellnames.use, simplify = FALSE, USE.NAMES = TRUE ) if (!all(vapply(X = cells, FUN = length, FUN.VALUE = integer(length = 1L)))) { abort( message = "Cells in one or both identity groups are not present in the data requested" ) } # fetch latent.vars if (!is.null(x = latent.vars)) { latent.vars <- FetchData( object = object, vars = latent.vars, cells = c(cells$cells.1, cells$cells.2) ) } # check normalization method norm.command <- paste0("NormalizeData.", assay) norm.method <- if (norm.command %in% Command(object = object) && is.null(x = reduction)) { Command( object = object, command = norm.command, value = "normalization.method" ) } else if (length(x = intersect(x = c("FindIntegrationAnchors", "FindTransferAnchors"), y = Command(object = object)))) { command <- intersect(x = c("FindIntegrationAnchors", "FindTransferAnchors"), y = Command(object = object))[1] Command( object = object, command = command, value = "normalization.method" ) } else { NULL } de.results <- FindMarkers( object = data.use, slot = slot, cells.1 = cells$cells.1, cells.2 = cells$cells.2, features = features, logfc.threshold = logfc.threshold, pseudocount.use = pseudocount.use, test.use = test.use, min.pct = min.pct, min.diff.pct = min.diff.pct, verbose = verbose, only.pos = only.pos, max.cells.per.ident = max.cells.per.ident, random.seed = random.seed, latent.vars = latent.vars, min.cells.feature = min.cells.feature, min.cells.group = min.cells.group, mean.fxn = mean.fxn, base = base, fc.name = fc.name, densify = densify, norm.method = norm.method, ... ) return(de.results) } #' @param cells.1 Vector of cell names belonging to group 1 #' @param cells.2 Vector of cell names belonging to group 2 #' @param features Features to calculate fold change for. #' If NULL, use all features #' @importFrom Matrix rowSums #' @rdname FoldChange #' @concept differential_expression #' @export #' @method FoldChange default FoldChange.default <- function( object, cells.1, cells.2, mean.fxn, fc.name, features = NULL, ... 
) { features <- features %||% rownames(x = object) # Calculate percent expressed thresh.min <- 0 pct.1 <- round( x = rowSums(x = object[features, cells.1, drop = FALSE] > thresh.min) / length(x = cells.1), digits = 3 ) pct.2 <- round( x = rowSums(x = object[features, cells.2, drop = FALSE] > thresh.min) / length(x = cells.2), digits = 3 ) # Calculate fold change data.1 <- mean.fxn(object[features, cells.1, drop = FALSE]) data.2 <- mean.fxn(object[features, cells.2, drop = FALSE]) fc <- (data.1 - data.2) fc.results <- as.data.frame(x = cbind(fc, pct.1, pct.2)) colnames(fc.results) <- c(fc.name, "pct.1", "pct.2") return(fc.results) } #' @param norm.method Normalization method for mean function selection #' when \code{slot} is \dQuote{\code{data}} #' #' @importFrom Matrix rowMeans #' @importFrom Matrix rowSums #' @rdname FoldChange #' @concept differential_expression #' @export #' @method FoldChange Assay FoldChange.Assay <- function( object, cells.1, cells.2, features = NULL, slot = "data", pseudocount.use = 1, fc.name = NULL, mean.fxn = NULL, base = 2, norm.method = NULL, ... ) { data <- GetAssayData(object = object, slot = slot) # By default run as if LogNormalize is done log1pdata.mean.fxn <- function(x) { # return(log(x = rowMeans(x = expm1(x = x)) + pseudocount.use, base = base)) return(log(x = (rowSums(x = expm1(x = x)) + pseudocount.use)/NCOL(x), base = base)) } scaledata.mean.fxn <- rowMeans counts.mean.fxn <- function(x) { # return(log(x = rowMeans(x = x) + pseudocount.use, base = base)) return(log(x = (rowSums(x = x) + pseudocount.use)/NCOL(x), base = base)) } if (!is.null(x = norm.method)) { # For anything apart from log normalization set to rowMeans if (norm.method!="LogNormalize") { new.mean.fxn <- counts.mean.fxn } else { new.mean.fxn <- counts.mean.fxn if (slot == "data") { new.mean.fxn <- log1pdata.mean.fxn } else if (slot == "scale.data") { new.mean.fxn <- scaledata.mean.fxn } } } else { # If no normalization method is passed use slots to decide mean function new.mean.fxn <- switch( EXPR = slot, 'data' = log1pdata.mean.fxn, 'scale.data' = scaledata.mean.fxn, 'counts' = counts.mean.fxn, log1pdata.mean.fxn ) } mean.fxn <- mean.fxn %||% new.mean.fxn # Omit the decimal value of e from the column name if base == exp(1) base.text <- ifelse( test = base == exp(1), yes = "", no = base ) fc.name <- fc.name %||% ifelse( test = slot == "scale.data", yes = "avg_diff", no = paste0("avg_log", base.text, "FC") ) FoldChange( object = data, cells.1 = cells.1, cells.2 = cells.2, features = features, mean.fxn = mean.fxn, fc.name = fc.name ) } #' @method FoldChange StdAssay #' @export #' FoldChange.StdAssay <- FoldChange.Assay #' @importFrom Matrix rowMeans #' @importFrom Matrix rowSums #' @rdname FoldChange #' @concept differential_expression #' @export #' @method FoldChange SCTAssay FoldChange.SCTAssay <- function( object, cells.1, cells.2, features = NULL, slot = "data", pseudocount.use = 1, fc.name = NULL, mean.fxn = NULL, base = 2, ... 
) { pseudocount.use <- pseudocount.use %||% 1 data <- GetAssayData(object = object, slot = slot) default.mean.fxn <- function(x) { # return(log(x = rowMeans(x = expm1(x = x)) + pseudocount.use, base = base)) return(log(x = (rowSums(x = expm1(x = x)) + pseudocount.use)/NCOL(x), base = base)) } mean.fxn <- mean.fxn %||% switch( EXPR = slot, 'data' = default.mean.fxn, 'scale.data' = rowMeans, 'counts' = function(x) { # return(log(x = rowMeans(x = x) + pseudocount.use, base = base)) return(log(x = (rowSums(x = x) + pseudocount.use)/NCOL(x), base = base)) }, default.mean.fxn ) # Omit the decimal value of e from the column name if base == exp(1) base.text <- ifelse( test = base == exp(1), yes = "", no = base ) fc.name <- fc.name %||% ifelse( test = slot == "scale.data", yes = "avg_diff", no = paste0("avg_log", base.text, "FC") ) FoldChange( object = data, cells.1 = cells.1, cells.2 = cells.2, features = features, mean.fxn = mean.fxn, fc.name = fc.name ) } #' @importFrom Matrix rowMeans #' @rdname FoldChange #' @concept differential_expression #' @export #' @method FoldChange DimReduc FoldChange.DimReduc <- function( object, cells.1, cells.2, features = NULL, slot = NULL, pseudocount.use = 1, fc.name = NULL, mean.fxn = NULL, ... ) { mean.fxn <- mean.fxn %||% rowMeans fc.name <- fc.name %||% "avg_diff" data <- t(x = Embeddings(object = object)) features <- features %||% rownames(x = data) # Calculate avg difference data.1 <- mean.fxn(data[features, cells.1, drop = FALSE]) data.2 <- mean.fxn(data[features, cells.2, drop = FALSE]) fc <- (data.1 - data.2) fc.results <- data.frame(fc) colnames(fc.results) <- fc.name return(fc.results) } #' @param ident.1 Identity class to calculate fold change for; pass an object of class #' \code{phylo} or 'clustertree' to calculate fold change for a node in a cluster tree; #' passing 'clustertree' requires \code{\link{BuildClusterTree}} to have been run #' @param ident.2 A second identity class for comparison; if \code{NULL}, #' use all other cells for comparison; if an object of class \code{phylo} or #' 'clustertree' is passed to \code{ident.1}, must pass a node to calculate fold change for #' @param reduction Reduction to use - will calculate average difference on cell embeddings #' @param group.by Regroup cells into a different identity class prior to #' calculating fold change (see example in \code{\link{FindMarkers}}) #' @param subset.ident Subset a particular identity class prior to regrouping. #' Only relevant if group.by is set (see example in \code{\link{FindMarkers}}) #' @param assay Assay to use in fold change calculation #' @param slot Slot to pull data from #' @param pseudocount.use Pseudocount to add to averaged expression values when #' calculating logFC. #' @param mean.fxn Function to use for fold change or average difference calculation #' @param base The base with respect to which logarithms are computed. #' @param fc.name Name of the fold change, average difference, or custom function column #' in the output data.frame #' #' @rdname FoldChange #' @concept differential_expression #' @export #' @method FoldChange Seurat FoldChange.Seurat <- function( object, ident.1 = NULL, ident.2 = NULL, group.by = NULL, subset.ident = NULL, assay = NULL, slot = 'data', reduction = NULL, features = NULL, pseudocount.use = 1, mean.fxn = NULL, base = 2, fc.name = NULL, ... 
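# A minimal usage sketch (assumes the bundled pbmc_small dataset with at least two
# identity classes present):
#   data("pbmc_small")
#   fc <- FoldChange(object = pbmc_small, ident.1 = 0, ident.2 = 1)
#   head(fc)   # avg_log2FC, pct.1, pct.2 per feature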
) { if (!is.null(x = group.by)) { if (!is.null(x = subset.ident)) { object <- subset(x = object, idents = subset.ident) } Idents(object = object) <- group.by } if (!is.null(x = assay) && !is.null(x = reduction)) { stop("Please only specify either assay or reduction.") } # select which data to use if (is.null(x = reduction)) { assay <- assay %||% DefaultAssay(object = object) data.use <- object[[assay]] cellnames.use <- colnames(x = data.use) } else { data.use <- object[[reduction]] cellnames.use <- rownames(data.use) } cells <- IdentsToCells( object = object, ident.1 = ident.1, ident.2 = ident.2, cellnames.use = cellnames.use ) # check normalization method norm.command <- paste0("NormalizeData.", assay) norm.method <- if (norm.command %in% Command(object = object) && is.null(x = reduction)) { Command( object = object, command = norm.command, value = "normalization.method" ) } else if (length(x = intersect(x = c("FindIntegrationAnchors", "FindTransferAnchors"), y = Command(object = object)))) { command <- intersect(x = c("FindIntegrationAnchors", "FindTransferAnchors"), y = Command(object = object))[1] Command( object = object, command = command, value = "normalization.method" ) } else { NULL } fc.results <- FoldChange( object = data.use, cells.1 = cells$cells.1, cells.2 = cells$cells.2, features = features, slot = slot, pseudocount.use = pseudocount.use, mean.fxn = mean.fxn, base = base, fc.name = fc.name, norm.method = norm.method ) return(fc.results) } #%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% # Internal #%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% # internal function to calculate AUC values #' @importFrom pbapply pblapply # AUCMarkerTest <- function(data1, data2, mygenes, print.bar = TRUE) { myAUC <- unlist(x = lapply( X = mygenes, FUN = function(x) { return(DifferentialAUC( x = as.numeric(x = data1[x, ]), y = as.numeric(x = data2[x, ]) )) } )) myAUC[is.na(x = myAUC)] <- 0 iterate.fxn <- ifelse(test = print.bar, yes = pblapply, no = lapply) avg_diff <- unlist(x = iterate.fxn( X = mygenes, FUN = function(x) { return( ExpMean( x = as.numeric(x = data1[x, ]) ) - ExpMean( x = as.numeric(x = data2[x, ]) ) ) } )) toRet <- data.frame(cbind(myAUC, avg_diff), row.names = mygenes) toRet <- toRet[rev(x = order(toRet$myAUC)), ] return(toRet) } #internal function to run mcdavid et al. 
DE test # #' @importFrom stats sd dnorm # bimodLikData <- function(x, xmin = 0) { x1 <- x[x <= xmin] x2 <- x[x > xmin] xal <- MinMax( data = length(x = x2) / length(x = x), min = 1e-5, max = (1 - 1e-5) ) likA <- length(x = x1) * log(x = 1 - xal) if (length(x = x2) < 2) { mysd <- 1 } else { mysd <- sd(x = x2) } likB <- length(x = x2) * log(x = xal) + sum(dnorm(x = x2, mean = mean(x = x2), sd = mysd, log = TRUE)) return(likA + likB) } # returns tests that do not support feature pre-filtering DEmethods_noprefilter <- function() { c("DESeq2") } # returns tests that support latent variables (latent.vars) DEmethods_latent <- function() { c('negbinom', 'poisson', 'MAST', "LR") } # returns tests that require CheckDots DEmethods_checkdots <- function() { c('wilcox', 'wilcox_limma', 'MAST', 'DESeq2') } # returns tests that do not use Bonferroni correction on the DE results DEmethods_nocorrect <- function() { c('roc') } # returns tests that require count data DEmethods_counts <- function() { c("negbinom", "poisson", "DESeq2") } # Differential expression using DESeq2 # # Identifies differentially expressed genes between two groups of cells using # DESeq2 # # @references Love MI, Huber W and Anders S (2014). "Moderated estimation of # fold change and dispersion for RNA-seq data with DESeq2." Genome Biology. # https://bioconductor.org/packages/release/bioc/html/DESeq2.html # @param data.use Data matrix to test # @param cells.1 Group 1 cells # @param cells.2 Group 2 cells # @param verbose Print a progress bar # @param ... Extra parameters to pass to DESeq2::results # @return Returns a p-value ranked matrix of putative differentially expressed # genes. # # @details # This test does not support pre-filtering of genes based on average difference # (or percent detection rate) between cell groups. However, genes may be # pre-filtered based on their minimum detection rate (min.pct) across both cell # groups. To use this method, please install DESeq2, using the instructions at # https://bioconductor.org/packages/release/bioc/html/DESeq2.html # # @export # # @examples # \dontrun{ # data("pbmc_small") # pbmc_small # DESeq2DETest(pbmc_small, cells.1 = WhichCells(object = pbmc_small, idents = 1), # cells.2 = WhichCells(object = pbmc_small, idents = 2)) # } # DESeq2DETest <- function( data.use, cells.1, cells.2, verbose = TRUE, ... ) { if (!PackageCheck('DESeq2', error = FALSE)) { stop("Please install DESeq2 - learn more at https://bioconductor.org/packages/release/bioc/html/DESeq2.html") } CheckDots(..., fxns = 'DESeq2::results') group.info <- data.frame(row.names = c(cells.1, cells.2)) group.info[cells.1, "group"] <- "Group1" group.info[cells.2, "group"] <- "Group2" group.info[, "group"] <- factor(x = group.info[, "group"]) group.info$wellKey <- rownames(x = group.info) dds1 <- DESeq2::DESeqDataSetFromMatrix( countData = data.use, colData = group.info, design = ~ group ) dds1 <- DESeq2::estimateSizeFactors(object = dds1) dds1 <- DESeq2::estimateDispersions(object = dds1, fitType = "local") dds1 <- DESeq2::nbinomWaldTest(object = dds1) res <- DESeq2::results( object = dds1, contrast = c("group", "Group1", "Group2"), alpha = 0.05, ... 
) to.return <- data.frame(p_val = res$pvalue, row.names = rownames(res)) return(to.return) } # internal function to calculate AUC values #' @importFrom ROCR prediction performance #' DifferentialAUC <- function(x, y) { prediction.use <- prediction( predictions = c(x, y), labels = c(rep(x = 1, length(x = x)), rep(x = 0, length(x = y))), label.ordering = 0:1 ) perf.use <- performance(prediction.obj = prediction.use, measure = "auc") auc.use <- round(x = perf.use@y.values[[1]], digits = 3) return(auc.use) } #internal function to run mcdavid et al. DE test # #' @importFrom stats pchisq # DifferentialLRT <- function(x, y, xmin = 0) { lrtX <- bimodLikData(x = x) lrtY <- bimodLikData(x = y) lrtZ <- bimodLikData(x = c(x, y)) lrt_diff <- 2 * (lrtX + lrtY - lrtZ) return(pchisq(q = lrt_diff, df = 3, lower.tail = F)) } # Likelihood ratio test for zero-inflated data # # Identifies differentially expressed genes between two groups of cells using # the LRT model proposed in McDavid et al, Bioinformatics, 2013 # # @inheritParams FindMarkers # @param object Seurat object # @param cells.1 Group 1 cells # @param cells.2 Group 2 cells # @param assay.type Type of assay to fetch data for (default is RNA) # @return Returns a p-value ranked matrix of putative differentially expressed # genes. # #' @importFrom pbapply pbsapply #' @importFrom future.apply future_sapply #' @importFrom future nbrOfWorkers # # @export # @examples # data("pbmc_small") # pbmc_small # DiffExpTest(pbmc_small, cells.1 = WhichCells(object = pbmc_small, idents = 1), # cells.2 = WhichCells(object = pbmc_small, idents = 2)) # DiffExpTest <- function( data.use, cells.1, cells.2, verbose = TRUE ) { my.sapply <- ifelse( test = verbose && nbrOfWorkers() == 1, yes = pbsapply, no = future_sapply ) p_val <- unlist( x = my.sapply( X = 1:nrow(x = data.use), FUN = function(x) { return(DifferentialLRT( x = as.numeric(x = data.use[x, cells.1]), y = as.numeric(x = data.use[x, cells.2]) )) } ) ) to.return <- data.frame(p_val, row.names = rownames(x = data.use)) return(to.return) } # Differential expression testing using Student's t-test # # Identify differentially expressed genes between two groups of cells using # the Student's t-test # # @return Returns a p-value ranked matrix of putative differentially expressed # genes. 
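#
# Illustrative aside (not package API): the test documented here reduces to
# running a two-sample Student's t-test on each gene independently. A minimal
# commented sketch, assuming a hypothetical genes x cells expression matrix
# `expr.mat` and cell-name vectors `cells.1` and `cells.2`:
#
#   p_val <- sapply(
#     X = rownames(x = expr.mat),
#     FUN = function(gene) {
#       # compare one gene's expression values between the two cell groups
#       stats::t.test(
#         x = expr.mat[gene, cells.1],
#         y = expr.mat[gene, cells.2]
#       )$p.value
#     }
#   )
#   # one p-value per gene, in the same shape DiffTTest returns
#   result <- data.frame(p_val, row.names = names(x = p_val))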
# #' @importFrom stats t.test #' @importFrom pbapply pbsapply #' @importFrom future.apply future_sapply #' @importFrom future nbrOfWorkers # # @export # # @examples # data("pbmc_small") # pbmc_small # DiffTTest(pbmc_small, cells.1 = WhichCells(object = pbmc_small, idents = 1), # cells.2 = WhichCells(object = pbmc_small, idents = 2)) DiffTTest <- function( data.use, cells.1, cells.2, verbose = TRUE ) { my.sapply <- ifelse( test = verbose && nbrOfWorkers() == 1, yes = pbsapply, no = future_sapply ) p_val <- unlist( x = my.sapply( X = 1:nrow(data.use), FUN = function(x) { t.test(x = data.use[x, cells.1], y = data.use[x, cells.2])$p.value } ) ) to.return <- data.frame(p_val,row.names = rownames(x = data.use)) return(to.return) } # Tests for UMI-count based data # # Identifies differentially expressed genes between two groups of cells using # either a negative binomial or poisson generalized linear model # # @param data.use Data to test # @param cells.1 Group 1 cells # @param cells.2 Group 2 cells # @param min.cells Minimum number of cells threshold # @param latent.vars Latent variables to test # @param test.use parameterizes the glm # @param verbose Print progress bar # # @return Returns a p-value ranked matrix of putative differentially expressed # genes. # #' @importFrom MASS glm.nb #' @importFrom pbapply pbsapply #' @importFrom stats var as.formula #' @importFrom future.apply future_sapply #' @importFrom future nbrOfWorkers #' # @export # # @examples # data("pbmc_small") # pbmc_small # # Note, not recommended for particularly small datasets - expect warnings # NegBinomDETest(pbmc_small, cells.1 = WhichCells(object = pbmc_small, idents = 1), # cells.2 = WhichCells(object = pbmc_small, idents = 2)) # GLMDETest <- function( data.use, cells.1, cells.2, min.cells = 3, latent.vars = NULL, test.use = NULL, verbose = TRUE ) { group.info <- data.frame( group = rep( x = c('Group1', 'Group2'), times = c(length(x = cells.1), length(x = cells.2)) ) ) rownames(group.info) <- c(cells.1, cells.2) group.info[, "group"] <- factor(x = group.info[, "group"]) latent.vars <- if (is.null(x = latent.vars)) { group.info } else { cbind(x = group.info, latent.vars) } latent.var.names <- colnames(x = latent.vars) my.sapply <- ifelse( test = verbose && nbrOfWorkers() == 1, yes = pbsapply, no = future_sapply ) p_val <- unlist( x = my.sapply( X = 1:nrow(data.use), FUN = function(x) { latent.vars[, "GENE"] <- as.numeric(x = data.use[x, ]) # check that gene is expressed in specified number of cells in one group if (sum(latent.vars$GENE[latent.vars$group == "Group1"] > 0) < min.cells && sum(latent.vars$GENE[latent.vars$group == "Group2"] > 0) < min.cells) { warning(paste0( "Skipping gene --- ", x, ". Fewer than ", min.cells, " cells in both clusters." )) return(2) } # check that variance between groups is not 0 if (var(x = latent.vars$GENE) == 0) { warning(paste0( "Skipping gene -- ", x, ". No variance in expression between the two clusters." 
)) return(2) } fmla <- as.formula(object = paste( "GENE ~", paste(latent.var.names, collapse = "+") )) p.estimate <- 2 if (test.use == "negbinom") { try( expr = p.estimate <- summary( object = glm.nb(formula = fmla, data = latent.vars) )$coef[2, 4], silent = TRUE ) return(p.estimate) } else if (test.use == "poisson") { return(summary(object = glm( formula = fmla, data = latent.vars, family = "poisson" ))$coef[2,4]) } } ) ) features.keep <- rownames(data.use) if (length(x = which(x = p_val == 2)) > 0) { features.keep <- features.keep[-which(x = p_val == 2)] p_val <- p_val[!p_val == 2] } to.return <- data.frame(p_val, row.names = features.keep) return(to.return) } # Helper function for FindMarkers.Seurat and FoldChange.Seurat # Convert idents to cells # #' @importFrom methods is # IdentsToCells <- function( object, ident.1, ident.2, cellnames.use ) { # if (is.null(x = ident.1)) { stop("Please provide ident.1") } else if ((length(x = ident.1) == 1 && ident.1[1] == 'clustertree') || is(object = ident.1, class2 = 'phylo')) { if (is.null(x = ident.2)) { stop("Please pass a node to 'ident.2' to run FindMarkers on a tree") } tree <- if (is(object = ident.1, class2 = 'phylo')) { ident.1 } else { Tool(object = object, slot = 'BuildClusterTree') } if (is.null(x = tree)) { stop("Please run 'BuildClusterTree' or pass an object of class 'phylo' as 'ident.1'") } ident.1 <- tree$tip.label[GetLeftDescendants(tree = tree, node = ident.2)] ident.2 <- tree$tip.label[GetRightDescendants(tree = tree, node = ident.2)] } if (length(x = as.vector(x = ident.1)) > 1 && any(as.character(x = ident.1) %in% cellnames.use)) { bad.cells <- cellnames.use[which(x = !as.character(x = ident.1) %in% cellnames.use)] if (length(x = bad.cells) > 0) { stop(paste0("The following cell names provided to ident.1 are not present in the object: ", paste(bad.cells, collapse = ", "))) } } else { ident.1 <- WhichCells(object = object, idents = ident.1) } # if NULL for ident.2, use all other cells if (length(x = as.vector(x = ident.2)) > 1 && any(as.character(x = ident.2) %in% cellnames.use)) { bad.cells <- cellnames.use[which(!as.character(x = ident.2) %in% cellnames.use)] if (length(x = bad.cells) > 0) { stop(paste0("The following cell names provided to ident.2 are not present in the object: ", paste(bad.cells, collapse = ", "))) } } else { if (is.null(x = ident.2)) { ident.2 <- setdiff(x = cellnames.use, y = ident.1) } else { ident.2 <- WhichCells(object = object, idents = ident.2) } } return(list(cells.1 = ident.1, cells.2 = ident.2)) } # Perform differential expression testing using a logistic regression framework # # Constructs a logistic regression model predicting group membership based on a # given feature and compares this to a null model with a likelihood ratio test. # # @param data.use expression matrix # @param cells.1 Vector of cells in group 1 # @param cells2. 
Vector of cells in group 2 # @param latent.vars Latent variables to include in model # @param verbose Print messages # #' @importFrom lmtest lrtest #' @importFrom pbapply pbsapply #' @importFrom stats as.formula glm #' @importFrom future.apply future_sapply #' @importFrom future nbrOfWorkers # LRDETest <- function( data.use, cells.1, cells.2, latent.vars = NULL, verbose = TRUE ) { group.info <- data.frame(row.names = c(cells.1, cells.2)) group.info[cells.1, "group"] <- "Group1" group.info[cells.2, "group"] <- "Group2" group.info[, "group"] <- factor(x = group.info[, "group"]) data.use <- data.use[, rownames(group.info), drop = FALSE] latent.vars <- latent.vars[rownames(group.info), , drop = FALSE] my.sapply <- ifelse( test = verbose && nbrOfWorkers() == 1, yes = pbsapply, no = future_sapply ) p_val <- my.sapply( X = 1:nrow(x = data.use), FUN = function(x) { if (is.null(x = latent.vars)) { model.data <- cbind(GENE = data.use[x, ], group.info) fmla <- as.formula(object = "group ~ GENE") fmla2 <- as.formula(object = "group ~ 1") } else { model.data <- cbind(GENE = data.use[x, ], group.info, latent.vars) fmla <- as.formula(object = paste( "group ~ GENE +", paste(colnames(x = latent.vars), collapse = "+") )) fmla2 <- as.formula(object = paste( "group ~", paste(colnames(x = latent.vars), collapse = "+") )) } model1 <- glm(formula = fmla, data = model.data, family = "binomial") model2 <- glm(formula = fmla2, data = model.data, family = "binomial") lrtest <- lrtest(model1, model2) return(lrtest$Pr[2]) } ) to.return <- data.frame(p_val, row.names = rownames(data.use)) return(to.return) } # ROC-based marker discovery # # Identifies 'markers' of gene expression using ROC analysis. For each gene, # evaluates (using AUC) a classifier built on that gene alone, to classify # between two groups of cells. # # An AUC value of 1 means that expression values for this gene alone can # perfectly classify the two groupings (i.e. Each of the cells in cells.1 # exhibit a higher level than each of the cells in cells.2). An AUC value of 0 # also means there is perfect classification, but in the other direction. A # value of 0.5 implies that the gene has no predictive power to classify the # two groups. # # @return Returns a 'predictive power' (abs(AUC-0.5) * 2) ranked matrix of # putative differentially expressed genes. # # @export # # @examples # data("pbmc_small") # pbmc_small # MarkerTest(pbmc_small, cells.1 = WhichCells(object = pbmc_small, idents = 1), # cells.2 = WhichCells(object = pbmc_small, idents = 2)) # MarkerTest <- function( data.use, cells.1, cells.2, verbose = TRUE ) { to.return <- AUCMarkerTest( data1 = data.use[, cells.1, drop = FALSE], data2 = data.use[, cells.2, drop = FALSE], mygenes = rownames(x = data.use), print.bar = verbose ) to.return$power <- abs(x = to.return$myAUC - 0.5) * 2 return(to.return) } # Differential expression using MAST # # Identifies differentially expressed genes between two groups of cells using # a hurdle model tailored to scRNA-seq data. Utilizes the MAST package to run # the DE testing. # # @references Andrew McDavid, Greg Finak and Masanao Yajima (2017). MAST: Model-based # Analysis of Single Cell Transcriptomics. R package version 1.2.1. 
# https://github.com/RGLab/MAST/ # # @param data.use Data to test # @param cells.1 Group 1 cells # @param cells.2 Group 2 cells # @param latent.vars Confounding variables to adjust for in DE test # @param verbose print output # @param \dots Additional parameters to zero-inflated regression (zlm) function # in MAST # @details # To use this method, please install MAST, using instructions at https://github.com/RGLab/MAST/ # # @return Returns a p-value ranked matrix of putative differentially expressed # genes. # #' @importFrom stats relevel MASTDETest <- function( data.use, cells.1, cells.2, latent.vars = NULL, verbose = TRUE, ... ) { # Check for MAST if (!PackageCheck('MAST', error = FALSE)) { stop("Please install MAST - learn more at https://github.com/RGLab/MAST") } group.info <- data.frame(row.names = c(cells.1, cells.2)) latent.vars <- latent.vars %||% group.info group.info[cells.1, "group"] <- "Group1" group.info[cells.2, "group"] <- "Group2" group.info[, "group"] <- factor(x = group.info[, "group"]) latent.vars.names <- c("condition", colnames(x = latent.vars)) latent.vars <- cbind(latent.vars, group.info) latent.vars$wellKey <- rownames(x = latent.vars) fdat <- data.frame(rownames(x = data.use)) colnames(x = fdat)[1] <- "primerid" rownames(x = fdat) <- fdat[, 1] sca <- MAST::FromMatrix( exprsArray = as.matrix(x = data.use), check_sanity = FALSE, cData = latent.vars, fData = fdat ) cond <- factor(x = SummarizedExperiment::colData(sca)$group) cond <- relevel(x = cond, ref = "Group1") SummarizedExperiment::colData(sca)$condition <- cond fmla <- as.formula( object = paste0(" ~ ", paste(latent.vars.names, collapse = "+")) ) zlmCond <- MAST::zlm(formula = fmla, sca = sca, ...) summaryCond <- MAST::summary(object = zlmCond, doLRT = 'conditionGroup2') summaryDt <- summaryCond$datatable # fcHurdle <- merge( # summaryDt[contrast=='conditionGroup2' & component=='H', .(primerid, `Pr(>Chisq)`)], #hurdle P values # summaryDt[contrast=='conditionGroup2' & component=='logFC', .(primerid, coef, ci.hi, ci.lo)], by='primerid' # ) #logFC coefficients # fcHurdle[,fdr:=p.adjust(`Pr(>Chisq)`, 'fdr')] p_val <- summaryDt[summaryDt[, "component"] == "H", 4] genes.return <- summaryDt[summaryDt[, "component"] == "H", 1] # p_val <- subset(summaryDt, component == "H")[, 4] # genes.return <- subset(summaryDt, component == "H")[, 1] to.return <- data.frame(p_val, row.names = genes.return) return(to.return) } # compare two negative binomial regression models # model one uses only common factors (com.fac) # model two additionally uses group factor (grp.fac) # #' @importFrom stats glm anova coef # NBModelComparison <- function(y, theta, latent.data, com.fac, grp.fac) { tab <- as.matrix(x = table(y > 0, latent.data[, grp.fac])) freqs <- tab['TRUE', ] / apply(X = tab, MARGIN = 2, FUN = sum) fit2 <- 0 fit4 <- 0 try( expr = fit2 <- glm( formula = y ~ ., data = latent.data[, com.fac, drop = FALSE], family = MASS::negative.binomial(theta = theta) ), silent=TRUE ) try( fit4 <- glm( formula = y ~ ., data = latent.data[, c(com.fac, grp.fac)], family = MASS::negative.binomial(theta = theta) ), silent = TRUE ) if (is.numeric(x = fit2) || is.numeric(x = fit4)) { message('One of the glm.nb calls failed') return(c(rep(x = NA, 5), freqs)) } pval <- anova(fit2, fit4, test = 'Chisq')$'Pr(>Chi)'[2] foi <- 2 + length(x = com.fac) log2.fc <- log2(x = 1 / exp(x = coef(object = fit4)[foi])) ret <- c( fit2$deviance, fit4$deviance, pval, coef(object = fit4)[foi], log2.fc, freqs ) names(x = ret) <- c( 'dev1', 'dev2', 'pval', 'coef', 'log2.fc', 
'freq1', 'freq2' ) return(ret) } PerformDE <- function( object, cells.1, cells.2, features, test.use, verbose, min.cells.feature, latent.vars, densify, ... ) { if (!(test.use %in% DEmethods_latent()) && !is.null(x = latent.vars)) { warning( "'latent.vars' is only used for the following tests: ", paste(DEmethods_latent(), collapse=", "), call. = FALSE, immediate. = TRUE ) } if (!test.use %in% DEmethods_checkdots()) { CheckDots(...) } data.use <- object[features, c(cells.1, cells.2), drop = FALSE] if (densify){ data.use <- as.matrix(x = data.use) } de.results <- switch( EXPR = test.use, 'wilcox' = WilcoxDETest( data.use = data.use, cells.1 = cells.1, cells.2 = cells.2, verbose = verbose, ... ), 'wilcox_limma' = WilcoxDETest( data.use = data.use, cells.1 = cells.1, cells.2 = cells.2, verbose = verbose, limma = TRUE, ... ), 'bimod' = DiffExpTest( data.use = data.use, cells.1 = cells.1, cells.2 = cells.2, verbose = verbose ), 'roc' = MarkerTest( data.use = data.use, cells.1 = cells.1, cells.2 = cells.2, verbose = verbose ), 't' = DiffTTest( data.use = data.use, cells.1 = cells.1, cells.2 = cells.2, verbose = verbose ), 'negbinom' = GLMDETest( data.use = data.use, cells.1 = cells.1, cells.2 = cells.2, min.cells = min.cells.feature, latent.vars = latent.vars, test.use = test.use, verbose = verbose ), 'poisson' = GLMDETest( data.use = data.use, cells.1 = cells.1, cells.2 = cells.2, min.cells = min.cells.feature, latent.vars = latent.vars, test.use = test.use, verbose = verbose ), 'MAST' = MASTDETest( data.use = data.use, cells.1 = cells.1, cells.2 = cells.2, latent.vars = latent.vars, verbose = verbose, ... ), "DESeq2" = DESeq2DETest( data.use = data.use, cells.1 = cells.1, cells.2 = cells.2, verbose = verbose, ... ), "LR" = LRDETest( data.use = data.use, cells.1 = cells.1, cells.2 = cells.2, latent.vars = latent.vars, verbose = verbose ), stop("Unknown test: ", test.use) ) return(de.results) } #' Prepare object to run differential expression on SCT assay with multiple models #' #' Given a merged object with multiple SCT models, this function uses minimum #' of the median UMI (calculated using the raw UMI counts) of individual objects #' to reverse the individual SCT regression model using minimum of median UMI #' as the sequencing depth covariate. #' The counts slot of the SCT assay is replaced with recorrected counts and #' the data slot is replaced with log1p of recorrected counts. #' @param object Seurat object with SCT assays #' @param assay Assay name where for SCT objects are stored; Default is 'SCT' #' @param verbose Print messages and progress #' @importFrom Matrix Matrix #' @importFrom SeuratObject SparseEmptyMatrix #' @importFrom pbapply pblapply #' @importFrom future.apply future_lapply #' @importFrom future nbrOfWorkers #' @importFrom sctransform correct_counts #' @importFrom SeuratObject JoinLayers #' #' @return Returns a Seurat object with recorrected counts and data in the SCT assay. 
#' @export #' #' @concept differential_expression #' @template section-progressr #' @template section-future #' @examples #' data("pbmc_small") #' pbmc_small1 <- SCTransform(object = pbmc_small, variable.features.n = 20, vst.flavor="v1") #' pbmc_small2 <- SCTransform(object = pbmc_small, variable.features.n = 20, vst.flavor="v1") #' pbmc_merged <- merge(x = pbmc_small1, y = pbmc_small2) #' pbmc_merged <- PrepSCTFindMarkers(object = pbmc_merged) #' markers <- FindMarkers( #' object = pbmc_merged, #' ident.1 = "0", #' ident.2 = "1", #' assay = "SCT" #' ) #' pbmc_subset <- subset(pbmc_merged, idents = c("0", "1")) #' markers_subset <- FindMarkers( #' object = pbmc_subset, #' ident.1 = "0", #' ident.2 = "1", #' assay = "SCT", #' recorrect_umi = FALSE #' ) #' PrepSCTFindMarkers <- function(object, assay = "SCT", verbose = TRUE) { if (verbose && nbrOfWorkers() == 1) { my.lapply <- pblapply } else { my.lapply <- future_lapply } if (length(x = levels(x = object[[assay]])) == 1) { if (verbose) { message("Only one SCT model is stored - skipping recalculating corrected counts") } return(object) } observed_median_umis <- lapply( X = SCTResults(object = object[[assay]], slot = "cell.attributes"), FUN = function(x) median(x[, "umi"]) ) model.list <- slot(object = object[[assay]], name = "SCTModel.list") median_umi.status <- lapply(X = model.list, FUN = function(x) { return(tryCatch( expr = slot(object = x, name = 'median_umi'), error = function(...) {return(NULL)}) )}) if (any(is.null(x = unlist(x = median_umi.status)))){ # For old SCT objects median_umi is set to median umi as calculated from obserbed UMIs slot(object = object[[assay]], name = "SCTModel.list") <- lapply(X = model.list, FUN = UpdateSlots) SCTResults(object = object[[assay]], slot = "median_umi") <- observed_median_umis } model_median_umis <- SCTResults(object = object[[assay]], slot = "median_umi") min_median_umi <- min(unlist(x = observed_median_umis), na.rm = TRUE) if (all(unlist(x = model_median_umis) > min_median_umi)){ if (verbose){ message("Minimum UMI unchanged. 
Skipping re-correction.") } return(object) } if (verbose) { message(paste0("Found ", length(x = levels(x = object[[assay]])), " SCT models.", " Recorrecting SCT counts using minimum median counts: ", min_median_umi)) } umi.assay <- unique( x = unlist( x = SCTResults(object = object[[assay]], slot = "umi.assay") ) ) if (length(x = umi.assay) > 1) { stop("Multiple UMI assays are used for SCTransform: ", paste(umi.assay, collapse = ", ") ) } umi.layers <- Layers(object = object, assay = umi.assay, search = 'counts') if (length(x = umi.layers) > 1) { object[[umi.assay]] <- JoinLayers( object = object[[umi.assay]], layers = "counts", new = "counts") } raw_umi <- GetAssayData(object = object, assay = umi.assay, slot = "counts") corrected_counts <- Matrix( nrow = nrow(x = raw_umi), ncol = ncol(x = raw_umi), data = 0, dimnames = dimnames(x = raw_umi), sparse = TRUE ) cell_attr <- SCTResults(object = object[[assay]], slot = "cell.attributes") model_pars_fit <- lapply( X = SCTResults(object = object[[assay]], slot = "feature.attributes"), FUN = function(x) x[, c("theta", "(Intercept)", "log_umi")] ) arguments <- SCTResults(object = object[[assay]], slot = "arguments") model_str <- SCTResults(object = object[[assay]], slot = "model") set_median_umi <- rep(min_median_umi, length(levels(x = object[[assay]]))) names(set_median_umi) <- levels(x = object[[assay]]) set_median_umi <- as.list(set_median_umi) all_genes <- rownames(x = object[[assay]]) # correct counts my.correct_counts <- function(model_name){ model_genes <- rownames(x = model_pars_fit[[model_name]]) x <- list( model_str = model_str[[model_name]], arguments = arguments[[model_name]], model_pars_fit = as.matrix(x = model_pars_fit[[model_name]]), cell_attr = cell_attr[[model_name]] ) cells <- rownames(x = cell_attr[[model_name]]) umi <- raw_umi[all_genes, cells] umi_corrected <- correct_counts( x = x, umi = umi, verbosity = 0, scale_factor = min_median_umi ) missing_features <- setdiff(x = all_genes, y = rownames(x = umi_corrected)) corrected_counts.list <- NULL gc(verbose = FALSE) empty <- SparseEmptyMatrix(nrow = length(x = missing_features), ncol = ncol(x = umi_corrected)) rownames(x = empty) <- missing_features colnames(x = umi_corrected) <- colnames(x = umi_corrected) umi_corrected <- rbind(umi_corrected, empty)[all_genes,] return(umi_corrected) } corrected_counts.list <- my.lapply(X = levels(x = object[[assay]]), FUN = my.correct_counts) names(x = corrected_counts.list) <- levels(x = object[[assay]]) corrected_counts <- do.call(what = MergeSparseMatrices, args = corrected_counts.list) corrected_counts <- as.sparse(x = corrected_counts) corrected_data <- log1p(x = corrected_counts) suppressWarnings({object <- SetAssayData(object = object, assay = assay, slot = "counts", new.data = corrected_counts)}) suppressWarnings({object <- SetAssayData(object = object, assay = assay, slot = "data", new.data = corrected_data)}) SCTResults(object = object[[assay]], slot = "median_umi") <- set_median_umi return(object) } PrepSCTFindMarkers.V5 <- function(object, assay = "SCT", umi.assay = "RNA", layer = "counts", verbose = TRUE) { layers <- Layers(object = object[[umi.assay]], search = layer) dataset.names <- gsub(pattern = paste0(layer, "."), replacement = "", x = layers) for (i in seq_along(along.with = layers)) { l <- layers[i] counts <- LayerData( object = object[[umi.assay]], layer = l ) } cells.grid <- DelayedArray::colAutoGrid(x = counts, ncol = min(length(Cells(object)), ncol(counts))) } # given a UMI count matrix, estimate NB theta parameter for 
each gene # and use fit of relationship with mean to assign regularized theta to each gene # #' @importFrom stats glm loess poisson #' @importFrom utils txtProgressBar setTxtProgressBar # RegularizedTheta <- function(cm, latent.data, min.theta = 0.01, bin.size = 128) { genes.regress <- rownames(x = cm) bin.ind <- ceiling(x = 1:length(x = genes.regress) / bin.size) max.bin <- max(bin.ind) message('Running Poisson regression (to get initial mean), and theta estimation per gene') pb <- txtProgressBar(min = 0, max = max.bin, style = 3, file = stderr()) theta.estimate <- c() for (i in 1:max.bin) { genes.bin.regress <- genes.regress[bin.ind == i] bin.theta.estimate <- unlist( x = parallel::mclapply( X = genes.bin.regress, FUN = function(j) { return(as.numeric(x = MASS::theta.ml( y = cm[j, ], mu = glm( formula = cm[j, ] ~ ., data = latent.data, family = poisson )$fitted ))) } ), use.names = FALSE ) theta.estimate <- c(theta.estimate, bin.theta.estimate) setTxtProgressBar(pb = pb, value = i) } close(con = pb) UMI.mean <- apply(X = cm, MARGIN = 1, FUN = mean) var.estimate <- UMI.mean + (UMI.mean ^ 2) / theta.estimate for (span in c(1/3, 1/2, 3/4, 1)) { fit <- loess( formula = log10(x = var.estimate) ~ log10(x = UMI.mean), span = span ) if (! any(is.na(x = fit$fitted))) { message(sprintf( 'Used loess with span %1.2f to fit mean-variance relationship\n', span )) break } } if (any(is.na(x = fit$fitted))) { stop('Problem when fitting NB gene variance in RegularizedTheta - NA values were fitted.') } theta.fit <- (UMI.mean ^ 2) / ((10 ^ fit$fitted) - UMI.mean) names(x = theta.fit) <- genes.regress to.fix <- theta.fit <= min.theta | is.infinite(x = theta.fit) if (any(to.fix)) { message( 'Fitted theta below ', min.theta, ' for ', sum(to.fix), ' genes, setting them to ', min.theta ) theta.fit[to.fix] <- min.theta } return(theta.fit) } # FindMarkers helper function for cell grouping error checking ValidateCellGroups <- function( object, cells.1, cells.2, min.cells.group ) { if (length(x = cells.1) == 0) { stop("Cell group 1 is empty - no cells with identity class ", cells.1) } else if (length(x = cells.2) == 0) { stop("Cell group 2 is empty - no cells with identity class ", cells.2) return(NULL) } else if (length(x = cells.1) < min.cells.group) { stop("Cell group 1 has fewer than ", min.cells.group, " cells") } else if (length(x = cells.2) < min.cells.group) { stop("Cell group 2 has fewer than ", min.cells.group, " cells") } else if (any(!cells.1 %in% colnames(x = object))) { bad.cells <- colnames(x = object)[which(x = !as.character(x = cells.1) %in% colnames(x = object))] stop( "The following cell names provided to cells.1 are not present: ", paste(bad.cells, collapse = ", ") ) } else if (any(!cells.2 %in% colnames(x = object))) { bad.cells <- colnames(x = object)[which(x = !as.character(x = cells.2) %in% colnames(x = object))] stop( "The following cell names provided to cells.2 are not present: ", paste(bad.cells, collapse = ", ") ) } } # Differential expression using Wilcoxon Rank Sum # # Identifies differentially expressed genes between two groups of cells using # a Wilcoxon Rank Sum test. Makes use of presto::wilcoxauc for a more efficient # implementation of the wilcoxon test. If presto is not installed, or if limma # is requested, makes use of limma::rankSumTestWithCorrelation for a # more efficient implementation of the wilcoxon test. Thanks to Yunshun Chen and # Gordon Smyth for suggesting the limma implementation. If limma is also not installed, # uses wilcox.test. 
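#
# Illustrative aside (not package API): the final base-R fallback amounts to a
# per-gene two-sample Wilcoxon rank-sum test against group membership. A
# minimal commented sketch, assuming a hypothetical genes x cells expression
# matrix `expr.mat` and cell-name vectors `cells.1` and `cells.2`:
#
#   grp <- factor(x = c(
#     rep(x = "Group1", times = length(x = cells.1)),
#     rep(x = "Group2", times = length(x = cells.2))
#   ))
#   p_val <- apply(
#     X = expr.mat[, c(cells.1, cells.2), drop = FALSE],
#     MARGIN = 1,
#     # rank-sum test of one gene's expression values across the two groups
#     FUN = function(gene.expr) stats::wilcox.test(gene.expr ~ grp)$p.value
#   )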
# # @param data.use Data matrix to test # @param cells.1 Group 1 cells # @param cells.2 Group 2 cells # @param verbose Print a progress bar # @param limma If limma should be used for testing; default is FALSE # @param ... Extra parameters passed to wilcox.test # # @return Returns a p-value ranked matrix of putative differentially expressed # features # #' @importFrom pbapply pbsapply #' @importFrom stats wilcox.test #' @importFrom future.apply future_sapply #' @importFrom future nbrOfWorkers # # @export # # @examples # data("pbmc_small") # pbmc_small # WilcoxDETest(pbmc_small, cells.1 = WhichCells(object = pbmc_small, idents = 1), # cells.2 = WhichCells(object = pbmc_small, idents = 2)) # WilcoxDETest <- function( data.use, cells.1, cells.2, verbose = TRUE, limma = FALSE, ... ) { data.use <- data.use[, c(cells.1, cells.2), drop = FALSE] j <- seq_len(length.out = length(x = cells.1)) my.sapply <- ifelse( test = verbose && nbrOfWorkers() == 1, yes = pbsapply, no = future_sapply ) overflow.check <- ifelse( test = is.na(x = suppressWarnings(length(x = data.use[1, ]) * length(x = data.use[1, ]))), yes = FALSE, no = TRUE ) presto.check <- PackageCheck("presto", error = FALSE) limma.check <- PackageCheck("limma", error = FALSE) group.info <- data.frame(row.names = c(cells.1, cells.2)) group.info[cells.1, "group"] <- "Group1" group.info[cells.2, "group"] <- "Group2" group.info[, "group"] <- factor(x = group.info[, "group"]) if (presto.check[1] && (!limma)) { data.use <- data.use[, rownames(group.info), drop = FALSE] res <- presto::wilcoxauc(X = data.use, y = group.info[, "group"]) res <- res[1:(nrow(x = res)/2),] p_val <- res$pval } else { if (getOption('Seurat.presto.wilcox.msg', TRUE) && (!limma)) { message( "For a (much!) faster implementation of the Wilcoxon Rank Sum Test,", "\n(default method for FindMarkers) please install the presto package", "\n--------------------------------------------", "\ninstall.packages('devtools')", "\ndevtools::install_github('immunogenomics/presto')", "\n--------------------------------------------", "\nAfter installation of presto, Seurat will automatically use the more ", "\nefficient implementation (no further action necessary).", "\nThis message will be shown once per session" ) options(Seurat.presto.wilcox.msg = FALSE) } if (limma.check[1] && overflow.check) { p_val <- my.sapply( X = 1:nrow(x = data.use), FUN = function(x) { return(min(2 * min(limma::rankSumTestWithCorrelation(index = j, statistics = data.use[x, ])), 1)) } ) } else { if (limma && overflow.check) { stop( "To use the limma implementation of the Wilcoxon Rank Sum Test, please install the limma package: -------------------------------------------- install.packages('BiocManager') BiocManager::install('limma') --------------------------------------------" ) } else { data.use <- data.use[, rownames(x = group.info), drop = FALSE] p_val <- my.sapply( X = 1:nrow(x = data.use), FUN = function(x) { return(wilcox.test(data.use[x, ] ~ group.info[, "group"], ...)$p.value) } ) } } } return(data.frame(p_val, row.names = rownames(x = data.use))) } Seurat/NEWS.md0000644000176200001440000007710614525500075012631 0ustar liggesusers# Seurat 5.0.1 (2023-11-16) ## Changes - Fixed `SCTransform.StdAssay` to pass extra arguments to `sctransform::vst()`. 
Fixes [#875](https://github.com/satijalab/seurat/issues/7998) - Fixed PercentageFeatureSet Layer calling [(#8009)](https://github.com/satijalab/seurat/issues/8009) - Fixed cell highlighting [(#7914)](https://github.com/satijalab/seurat/pull/7914) - Updated marker sorting to be by p-value with ties broken by absolute difference in percent expression - Fixed issue with replicated barcodes in MappingScore [(#7922)](https://github.com/satijalab/seurat/issues/7922) - Improved `PseudobulkExpression` by adding 'g' to cell names that started with numeric values - Improved `PseudobulkExpression` by adding each variable specified in `group.by` as columns in the object metadata when `return.seurat=TRUE` - Fixed `DimPlot` and `FeatureScatter` which were breaking when using the `split.by` argument with a variable that contained NAs # Seurat 5.0.0 (2023-10-25) ## Added - Add `BridgeCellsRepresentation` to construct a dictionary representation for each unimodal dataset. - Add `BuildNicheAssay` to construct a new assay where each feature is a cell label. The values represent the sum of a particular cell label neighboring a given cell. - Add `CalcDispersion` to calculate the dispersion of features. - Add `CCAIntegration` to perform Seurat-CCA Integration. - Add `CountSketch` to generate a CountSketch random matrix. - Add `CreateCategoryMatrix` to create a one-hot matrix for a given label. - Add `DISP` to find variable features based on dispersion. - Add `FastRPCAIntegration` as a convenience wrapper function around the following three functions that are often run together when performing integration. - Add `FetchResiduals_reference` as a temporary function to get residuals from the reference. - Add `FetchResiduals` to call sctransform::get_residuals. - Add `FetchResidualSCTModel` to calculate Pearson residuals of features not in the scale.data. - Add `FindBridgeAnchor` to find bridge anchors between two unimodal datasets. - Add `FindBridgeIntegrationAnchors` to find a set of anchors for integration between unimodal query and the other unimodal reference using a pre-computed BridgeReferenceSet. - Add `FindBridgeTransferAnchors` to find a set of anchors for label transfer between unimodal query and the other unimodal reference using a pre-computed BridgeReferenceSet. - Add `GaussianSketch` to perform Gaussian sketching. - Add `HarmonyIntegration` to perform Harmony integration. - Add `IntegrateLayers` to integrate layers in an assay object. - Add `JointPCAIntegration` to perform Seurat-Joint PCA Integration. - Add `LeverageScore` to compute the leverage scores for a given object. - Add `LoadCurioSeeker` to load Curio Seeker data. - Add `MVP` to find variable features based on mean.var.plot. - Add `NNtoGraph` to convert the Neighbor class to an asymmetrical Graph class. - Add `PrepareBridgeReference` to preprocess the multi-omic bridge and unimodal reference datasets into an extended reference. - Add `ProjectCellEmbeddings` to project query data onto the reference dimensional reduction. - Add `ProjectData` to project high-dimensional single-cell RNA expression data from a full dataset onto the lower-dimensional embedding of the sketch of the dataset. - Add `ProjectDimReduc` to project query data to reference dimensional reduction. - Add `ProjectIntegration` to integrate embeddings from the integrated sketched.assay. - Add `PseudobulkExpression` to normalize the count data present in a given assay. 
- Add `Read10X_probe_metadata` to read the probe metadata from a 10x Genomics probe barcode matrix file in HDF5 format. - Add `RPCAIntegration` to perform Seurat-RPCA Integration. - Add `RunGraphLaplacian` to run a graph Laplacian dimensionality reduction. - Add `SelectIntegrationFeatures5` to select integration features for v5 assays. - Add `SelectSCTIntegrationFeatures` to select SCT integration features. - Add `SketchData` to use sketching methods to downsample high-dimensional single-cell RNA expression data for help with scalability for large datasets. - Add `TransferSketchLabels` to transfer cell type labels from a sketched dataset to a full dataset based on the similarities in the lower-dimensional space. - Add `UnSketchEmbeddings` to transfer embeddings from sketched cells to the full data. - Add `VST` to apply a variance stabilizing transformation for selection of variable features. ## Changes - Change `FindTransferAnchors` so that anchor filtering is not performed by default - Change `merge` so that layers will be added to a single Seurat object instead of combining raw count matrices - Deprecate `slot` parameter in favor of `layers` in accessor and set methods # Seurat 4.4.0 (2023-09-27) ## Added - Add parallelization support with speed improvements for `PrepSCTFindMarkers` - Fix bug in `LoadNanostring`([#7566](https://github.com/satijalab/seurat/pull/7566)) ## Changes - Fix bug in `as.Seurat.SingleCellExperiment()` ([#6692](https://github.com/satijalab/seurat/issues/6692)) - Support for Visium probe information introduced in Spaceranger 2.1 ([#7141](https://github.com/satijalab/seurat/pull/7141)) - Add `LoadCurioSeeker` to load sequencing-based spatial datasets generated using the Curio Seeker - Fix fold change calculation for assays ([#7095](https://github.com/satijalab/seurat/issues/7095)) - Fix `pt.size` bug when rasterization is set to true ([#7379](https://github.com/satijalab/seurat/issues/7379)) - Fix `FoldChange` and `FindMarkers` to support all normalization approaches ([#7115](https://github.com/satijalab/seurat/pull/7115),[#7110](https://github.com/satijalab/seurat/issues/7110),[#7095](https://github.com/satijalab/seurat/issues/7095),[#6976](https://github.com/satijalab/seurat/issues/6976),[#6654](https://github.com/satijalab/seurat/issues/6654),[#6701](https://github.com/satijalab/seurat/issues/6701),[#6773](https://github.com/satijalab/seurat/issues/6773), [#7107](https://github.com/satijalab/seurat/issues/7107)) - Fix for handling newer ParseBio formats in `ReadParseBio` ([#7565](https://github.com/satijalab/seurat/pull/7565)) - Fix for handling rasterization by default ([#7842](https://github.com/satijalab/seurat/pull/7842)) - Fix bug in `ReadMtx()` to add back missing parameters - Fix `SCTransform()` for V5 assays to retain gene attributes ([#7557](https://github.com/satijalab/seurat/issues/7557)) - Fix `LeverageScore()` for objects with few features ([#7650](https://github.com/satijalab/seurat/issues/7650) # Seurat 4.3.0 (2022-11-18) ## Added - Add support for imaging-based spatial datasets ## Changes - Fix bug in `FindMarkers()` when run post Integration/Transfer ([#6856](https://github.com/satijalab/seurat/issues/6586)) # Seurat 4.2.1 (2022-11-08) ## Changes - Replaced import from `spatstat.core` with `spatstat.explore` - Fix bug in `FindMarkers()` when using `SCT` assay ([#6856](https://github.com/satijalab/seurat/issues/6586)) # Seurat 4.2.0 (2022-09-21) ## Changes - Fix legend color in `DoHeatmap()` 
([#5783](https://github.com/satijalab/seurat/issues/5783)) - Fix bug in `ScaleData()` when regressing out one gene ([#5970](https://github.com/satijalab/seurat/pull/5970)) - Fix name pulling in `PlotPerturbScore()` ([#6081](https://github.com/satijalab/seurat/pull/6081)) - Support spaceranger 2.0 ([#6208](https://github.com/satijalab/seurat/pull/6208)) - Fix bug in `SpatialDimPlot()` when using `group.by` ([#6179](https://github.com/satijalab/seurat/issues/6179)) - Add `add.noise` parameter in `VlnPlot()` ([#5756](https://github.com/satijalab/seurat/issues/5756)) - Fix uwot model backwards compatibility ([#6345](https://github.com/satijalab/seurat/issues/6345)) - Allow `pseudocount.use` in differential expression functions to be set at the `Assay` level # Seurat 4.1.1 (2022-05-01) ## Changes - Fix `giveCsparse` related warnings in `Read10X_h5` - Fix ident labeling for `SpatialPlot` ([#5774](https://github.com/satijalab/seurat/issues/5774)) - Fix `ReadMtx` on Windows ([#5687](https://github.com/satijalab/seurat/issues/5687)) - Fix `VlnPlot` to switch on rasterization only when required ([#5846](https://github.com/satijalab/seurat/pull/5846)) - Fix `ncol` behavior in `SpatialPlot` ([#5774](https://github.com/satijalab/seurat/issues/5774)) - Set `jitter` to FALSE in `FeatureScatter` ([#5876](https://github.com/satijalab/seurat/pull/5876)) - Update `Cells` methods to new signature (`x, ...`) - Replace use of `default.stringsAsFactors()` with `getOption("stringsAsFactors")` # Seurat 4.1.0 (2022-01-14) ## Added - Add `raster.dpi` parameter to `DimPlot/FeaturePlot` to optionally rasterize individual points ([#5392](https://github.com/satijalab/seurat/pull/5392)) - Add support for sctransform v2, differential expression with SCT assay ## Changes - Update `ReadParseBio` to support split-pipe 0.9.6p ([#5446](https://github.com/satijalab/seurat/pull/5446)) - Fixes for MAST differential expression ([#5441](https://github.com/satijalab/seurat/issues/5441)) - Fix scaling options when using `split.by` in `FeaturePlot()` ([#5243](https://github.com/satijalab/seurat/issues/5243)) # Seurat 4.0.6 (2021-12-16) ## Added - Implement supervised LSI ## Changes - Add `raster` parameter to `VlnPlot` to optionally rasterize individual points ([#5076](https://github.com/satijalab/seurat/pull/5076)) - Add `min.cells.group` parameter to `FindConservedMarkers` ([#5079](https://github.com/satijalab/seurat/pull/5079)) - Set `do.center` to FALSE for `lsiproject` in `FindTransferAnchors` - Fix error message in `ReadMtx()` ([#5158](https://github.com/satijalab/seurat/issues/5158)) - Add `label.color` parameter to `FeaturePlot` ([#5314](https://github.com/satijalab/seurat/pull/5314)) - Fix issues in `ProjectUMAP` ([#5257](https://github.com/satijalab/seurat/issues/5257), [#5104](https://github.com/satijalab/seurat/issues/5104), [#5373](https://github.com/satijalab/seurat/issues/5373)) # Seurat 4.0.5 (2021-10-04) ## Changes - Update documentation for `to.upper` parameter in `Load10X_Spatial()` ([#4576](https://github.com/satijalab/seurat/issues/4576)) - Update concept tags for `RunSPCA()` ([#4978](https://github.com/satijalab/seurat/discussions/4987)) - Conditionally run tests/packages that use suggested packages ([#5160](https://github.com/satijalab/seurat/pull/5160)) - Set random state in `RunUMAP()` when using the `umap-learn` method ([#5194](https://github.com/satijalab/seurat/issues/5194)) # Seurat 4.0.4 (2021-08-19) ## Added - Add `reduction` parameter to `BuildClusterTree()` 
([#4598](https://github.com/satijalab/seurat/issues/4598)) - Add DensMAP option to `RunUMAP()` ([#4630](https://github.com/satijalab/seurat/pull/4630)) - Add `image` parameter to `Load10X_Spatial()` and `image.name` parameter to `Read10X_Image()` ([#4641](https://github.com/satijalab/seurat/pull/4641)) - Add `ReadSTARsolo()` function to read output from STARsolo - Add `densify` parameter to `FindMarkers()` - Add `ReadParsebio()` function to read output from Parse Biosciences - Add the `image.alpha` parameter to `SpatialDimPlot()` and `SpatialFeaturePlot()` - Add support for the correlation metric in `RunUMAP` ([#4972](https://github.com/satijalab/seurat/issues/4972)) ## Changes - Warn and continue rather than erroring if not all features are available in `FindSpatiallyVariableFeatures()` ([#4611](https://github.com/satijalab/seurat/issues/4611)) - Bug fix for SCT-based integration in selecting proper reference model ([#4355](https://github.com/satijalab/seurat/issues/4355)) - Bug fix for reading from absolute paths in ReadMtx ([#4723](https://github.com/satijalab/seurat/issues/4723)) - Bug fix in SingleCellExperiment conversion ([#4633](https://github.com/satijalab/seurat/issues/4633)) - Bug fix in `FindVariableFeatures()` when using `selection.method = "mvp"` and `binning.method = "equal_frequency"` ([#4712](https://github.com/satijalab/seurat/issues/4712)) - Bug fix in `DoHeatmap()` to remove random characters from plot legend([#4660](https://github.com/satijalab/seurat/issues/4660)) - Fix cell renaming in `RunCCA()` - Fix issue in SingleCellExperiment conversion where the mainExp would not be set properly - Fix for default dispersion info displayed in `VariableFeaturePlot()` # Seurat 4.0.3 (2021-06-10) ## Added - Add `jitter` parameter to `FeatureScatter()` ## Changes - Fix issues with `as.SingleCellExperiment.Seurat()` for the latest verion of SingleCellExperiment ([#4532](https://github.com/satijalab/seurat/pull/4532)) - Ensure proper reference.reduction is used in `MapQuery()` - Fix to `UpdateSymbolList()`, no longer searches aliases and exposes the `search.types` parameter in `GeneSymbolThesarus()` ([#4545](https://github.com/satijalab/seurat/issues/4545)) - Transfer `scale.data` slot as well when converting with `as.SingleCellExperiment.Seurat()` - Enable `alpha` parameter for `SpatialDimPlot()` - Fix `as.SingleCellExperiment.Seurat()` conversion for atypical `reducedDim` components # Seurat 4.0.2 (2020-05-20) ## Added - New `AddAzimuthScores()` and `AddAzimuthResults()` functions - Add `shuffle` parameter to `FeatureScatter()` ([#4280](https://github.com/satijalab/seurat/pull/4280)) - Add `lsiproject` and `rpca` options for `FindTransferAnchors()` - Add `rlsi` option for `FindIntegrationAnchors()` ## Changes - Preserve feature metadata when converting from `SingleCellExperiment` to `SeuratObject` class ([#4205](https://github.com/satijalab/seurat/issues/4205)) - Preserve multiple assays when converting from `SingleCellExperiment` to `SeuratObject` class ([#3764](https://github.com/satijalab/seurat/issues/3764)) - Fix passing of `score.thresh` parameter in `ScoreJackStraw()` ([#4268](https://github.com/satijalab/seurat/pull/4268)) - Fix FC calculation in `FindMarkers()` non-log transformed data. 
- Add umap-learn version >= 0.5.0 compatibility for `RunUMAP()` - Fix `DotPlot` to use `log1p` when `scale=False` ([#4298](https://github.com/satijalab/seurat/issues/4298)) - Fix split and shuffled `DimPlot` - Disallow NULL or another length 0 vector for `ident.1` in `FindMarkers()` - Fix range shift when labeling clusters on a GeomSpatial plot - Fix SpatialPlot distortion for non-square images. - Fix future-related warnings in `FindIntegrationAnchors()` - Fix `fc.name` parameter in `FindMarkers()` ([#4474](https://github.com/satijalab/seurat/issues/4474)) - Deprecate `group.by` parameter in `PlotPerturbScore()` in favor of `mixscape.class`. # Seurat 4.0.1 (2020-03-17) ## Added - Add direction option to `PlotClusterTree()` - Add `cols` parameter to `JackStrawPlot()` - Add `ReadMtx()` to read local and remote mtx files with associated cell and feature name files ## Changes - Equality added to differential expression thresholds in `FindMarkers` (e.g, >= logfc.threshold rather than >) - `Read10X()` now prepends dataset number for first dataset when reading multiple datasets - Bug fix for `subset.AnchorSet()` - Bug fix for fold change values in `FindMarkers()` when setting a different pseudocount ([#4111](https://github.com/satijalab/seurat/pull/4111)) - Bug fix for `RunLDA()` related to proper passing of assay parameter. - When using `order=TRUE` in `SingleDimPlot()`, print NA points under all others. - Remove default parameter value for `data.dir` in `Read10X()` - Import spatstat fxns from subpackages (spatstat.core, spatstat.geom) - `RunUMAP` now checks for graph/neighbor consistency # Seurat 4.0.0 (2020-01-27) ## Added - Expose `FoldChange()` component in `FindMarkers()`. - Add the `merge.DimReduc` method - Add `IntegrateEmbeddings()` to correct embeddings of `DimReduc`s - Add `ProjectUMAP()` to project query cells into a reference UMAP space - Add `MapQuery()` as a wrapper around `IntegrateData()`, `IntegrateEmbeddings()`, and `ProjectUMAP()` - Add `MappingScore` to compute a per-cell mapping score used in Azimuth - Add `AggregateExpression()` for summation based pseudobulk calculations - Add mixscape functionality via `CalcPerturbSig()`, `PrepLDA()`, `RunLDA()`, `DEenrichRPlot()`, `MixscapeHeatmap()`, `MixscapeLDA()`, `PlotPerturbScore()`, `RunMixscape()` - Add `FindSubCluster()` to further cluster existing clusters - Add supervised PCA functionality via `RunSPCA()` - Add functionality to enable weighted nearest neighbor analyses via `FindMultiModalNeighbors()` - Add neighbor visualization plot via `NNPlot()`. - Add `PredictAssay()` to impute expression or embeddings from nearest neighbors - Add `Graphs()` function to access the names of the stored Graph objects or pull a specific one - Add checks for NA, NaN, logical, non-integer, and infinite values during CreateAssayObject and NormalizeData.default - Add `AnnotateAnchors()` to aid in AnchorSet interpretation as well as `subset.AnchorSet()` - Add flexibility of choice for cell column in `Read10X()` - Add rasterization option to `FeatureScatter()` and `VariableFeaturePlot()` - Add step1 feature parameters in the SCTModel via `PrepVSTResults()` ## Changes - Default neighbor finding algorithm changed from "rann" to "annoy" - Default `ncells` parameter in `SCTransform()` changed to 5000 - Default fold change in `FindMarkers()` changed from ln to log2 - Implementation improvements to `AverageExpression()` - `AnchorSet` class re-implemented as a virtual class from which `IntegrationAnchorSet` and `TransferAnchorSet` now inherit. 
- Point size in `VlnPlot()` now set automatically if not specified - Return the sample.tree properly when integrating with a single reference dataset - Replace `as.character.quosure` usage with `as_label` due to deprecation - Minor changes to the exact calculation of the anchor weight matrix - Default rasterization limit in `DimPlot()` and `FeaturePlot()` changed from 50,000 to 100,000 - `SCTransform()` now returns a formalized `Assay` subclass `SCTAssay()` - When using `normalization.method='SCT'` in `FindTransferAnchors()`, normalize query using reference SCT model when possible. - Change default Neighbor name in `FindNeighbors` to `Assay.nn` ## Removed - `CreateGeneActivityMatrix` replaced by `Signac::GeneActivity()` - `RunLSI` replaced by by `Signac::RunTFIDF()` and `Signac::RunSVD()` - `ReadAlevin` and `ReadAlevinCsv` moved to SeuratWrappers - `ExportToCellbrowser` and `StopCellbrowser` moved to SeuratWrappers # Seurat 3.2.3 - 2020-12-14 ## Added - Titles added to `DimPlot` when specifying `group.by` parameter - `keep.scale` parameter added to `FeaturePlot` to control scaling across multiple features and/or splits. ## Changes - `Same` deprecated in favor of `base::identity` - Fix in `DietSeurat` to work with specialized `Assay` objects - Fix p-value return when using the `ape` implementation of Moran's I - Fix bug in FindMarkers when using MAST with a latent variable - Updates to `Key<-.DimReduc` that allow handling of empty reduction column names - Allow setting `ctrl` in `CellCycleScoring` - Modify subset.Seurat to allow specialized Assay subsetting methods - Fix image selection in interactive spatial plots - Update Rcpp functions with `export(rng=FALSE)` to avoid potential future warnings - Fix RenameCells bug for integrated SCT assays - Fix highlight order with proper factor levels when using `SetHighlight` in plots - Small change in CellRanger version detection logic of h5 file to improve robustness to outside tools. - `do.cpp` deprecated and will default to true # Seurat 3.2.2 (2020-09-25) ## Changes - Set the seed in `WhichCells` regardless of whether or not `idents` is passed - Retain Graph and Neighbor objects when subsetting only on features - Fix data.frame input to `CreateAssayObject()` when data.frame has no rownames. - Default annoy search to sequential if not using multicore future plans. 
- Require sctransform >= 0.3.0 # Seurat 3.2.1 (2020-09-04) ## Added - Added support for nearest neighbor input and `return.model` parameter in `RunUMAP()` - Enable named color vectors in `DoHeatmap()` - Add `label.color` and `label.box` parameters to `DimPlot` - Added `shuffle` and `seed` parameters to `DimPlot()` to help with overplotting - Added new stacked violin plot functionality ## Changes - Allow setting `slot` parameter in `RunUMAP` - Added support for FIt-SNE v1.2+ - Fix for `Spatial*Plot` when running with interactive=TRUE - Set max for number of items returned by `Top` and remove duplicate items when balanced=TRUE - Fix logging bug when functions were run via `do.call()` - Fix handling of weight.by.var parameter when approx=FALSE in `RunPCA()` - Fix issue where feature names with dashes crashed `CellSelector` - Fix issue where errors in subsetting were being swallowed - Fix issue where labeling uncropped spatial plots was broken ## Deprecated - `CreateActivityMatrix` deprecated in favor of `Signac::GeneActivity` - `ReadAlevin` and `ReadAlevinCsv` deprecated in favor of `SeuratWrappers::ReadAlevin` - `ExportToCellbrowser` and `StopCellbrowser` deprecated in favor of `SeuratWrappers::ExportToCellbrowser` and `SeuratWrappers::StopCellbrowser` - `ReadH5AD` and `WriteH5AD` deprecated in favor of h5Seurat/H5AD functionality found in SeuratDisk - `as.loom` and `as.Seurat.loom` deprecated in favor of functionality found in SeuratDisk # Seurat 3.2.0 (2020-07-15) ## Added - Added ability to create a Seurat object from an existing Assay object, or any object inheriting from the Assay class - Added ability to cluster idents and group features in `DotPlot` - Added ability to use RColorBrewer plaettes for split `DotPlots` - Added visualization and analysis functionality for spatially resolved datasets (Visium, Slide-seq). ## Changes - Removed `add.iter` parameter from `RunTSNE` function - Fixed integer overflow error in the WilcoxDETest function - Minor visual fixes in `DoHeatmap` group bar + labels - Efficiency improvements in anchor scoring (`ScoreAnchors`) - Fix bug in `FindClusters()` when the last node has no edges - Default to weighted = TRUE when constructing igraph objects in `RunLeiden`. Remove corresponding weights parameter from `FindClusters()`. - Fix handling of keys in `FeatureScatter()` - Change `CellSelector` to use Shiny gadgets instead of SDMTools - Mark `PointLocator` as defunct - Remove `SDMTools` - Fixed data slot return in `AverageExpression` when subsetting features and returning a Seurat object # Seurat 3.1.5 (2020-04-14) ## Added - New `scale` parameter in `DotPlot` - New `keep.sparse parameter in `CreateGeneActivityMatrix` for a more memory efficient option - Added ability to store model learned by UMAP and project new data - New `strip.suffix` option in `Read10X`. **This changes the default behavior of `Read10X`**. A trailing `-1` present in all cell names will not be removed by default. 
- Added `group.by` parameter to `FeatureScatter` ## Changes - Replace wilcox.test with limma implementation for a faster FindMarkers default method - Better point separation for `VlnPlot`s when using the `split.by` option - Efficiency improvements for anchor pairing - Deprecate redundant `sort.cell` parameter in `FeaturePlot` - Fixes to ensure correct class of Matrix passed to c++ functions - Fixes for underscores in ident labels for `DotPlot` - Ensure preservation of matrix dimnames in `SampleUMI` - Fix non-standard evaluation problems in `subset` and `WhichCells` - Default split violin option is now a multi group option - Preserve alpha in `FeaturePlot` when using `blend` - Update `assay.used` slot for `DimReduc`s when Assay is renamed # Seurat 3.1.4 (2020-02-20) ## Changes - Fixes to `DoHeatmap` to remain compatible with ggplot2 v3.3 - Adoption of `patchwork` framework to replace `CombinePlots` # Seurat 3.1.3 (2020-02-07) ## Added - New system agnostic `Which` function to address problems with FItSNE on Windows ## Changes - Export `CellsByIdentities` and `RowMergeSparseMatrices` functions - nCount and nFeature metadata variables retained after subset and updated properly with `UpdateSeuratObject` - Fix uwot support for running directly on feature matrices - Fixes for keys with underscores - Fix issue with leiden option for `FindClusters` - Fix for data transfer when using sctransform - SDMTools moved to Suggests as package is orphaned # Seurat 3.1.2 (2019-12-11) ## Added - New silent slot updater - New random seed options to `RunCCA`, `RunTSNE`, `WhichCells`, `HTODemux`, `AddModuleScore`, `VlnPlot`, and `RidgePlot` - Enhancements for dealing with `Assay`-derived objects ## Changed - Only run `CalcN` (generates nFeatures and nCounts) when `counts` changes - Fix issue regarding colons in feature names - Change object class testing to use `inherits` or `is.*` for R 4.0 compatability # Seurat 3.1.1 (2019-09-20) ## Added - New `RegroupIdents` function to reassign idents based on metadata column majority - `UpdateSymbolList` function to pull new gene names from HGNC - Added support for H5AD layers as additional assays in a `Seurat` object ## Changed - Fix rownames issue when running UMAP on dist object - Add support for new H5AD `obsm` and `varm` stucture - Fix issue when trying to read non-existent feature-level metadata from an H5AD file - Fix in integration workflow when using SCTransform - Improved error checking for `AddModuleScore` - cbind fix in reference-based integration (`MapQuery`) - Fix for convenience plots error hanging - Ensure Seurat objects aren't stored in the command logs # Seurat 3.1.0 (2019-08-20) ## Added - New `PrepSCTIntegration` function to facilitate integration after `SCTransform` - Reference-based integration with the `reference` parameter in `FindIntegrationAnchors` - Reciprocal PCA as a `reduction` option in `FindIntegrationAnchors` - New `CollapseEmbeddingOutliers` function - Enable `FindTransferAnchors` after `SCTransform` - Added back `ColorDimSplit` functionality - Include a code of conduct - Added uwot support as new default UMAP method - Added `CheckDots` to catch unused parameters and suggest updated names - `Reductions` and `Assays` assays functions to list stored DimReducs and Assays ## Changed - Fix regex in `LogSeuratCommand` - Check for NAs in feature names in `Read10X` - Prevent dimnames for counts/data/scale.data matrices from being arrays - Updates `ReadH5AD` to distinguish FVF methods - Fixes to UpdateSeuratObject for v2 objects - Sink all output 
# Seurat 3.1.4 (2020-02-20)
## Changes
- Fixes to `DoHeatmap` to remain compatible with ggplot2 v3.3
- Adoption of the `patchwork` framework to replace `CombinePlots`

# Seurat 3.1.3 (2020-02-07)
## Added
- New system-agnostic `Which` function to address problems with FItSNE on Windows
## Changes
- Export `CellsByIdentities` and `RowMergeSparseMatrices` functions
- nCount and nFeature metadata variables retained after subset and updated properly with `UpdateSeuratObject`
- Fix uwot support for running directly on feature matrices
- Fixes for keys with underscores
- Fix issue with leiden option for `FindClusters`
- Fix for data transfer when using sctransform
- SDMTools moved to Suggests as the package is orphaned

# Seurat 3.1.2 (2019-12-11)
## Added
- New silent slot updater
- New random seed options to `RunCCA`, `RunTSNE`, `WhichCells`, `HTODemux`, `AddModuleScore`, `VlnPlot`, and `RidgePlot`
- Enhancements for dealing with `Assay`-derived objects
## Changed
- Only run `CalcN` (generates nFeatures and nCounts) when `counts` changes
- Fix issue regarding colons in feature names
- Change object class testing to use `inherits` or `is.*` for R 4.0 compatibility

# Seurat 3.1.1 (2019-09-20)
## Added
- New `RegroupIdents` function to reassign idents based on metadata column majority
- `UpdateSymbolList` function to pull new gene names from HGNC
- Added support for H5AD layers as additional assays in a `Seurat` object
## Changed
- Fix rownames issue when running UMAP on dist object
- Add support for the new H5AD `obsm` and `varm` structure
- Fix issue when trying to read non-existent feature-level metadata from an H5AD file
- Fix in integration workflow when using SCTransform
- Improved error checking for `AddModuleScore`
- cbind fix in reference-based integration (`MapQuery`)
- Fix for convenience plots error hanging
- Ensure Seurat objects aren't stored in the command logs

# Seurat 3.1.0 (2019-08-20)
## Added
- New `PrepSCTIntegration` function to facilitate integration after `SCTransform` (see the integration sketch after this section)
- Reference-based integration with the `reference` parameter in `FindIntegrationAnchors`
- Reciprocal PCA as a `reduction` option in `FindIntegrationAnchors`
- New `CollapseEmbeddingOutliers` function
- Enable `FindTransferAnchors` after `SCTransform`
- Added back `ColorDimSplit` functionality
- Include a code of conduct
- Added uwot support as the new default UMAP method
- Added `CheckDots` to catch unused parameters and suggest updated names
- `Reductions` and `Assays` functions to list stored DimReducs and Assays
## Changed
- Fix regex in `LogSeuratCommand`
- Check for NAs in feature names in `Read10X`
- Prevent dimnames for counts/data/scale.data matrices from being arrays
- Updates `ReadH5AD` to distinguish FVF methods
- Fixes to UpdateSeuratObject for v2 objects
- Sink all output from stdout to stderr
- Fix to scale.data cell ordering after subsetting
- Enable `Assay` specification in `BuildClusterTree`
- Fix `FeaturePlot` when using both `blend` and `split.by`
- Fix to `WhichCells` when passing `cells` and `invert`
- Fix to `HoverLocator` labels and title
- Ensure feature names don't contain pipes (`|`)
- Deprecation of `RunLSI` and `RunALRA`
- Fix legend bug when sorting in `ExIPlot`
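The 3.1.0 additions above (SCT-aware integration, the `reference` parameter, and reciprocal PCA) combine into a single workflow. The sketch below is a minimal, hedged example rather than package code: it assumes `obj.list` is a list of Seurat objects that have each already been run through `SCTransform()`, and it arbitrarily treats the first object as the reference:

```r
library(Seurat)

# Shared features and SCT residuals for integration
features <- SelectIntegrationFeatures(object.list = obj.list, nfeatures = 3000)
obj.list <- PrepSCTIntegration(object.list = obj.list, anchor.features = features)

# Reciprocal PCA needs a PCA on every object before anchors are found
obj.list <- lapply(X = obj.list, FUN = RunPCA, features = features, verbose = FALSE)

anchors <- FindIntegrationAnchors(
  object.list = obj.list,
  normalization.method = "SCT",
  anchor.features = features,
  reference = 1,       # reference-based integration
  reduction = "rpca"   # reciprocal PCA instead of the default CCA
)
integrated <- IntegrateData(anchorset = anchors, normalization.method = "SCT")

# Downstream embedding; uwot is the default UMAP backend as of this release
integrated <- RunPCA(integrated, verbose = FALSE)
integrated <- RunUMAP(integrated, dims = 1:30)
```

Dropping `reduction = "rpca"` falls back to the default CCA-based anchor search, and omitting `reference` compares all pairs of datasets instead.
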
# Seurat 3.0.2 (2019-06-07)
## Added
- Flag to skip singleton grouping in `FindClusters`
- New custom colors for blended `FeaturePlot`s
- New `GetResidual` function
- New Seurat/Monocle converters
## Changed
- Fix issue where certain assays weren't being shown in the `Seurat` object
- Fix issue where we weren't updating `DimReduc` object column names
- Fix line spacers in `DoHeatmap`
- Fix uninformative labels in `FeaturePlot`
- Fix unset identities when converting from SCE to Seurat
- Fix single colors being interpreted as palettes in `SingleDimPlot`
- Ensure factor levels are always numerically increasing after `FindClusters`
- Better cell highlighting colors for `DimPlot`
- Fix to `levels<-.Seurat`
- Add ability to use counts/scaled data in `BuildClusterTree`
- Minor fix to split `ScaleData`

# Seurat 3.0.1 (2019-05-16)
## Added
- Add global option (Seurat.memsafe) to skip gc() calls
- Restore draw.lines to DoHeatmap, maintain size of color bar with different numbers of features (#1429)
- Enable split.by parameter for ScaleData
- Add slot parameter to FeaturePlot (#1483)
- Add assay parameter to DotPlot (#1404)
## Changed
- Fix to color options for VlnPlot with split.by option (#1425)
- Improvements to conversion functions (loom, SCE)
- Fix for cluster tree reordering (#1434)
- Fix PercentageFeatureSet for single feature case
- Fix to fold change calculation and filtering for other slots in FindMarkers (#1454)
- Keep title vectorized in AugmentPlot (#1515)
- Export LogSeuratCommand function
- Fix for FindConservedMarkers when one ident is missing from a group (#1517)

# Seurat 3.0.0 (2019-04-16)
## Added
- New method for identifying anchors across single-cell datasets
- Parallelization support via future
- Additional method for demultiplexing with MULTIseqDemux
- Support normalization via sctransform
- New option for clustering with the Leiden algorithm
- Support for reading 10X v3 files
- New function to export Seurat objects for the UCSC cell browser
- Support for data import from Alevin outputs
- Imputation of dropped-out values via ALRA
## Changed
- Significant code restructuring
- Most occurrences of "gene(s)" in function names/arguments renamed to "feature(s)"
- Changes to the Seurat object class to facilitate multimodal data
- New BlendPlot implementation

# Seurat 2.3.4 (2018-07-13)
## Added
- GetIdent function added to pull identity info
## Changed
- DiffusionMap dependency replaced with destiny to avoid archival
- Java dependency removed and functionality rewritten in Rcpp
- Speed and efficiency improvements for Rcpp code
- More robust duplicate handling in CellCycleScoring

# Seurat 2.3.3 (2018-07-02)
## Added
- New HTOHeatmap function
- Support for custom PNG arguments for vector-friendly plotting
- Fix for 'NA'-labeled cells disappearing with custom color scale
## Changed
- Replaced FNN with RANN
- Removed unused compiler flags
- Moved several lightly-used packages from 'imports' to 'suggests'

# Seurat 2.3.2 (2018-06-11)
## Added
- RenameCells added for easy renaming of all cells
- Read10X_h5 added to read in 10X formatted h5 files
- SetAssayData ensures cell order is the same between assay objects and the Seurat object
- Compatibility updates for ggplot2 v2.3.0

# Seurat 2.3.1 (2018-05-03)
## Added
- Support for [UMAP](https://github.com/lmcinnes/umap) dimensional reduction technique
- New conversion functions for SingleCellExperiment and anndata
## Changed
- FetchData preserves cell order
- Require Matrix 1.2-14 or higher
- AddModuleScore no longer densifies sparse matrices
- Various visualization fixes and improvements
- Default value for latent.vars in FindMarkers/FindAllMarkers changed to NULL

# Seurat 2.3.0 (2018-03-22)
## Added
- Support for HTO demultiplexing
- Utility functions: TransferIdent, CombineIdent, SplitObject, vector.friendly
- C++ implementation for parts of BuildSNN
- Preliminary parallelization support (regression and JackStraw)
- Support for FItSNE
## Changed
- MetaDE replaced with metap for combining p-values (MetaDE was removed from CRAN)
- NMF heatmaps replaced (NMF to be archived by CRAN)

# Seurat 2.2.1 (2018-02-14)
## Changed
- MetaDE replaced with metap for combining p-values (MetaDE was removed from CRAN)
- NMF heatmaps replaced (NMF to be archived by CRAN)

# Seurat 2.2.0 (2018-01-10)
## Added
- Multiple alignment functionality with RunMultiCCA and AlignSubspace extended to multiple datasets
- CalcAlignmentScore added to evaluate alignment quality
- MetageneBicorPlot added to guide CC selection
- Change cluster order in DoHeatmap with group.order parameter
- Ability to change plotting order and add a title to DimPlot
- do.clean and subset.raw options for SubsetData
## Changed
- JoyPlot has been replaced with RidgePlot
- FindClusters is now more robust in making temp files
- MetaDE support for combining p-values in DE testing

# Seurat 2.1.0 (2017-10-12)
## Added
- Support for using MAST and DESeq2 packages for differential expression testing in FindMarkers
- Support for multi-modal single-cell data via @assay slot
## Changed
- Default DE test changed to Wilcoxon rank sum test

# Seurat 2.0.1 (2017-08-18)
## Added
- Now available on CRAN
- Updated documentation complete with examples
- Example datasets: `pbmc_small` and `cc.genes`
- C++ implementation for parts of FindVariableGenes
- Minor bug fixes

# Seurat 2.0.0 (2017-07-26)
## Added
- New method for aligning scRNA-seq datasets
- Significant code restructuring
- New methods for scoring gene expression and cell-cycle phases
- New visualization features (do.hover, do.identify)

Seurat/MD50000644000176200001440000004235414525771316012050 0ustar liggesusers0097df8ac2d6e91cfc0e9fa6dfac9672 *DESCRIPTION c2ef7c6f9278eb1816e1c98c41ea726b *LICENSE 2c335cf3dab83a0f643418321309c585 *NAMESPACE 10b63012dbb42959aa6664a831d09d21 *NEWS.md 2dc8d94314ead6f77537d1fe0fc272bc *R/RcppExports.R d3c619084cdc71df7c2a4a25cabf327d *R/clustering.R eefc5a31f8b43107aa870ca0823a14f7 *R/convenience.R 0b3f35535683b39246cb65edb6ba397c *R/data.R 36272e09f52d8e8d8092c170557e2b29 *R/differential_expression.R 3d74c7a395bcef6c34e7c063223512f3 *R/dimensional_reduction.R 5ae407b8799d799d98fa2490eef4fe6d *R/generics.R 1deb1b7232ac16cc226232e3bc27e771 *R/integration.R c707ebd4842c2fcfba8aa41bf1a79185 *R/integration5.R 190890524f239ff3e619a461cd186553 *R/mixscape.R 998101342dfd1ecf9a161a8e5c52b0d1 *R/objects.R 78256248893a8938f2c48b866bcee51a *R/preprocessing.R eff659f52d012088571165c2d2ebd419 *R/preprocessing5.R 67a64edbbce592a578a7a6367b947e28 *R/reexports.R 1e0677c7f50b69a8ad847506a648c92a *R/roxygen.R 6b3cdc6d49c7c18417a1d4d745252360 *R/sketching.R 0ba1b795094e6cad78897fb544942d33 *R/tree.R
c6d7a04f8a86dbdaab23fe59ea30aeba *R/utilities.R 780d44a4ce5fd74b1cfcef5cdaa49838 *R/visualization.R daed1f8c33ba13a786ce4407e06c3e88 *R/zzz.R e7f70ee58cf6ad12f36b3c053db66fc8 *README.md 782ecc6fdedc2250a76a9865c2519c62 *build/Seurat.pdf 29695b729b3f3e5eea4eae9d02b52d0d *build/partial.rdb e84661f995a9f29d1a09ebcdc27d3a82 *data/cc.genes.rda 55d7e35793436d5e91646d774d9f86c4 *data/cc.genes.updated.2019.rda fb89a5cc1f14ad979196d34e85b6f11f *inst/CITATION d07fe4eee5c09cc44e96c17e683cb10e *inst/extdata/pbmc_raw.txt 7c36195b7392ae6314ce16350d5cee25 *man/AddAzimuthResults.Rd 462158a180fdcfdf426dfaf7da866273 *man/AddAzimuthScores.Rd 1b367192a3ab961fb78b3ed2f6d21a84 *man/AddModuleScore.Rd 48b523dc136d6dd1676c4147cee2d0fd *man/AggregateExpression.Rd c7c2526a6664941b2d78b8b75944cb0c *man/AnchorSet-class.Rd 07c839ccf3fffc7a95d5035d3e294d11 *man/AnnotateAnchors.Rd 52233895af88c8e1697dd981c73667fe *man/Assay-class.Rd 66351cc0e0184ccc08cd3ec15b070079 *man/AugmentPlot.Rd d40f4485b82201107f8fcc38465cdc63 *man/AutoPointSize.Rd e5aad845e1519f9ac5bff2171f7d1d37 *man/AverageExpression.Rd 6d0a647c45a8644742532a34e84cd28d *man/BGTextColor.Rd 7c84e2390af0071f75b8d66ebefe06d0 *man/BarcodeInflectionsPlot.Rd 06d709066a703803b3a283d6253b36ed *man/BridgeCellsRepresentation.Rd a9d8f8eb55ba2cd2f47363b1e299da0d *man/BridgeReferenceSet-class.Rd 2542976bf205a83c130e07d513bca9cc *man/BuildClusterTree.Rd 2890ec69b1f72189f350ba6f4110e9dc *man/BuildNicheAssay.Rd 32aa8b58af38dea3ed57105c68770e2d *man/CCAIntegration.Rd 61123dad772d46f5e62e9ccd1e43d6de *man/CalcDispersion.Rd 33f13558c1c1be298a623aca4b19504b *man/CalcPerturbSig.Rd 6b19b5a68a1fbed9d71ac82343b21aab *man/CalculateBarcodeInflections.Rd 3550bef6cb5f40f4540f5dc70e80598d *man/CaseMatch.Rd b9abf47109862ea85c36ec6c511ac787 *man/CellCycleScoring.Rd 3aec17d0cae2627d1450ff2ea9732957 *man/CellScatter.Rd 9da9ef92375a1ad85e84fc0037a48784 *man/CellSelector.Rd ac4f0d73efb473049e346eec12d1efe6 *man/Cells.Rd 5b63f4f233cc0b576c2d4d506ff9be6b *man/CellsByImage.Rd 5600133820f9f0343d5edcff87c533c9 *man/CollapseEmbeddingOutliers.Rd 27a6b0d8531b5c59861ebca821ded6e9 *man/CollapseSpeciesExpressionMatrix.Rd f4a8b6d8003a5f59c5a933e48fa6ae41 *man/ColorDimSplit.Rd 9eeec370a2aad6836e8b9027d654812a *man/CombinePlots.Rd 5caddb0abadb42be8198edaf135bc461 *man/CountSketch.Rd aab726a6968329ea0b98a086717beac3 *man/CreateCategoryMatrix.Rd c534f9d624c27c9e4af6ea54e026dfab *man/CreateSCTAssayObject.Rd 86f23babe36a2c534f3401c93bf5a97c *man/CustomDistance.Rd 8ab923c6ef1bb0a548e140f45bf97ed5 *man/CustomPalette.Rd 6089534ef70066d4044c91ad4a8898ac *man/DEenrichRPlot.Rd 37d6cf4959bb8244720283cb8ce0041c *man/DISP.Rd ef9edd0671f22ca5ae5f67ed063cdc05 *man/DietSeurat.Rd d9cb5fcbd90eea8518965240e031c80a *man/DimHeatmap.Rd 20302f3eb8d4246fd14cbea7f46fb882 *man/DimPlot.Rd 6e505cdb81626e425958657f99813304 *man/DimReduc-class.Rd 66090a5a18ad242bb22df22a9e6e5c11 *man/DiscretePalette.Rd 5d03d4926234e9a2850913e65c9467df *man/DoHeatmap.Rd 4cbd74318729170638d34c2e07e459b1 *man/DotPlot.Rd 93448d7ad049632784186f00f0e825b8 *man/ElbowPlot.Rd 152070beaee889ad0c57d824c6b890b7 *man/ExpMean.Rd 8e2d2db070c229523b03d9841df4b089 *man/ExpSD.Rd 497e0fc0845d50baae3cff33fc98f9c7 *man/ExpVar.Rd dbbf22dede650709c7d8ea631e256877 *man/FastRPCAIntegration.Rd 8b154c79dcd203853c3984baf37684c4 *man/FastRowScale.Rd 1e2a4e6b54ffb868ec2c84985c42f088 *man/FeaturePlot.Rd f9949c5700f7579a1096e147b399ef97 *man/FeatureScatter.Rd 3b67b1f1c69ed703ebf1b671d7c99b50 *man/FetchResidualSCTModel.Rd ec07eda5ecaaa6925977bfc04d94d840 *man/FetchResiduals.Rd 
54d0eb6ae613dd6b1945e5bbc8800bac *man/FetchResiduals_reference.Rd 8baf10ef47e718c9fc4aef521df50b64 *man/FilterSlideSeq.Rd b73e535c692fb3f6636f8c577d0cce51 *man/FindAllMarkers.Rd c5d574df2c03e5ea2aabda2efaac963c *man/FindBridgeAnchor.Rd ebfec4c74a323bc58da16afcfafce355 *man/FindBridgeIntegrationAnchors.Rd 5c26a4006e3ccbbdb11bdcd940698d9c *man/FindBridgeTransferAnchors.Rd 6ac2dc024325846772157273a23b8969 *man/FindClusters.Rd a53264ad3caa397f14cb257a14c1cdb9 *man/FindConservedMarkers.Rd 419942228ca9f888ba141f0c4594e2f0 *man/FindIntegrationAnchors.Rd 9d92729683c7257d83e45ef54f9bd9af *man/FindMarkers.Rd 21881fce274bec77239e6df4ffe53f1b *man/FindMultiModalNeighbors.Rd 3ab75462f7605a90eeaf339a4bac0142 *man/FindNeighbors.Rd b0e1aaa645cc85a66a852eee73e39d8f *man/FindSpatiallyVariableFeatures.Rd d7110f47c2ecbd4354ee57832fdef4d2 *man/FindSubCluster.Rd 6167271d682c91d22d43ce66196d51e4 *man/FindTransferAnchors.Rd 3ac21edcfcddf42cb021250148a81cbb *man/FindVariableFeatures.Rd 497bfbcf879355d83a84b6db7708f325 *man/FoldChange.Rd 83382d414d7ba21408069379f4b23c97 *man/GaussianSketch.Rd b666afaab9ed8ac708a81e141ec4683b *man/GetAssay.Rd d134eeb6dee0f04678bbc6b3bfce768b *man/GetImage.Rd 55c7a101f7dffe9d50f12da10aff9b2e *man/GetIntegrationData.Rd 66e88036c1539122d770234caff96198 *man/GetResidual.Rd b32f57933f1a1cc82e9bc7a94f359ea2 *man/GetTissueCoordinates.Rd 35761ade67ea12a72c813f24130029fd *man/GetTransferPredictions.Rd c4adc0a85fc90e4e43d074460a06648c *man/Graph-class.Rd a8befbd8423c290c19261556acd8ff08 *man/GroupCorrelation.Rd 885bb61fa2d0a622b047dd56fa254a91 *man/GroupCorrelationPlot.Rd 4fe184e4b4c0eb3f87bef21123e29fe2 *man/HTODemux.Rd 72111f313e6e7fd80db16c58aeef7794 *man/HTOHeatmap.Rd 2cdfdefc1ac1184785a9302fa8faed50 *man/HVFInfo.SCTAssay.Rd 5f34856aba424339e34e4cfb5efbad81 *man/HarmonyIntegration.Rd ed1b839f07cf7f7ae24be300628ac47c *man/HoverLocator.Rd f7f6b58d2fc988ab8530ee9742f84b6f *man/IFeaturePlot.Rd 6b976bd20c17a1c986a04afce7c5ebd7 *man/ISpatialDimPlot.Rd 633e9f3bd1dbe8654719f8991e20ab73 *man/ISpatialFeaturePlot.Rd 6dcf2f9b32cb20689eda2e033cf5b24f *man/ImageDimPlot.Rd aee040b41e9198710d979f3cde62d6d7 *man/ImageFeaturePlot.Rd a0d26782df23bcdf744ed46e8f69c016 *man/IntegrateData.Rd 1c69a16848d8930a49b695475b59cda7 *man/IntegrateEmbeddings.Rd 1c073484f3d7a96092704885f8b7c337 *man/IntegrateLayers.Rd 4ee5eb07563971a104e4cbab9e3d5919 *man/IntegrationAnchorSet-class.Rd d69da6ee725f4978305025b6de6df811 *man/IntegrationData-class.Rd a4c189c65347eb5a25c975e435b4cbf7 *man/JackStraw.Rd 19f4c49e6346b73f981cc2878d0cf8a0 *man/JackStrawData-class.Rd 2e3257c67d4b09ef94eb2052b532c42f *man/JackStrawPlot.Rd 79c4cb8649f0fb464254f8a417246a50 *man/JointPCAIntegration.Rd 3fe0920d29478f95724b9f0206055b25 *man/L2CCA.Rd 86eeddaaa3e6f0e51059523a23f627a3 *man/L2Dim.Rd 40c09a3566db9e1528716be9317cf882 *man/LabelClusters.Rd c7876bf181dd49fbfe1d6490c4fc5b0f *man/LabelPoints.Rd 1b92a0faebd8ffe671b33e0ebad4a0bb *man/LeverageScore.Rd 585033a07f6cd3940cee0b7194361ce8 *man/LinkedPlots.Rd 6eab43031e385eac954b10c681235177 *man/Load10X_Spatial.Rd 9250526a506c24545de8856006893c67 *man/LoadAnnoyIndex.Rd 5e8bff4d2743728091f64aa30461c1d2 *man/LoadCurioSeeker.Rd af949a6114516b3ab62bb2f4b53a0e34 *man/LoadSTARmap.Rd 95af3201e2ef04e32b9d0ffd3f0feb85 *man/LocalStruct.Rd 2199b13323996da83ac09c496d2663c7 *man/LogNormalize.Rd 39eac70a16e242177b7840ae51d2071d *man/LogVMR.Rd e170c2cd21171f253f924db5785f60e5 *man/MULTIseqDemux.Rd ce90d9d73f17ae6f81dc95d1268d0643 *man/MVP.Rd dd438d9a34737502d9a84c3abe14c423 *man/MapQuery.Rd 
1ed6848f442b000f162577967509cba9 *man/MappingScore.Rd 1b9c655f92a327b11582498272a88d1a *man/MetaFeature.Rd 3c9c2bbec11d6e88c372d87d08d2dbd9 *man/MinMax.Rd 90a4c3d5498f3ccf368e0825c755134f *man/MixingMetric.Rd 25de740e86c0b92bfa4e9b73b19d7d76 *man/MixscapeHeatmap.Rd 69c36cbaa534afb6681838be5ae640bf *man/MixscapeLDA.Rd 8f87e82e16bc30af62cd4fd1df7d39e0 *man/ModalityWeights-class.Rd 9f02beb76ddcc55da09b8d379e3927c7 *man/NNPlot.Rd bf2167fff5a86d0137a3ac56355cbad6 *man/NNtoGraph.Rd efabeb1b41ce7efcf7be4662c3bd0b99 *man/Neighbor-class.Rd 85eeac1a67419c8575bdc55bfe52b498 *man/NormalizeData.Rd c3d6d895a2930a0c7d2ce6346a07a84f *man/PCASigGenes.Rd fe0087f1c4031926afee79c4a6e6a0d9 *man/PercentAbove.Rd 41b1017edf7a418ba6e5a3e806a60f37 *man/PercentageFeatureSet.Rd a9cd2601cb56b434cfdf53c0334124eb *man/PlotClusterTree.Rd 9133ec5810b07a6e4336db5eda6463ba *man/PlotPerturbScore.Rd 60eef96f3631b33a83817398d092b40f *man/PolyDimPlot.Rd c0cb6074ed23a579c3d0839f95a1c85f *man/PolyFeaturePlot.Rd f17f639ccd8cd59c5a4e03e2becf4696 *man/PredictAssay.Rd 8869bca4421ea013b77941372eb690b9 *man/PrepLDA.Rd 0f2a0fc950ec36689ce5fbac9d92c374 *man/PrepSCTFindMarkers.Rd 793b1b21898b626f5d692c4d581d8ac9 *man/PrepSCTIntegration.Rd f520cef43b4f3b7f0b499a06532c69dd *man/PrepareBridgeReference.Rd 054cbc9c1f39942e5f05539f586b442a *man/ProjectCellEmbeddings.Rd 327c82d3ad019831a82c3208c08ed45d *man/ProjectData.Rd 5a806a28fc2e940b92438c8d9ec5ed7e *man/ProjectDim.Rd b57d47361b2034f6c1b7ec55d8f8db4f *man/ProjectDimReduc.Rd 9e9d3ae9d538b40d7d2a45473007ee62 *man/ProjectIntegration.Rd e030ec03f24899c203327c87662904d2 *man/ProjectUMAP.Rd 2a08745773f7b0ecc28482d3c20ee1c2 *man/PseudobulkExpression.Rd 17b702896c10eae030e9eae9de08d1c3 *man/RPCAIntegration.Rd 78d75698b1cd44ae3b8a0f54689f7477 *man/Radius.Rd 39cd8326812aa77d6afefdb44020eb99 *man/Read10X.Rd 81e2a389906224b7522b2fb6cc243356 *man/Read10X_Image.Rd 265cea6403e2bbdc809197a2622cfd07 *man/Read10X_h5.Rd 9ad06beb1620db0cad5f25250839ed94 *man/Read10X_probe_metadata.Rd a3f28764b0382d0e38e938ab29977fb9 *man/ReadAkoya.Rd 5762e9f4a7845230cd51bc15ed3b566a *man/ReadMtx.Rd f5f3478bc3a28ec956ced2065a2f7e53 *man/ReadNanostring.Rd 66132a3df08e866a8cf5b3ca6dfde0b1 *man/ReadParseBio.Rd f1ad11d0fca856124de5a51b74030208 *man/ReadSTARsolo.Rd d1890cac6fdd515b30b1d531860a91cc *man/ReadSlideSeq.Rd 9c7c44f5c66a0cc5558b3ee531806eb5 *man/ReadVitessce.Rd 71b7c76533769db2e6adf481e9f84ee5 *man/ReadVizgen.Rd 452679aa7903d47be3ead7cb4149b11c *man/ReadXenium.Rd 1c547a95ca73f0797b3e5f5d1eedc121 *man/RegroupIdents.Rd 6b79dfaedf538506e893999f5883b60f *man/RelativeCounts.Rd a646d1b292f533da84baa95481c60e07 *man/RenameCells.Rd 8da502d67406e0764ece573bac13d4c1 *man/RidgePlot.Rd 94754f05e8e08b5b4e02b08413e2b01a *man/RunCCA.Rd f44a0003476d96e76ddc024e9b45d92e *man/RunGraphLaplacian.Rd 5e682dcc723d70793d1779200fb4f9d3 *man/RunICA.Rd dd27a4ffc07fc03a6cfb15314a83bfb9 *man/RunLDA.Rd 2cc88e408cf01a584c6ce9cbcc624eb5 *man/RunMarkVario.Rd d13af12c16f6994b7cfbe5162cac3520 *man/RunMixscape.Rd d4f90f5829c8313f3993d969b0dd4fd3 *man/RunMoransI.Rd 73c91ebf79ada8153c5f7358245d5aa3 *man/RunPCA.Rd 535d705ee2626a7e46303c41f9a8af12 *man/RunSLSI.Rd 8fc3087dafe2acaa6e8c013211b6fc51 *man/RunSPCA.Rd 37ce5f6f467602c4a96801a1c87bb64e *man/RunTSNE.Rd 79cc055ad0777623af7b2245c802739f *man/RunUMAP.Rd a05e9eeddfd09e2a16989145cecd9862 *man/SCTAssay-class.Rd 2d3d4cb04473bbca3bc30cf424121383 *man/SCTResults.Rd 7e6c1eb96a4b86e46ff1e41d37fd2112 *man/SCTransform.Rd 14aa955633f43d73c61265e4a6716a44 *man/STARmap-class.Rd d5a943cf68955f0f7d1ac2cd9c7ed9fe 
*man/SampleUMI.Rd f693b40dd329878965a9fde4d8f715da *man/SaveAnnoyIndex.Rd 836946b268469e87061ac24443e98514 *man/ScaleData.Rd 5847c32cd6ec6ffece89dc0f8c27994a *man/ScaleFactors.Rd 6a5a8f3d33c62a3302c03ddd7ae9aa1a *man/ScoreJackStraw.Rd 6a5621426e2d0a21126d7db9a6b71730 *man/SelectIntegrationFeatures.Rd 670dd5fa6cafcf4b0d894aad0c850d43 *man/SelectIntegrationFeatures5.Rd 0dc9c01f042fff512e8622d4c503be3d *man/SelectSCTIntegrationFeatures.Rd b9d1869d6d91a923a32b44a53ded7928 *man/SetIntegrationData.Rd 36ac188e2049ee2afd46f86d72a519db *man/SetQuantile.Rd 303755316142cecedbbb571101e68308 *man/Seurat-class.Rd 44dbf30e97c79290239b953d4e96398a *man/Seurat-package.Rd 25019aff00c3dba933466df65a45aaa7 *man/SeuratCommand-class.Rd c75bfcb240a635f5d043b11ff2ba4945 *man/SeuratTheme.Rd e35dd5cfbea354439a7473e47fdeb249 *man/SingleCorPlot.Rd ea9adb104094cc3cecc89bd72b4a1175 *man/SingleDimPlot.Rd b33fc409e901a04d68aac75a7fd03a85 *man/SingleExIPlot.Rd 66ac79ddf7b86556f61db2a84af6ab52 *man/SingleImageMap.Rd dad89dff350e297d95f9128dd2739645 *man/SingleImagePlot.Rd f7634526d44a43bb62510bbbdd583797 *man/SingleRasterMap.Rd fcc2fa12797822e6fcd6bfa151c87082 *man/SingleSpatialPlot.Rd b00d9bfaf9c29a46f64676ced81b38a7 *man/SketchData.Rd ec6714bdc3bc045d7d84fa911ef3935a *man/SlideSeq-class.Rd 2cf1ea98315c627fdb61cebb8f833dc2 *man/SpatialImage-class.Rd 336246df074a9374a7139b736476dfdd *man/SpatialPlot.Rd 693a850d4184b4e4ff54f1be3def8789 *man/SplitObject.Rd d78c8fa1d9543389a777500aae7f4633 *man/SubsetByBarcodeInflections.Rd 01f9df5bf0a798d91c7ced43f6d56052 *man/TopCells.Rd 98af7163f68d50deaf0a5247ce59e3d8 *man/TopFeatures.Rd e327422353f55d89f20ae13f1b1a153f *man/TopNeighbors.Rd 8ad4830f4b6516cc43e67c068450f52f *man/TransferAnchorSet-class.Rd f653edcc7c4b35648ed55b01e42a2230 *man/TransferData.Rd 83b5e20660c933543d09a928601a187f *man/TransferSketchLabels.Rd ab0dda3b7d66042daa51211a58238cc6 *man/UnSketchEmbeddings.Rd e217e168b9ad9c8fa569d039f2c7560b *man/UpdateSCTAssays.Rd 93c5b0322467bbbf7d21fde062ffb97f *man/UpdateSymbolList.Rd 43aecf42dc587407071dd24fc6ffe21c *man/VST.Rd 89b553d2c34a6f93573ec8586df7ecf2 *man/VariableFeaturePlot.Rd 78cac046284771a0c0bc42e2e7b28a71 *man/VisiumV1-class.Rd 84589eec66838181a13f4c26d850c6da *man/VizDimLoadings.Rd 119ab58354f04d25db6e23e9ab3ecdee *man/VlnPlot.Rd 4c05321384fe1f6619592fa1eaf1f228 *man/as.CellDataSet.Rd 03a3609f7132baaa37f738f6eacdb26b *man/as.Seurat.Rd 683d625a693896729e469f149c286fd7 *man/as.SingleCellExperiment.Rd f853c089d07c84e3f320d41951bd37b6 *man/as.sparse.Rd 18fba05b0ea57229dde949d7521f1a65 *man/cc.genes.Rd 35279bd7531c2f26183b3647e55dfd76 *man/cc.genes.updated.2019.Rd d843eff800711487e12a102038e5e7d4 *man/contrast-theory.Rd 3a3fadaeaaa7f172f82a6f6e15c870a2 *man/fortify-Spatial.Rd 5ac13a66d32c56c822f9a83c0c85a2b8 *man/merge.SCTAssay.Rd 8cffb9bb19589dc9476f3a8f29178bc4 *man/reexports.Rd 24034598a79b8f1461b1647f2ce82b3b *man/roxygen/templates/note-reqdpkg.R 47d96c85155d705c5782b78087dafe8e *man/roxygen/templates/param-dotsi.R ef420f43ac9d0dfa1267c2c79c8cd5fe *man/roxygen/templates/param-dotsm.R 84d7260dd254c3dbeff57c77049aa163 *man/roxygen/templates/section-future.R 19902d3a32e10d5c456ad83cca0e6e57 *man/roxygen/templates/section-progressr.R c734e303b87cac72c78065f539f8c63c *man/roxygen/templates/seealso-methods.R 660b46a409867f33a33a2c74cd70aba3 *man/subset.AnchorSet.Rd 627f40da0df56c5cd99e97c842805fe8 *man/writing-integration.Rd daf246d2559a7df53698b1714f020b7a *src/Makevars d3bfefbd9ecbdd96dce901d0d0193668 *src/ModularityOptimizer.cpp 92e4aaaaf2d6921d718999c5bd9e9913 
*src/ModularityOptimizer.h 24a6ab7279d09348f254732fd5ce2b8f *src/RModularityOptimizer.cpp d3a792f8147b4cc85d4174d9f1945f64 *src/RcppExports.cpp eeb4b89f57a28c13b7d353eac4f0f67a *src/data_manipulation.cpp 531a0da4cef6f8521d98bac8b9160e1e *src/data_manipulation.h cee56bd61191250ef5ae907838891d4a *src/fast_NN_dist.cpp c8f85a446cb1a8373fb3e2d0fdbb944c *src/integration.cpp 91debeffa7ebcac8c26af5d56d94ee0a *src/integration.h c86d8ffb7b0af9bd0ffe54222f0553f5 *src/snn.cpp 694a8ab034ccab2c5f7c35bf47b3469e *src/snn.h 1114ed8c2f704fd1d41de74d80f35ecc *src/stats.cpp f33c09d14c160d9f29a89251fd91a036 *src/valid_pointer.c 1d5009992bb3e703bb3f2482f9b8ddd2 *tests/testdata/barcodes.tsv d2554fb8e4d5af543605d0a729e36109 *tests/testdata/cr3.0/barcodes.tsv.gz 70e1ed4c347e2563a90fd2144f375a26 *tests/testdata/cr3.0/features.tsv.gz 75dc5f8ba97f4c3bc30ceb7926c5759b *tests/testdata/cr3.0/matrix.mtx.gz 91aa47aeda59ef6d82105100a09d1497 *tests/testdata/genes.tsv 6693f4ae1a15e0985ed513ee83fad927 *tests/testdata/matrix.mtx 646ac9cb85813dbe838d804f78f717a4 *tests/testdata/nbt_small.Rdata 0231c122b953c71d602ca29437c61d53 *tests/testdata/visium/filtered_feature_bc_matrix.h5 afec4cc19a51d229e859a437c60187bc *tests/testdata/visium/raw_probe_bc_matrix.h5 ccef3ff06fc1fbc97f8335d812801186 *tests/testdata/visium/spatial/scalefactors_json.json 9d57b44060c559c4a9922ac85d0fde94 *tests/testdata/visium/spatial/tissue_lowres_image.png e940f8084d865f7121e2b29418a773b0 *tests/testdata/visium/spatial/tissue_positions_list.csv 3dcbffe8e9b7dd575e70f9f5cc8b77df *tests/testthat.R 0a75db145a45197a2262cb1b83938124 *tests/testthat/test_data_manipulation.R e294e252d65b16a5926fd9aa0336a274 *tests/testthat/test_differential_expression.R b5610ea132c2cc64345b9a07bada2f33 *tests/testthat/test_dimensional_reduction.R 80339eff79d0602614f786f2a0aec3bc *tests/testthat/test_integratedata.R 4a3e421480b45332627cb12c5187e006 *tests/testthat/test_integration.R 05a99cbde2d9c6f9b5bd2ed0bcb226fc *tests/testthat/test_load_10X.R e096ae1c8e0ea82b43c0e5ff4c5ba116 *tests/testthat/test_modularity_optimizer.R 33597bae1ade792062ad2eb6817e7247 *tests/testthat/test_objects.R 06c0efa2be3b55118235569bdb3dc4ce *tests/testthat/test_preprocessing.R 1e9febbc165630e2064509c7f2a61133 *tests/testthat/test_read_mtx.R 498c5b386f0d6516b3c1d211792b85ab *tests/testthat/test_transferdata.R d524b4f83ed6e190aa17478398c8a1e5 *tests/testthat/test_utilities.R 5124c1daa2609472441694d6ad91a26d *tests/testthat/test_visualization.R Seurat/inst/0000755000176200001440000000000014525500037012473 5ustar liggesusersSeurat/inst/extdata/0000755000176200001440000000000014525500037014125 5ustar liggesusersSeurat/inst/extdata/pbmc_raw.txt0000644000176200001440000011572014525500037016466 0ustar liggesusersATGCCAGAACGACT CATGGCCTGTGCAT GAACCTGATGAACC TGACTGGATTCTCA AGTCAGACTGCACA TCTGATACACGTGT TGGTATCTAAACAG GCAGCTCTGTTTCT GATATAACACGCAT AATGTTGACAGTCA AGGTCATGAGTGTC AGAGATGATCTCGC GGGTAACTCTAGTG CATGAGACACGGGA TACGCCACTCCGAA CTAAACCTGTGCAT GTAAGCACTCATTC TTGGTACTGAATCC CATCATACGGAGCA TACATCACGCTAAC TTACCATGAATCGC ATAGGAGAAACAGA GCGCACGACTTTAC ACTCGCACGAAAGT ATTACCTGCCTTAT CCCAACTGCAATCG AAATTCGAATCACG CCATCCGATTCGCC TCCACTCTGAGCTT CATCAGGATGCACA CTAAACCTCTGACA GATAGAGAAGGGTG CTAACGGAACCGAT AGATATACCCGTAA TACTCTGAATCGAC GCGCATCTTGCTCC GTTGACGATATCGG ACAGGTACTGGTGT GGCATATGCTTATC CATTACACCAACTG TAGGGACTGAACTC GCTCCATGAGAAGT TACAATGATGCTAG CTTCATGACCGAAT CTGCCAACAGGAGC TTGCATTGAGCTAC AAGCAAGAGCTTAG CGGCACGAACTCAG GGTGGAGATTACTC GGCCGATGTACTCT CGTAGCCTGTATGC TGAGCTGAATGCTG CCTATAACGAGACG ATAAGTTGGTACGT 
AAGCGACTTTGACG ACCAGTGAATACCG ATTGCACTTGCTTT CTAGGTGATGGTTG GCACTAGACCTTTA CATGCGCTAGTCAC TTGAGGACTACGCA ATACCACTCTAAGC CATATAGACTAAGC TTTAGCTGTACTCT GACATTCTCCACCT ACGTGATGCCATGA ATTGTAGATTCCCG GATAGAGATCACGA AATGCGTGGACGGA GCGTAAACACGGTT ATTCAGCTCATTGG GGCATATGGGGAGT ATCATCTGACACCA GTCATACTTCGCCT TTACGTACGTTCAG GAGTTGTGGTAGCT GACGCTCTCTCTCG AGTCTTACTTCGGA GGAACACTTCAGAC CTTGATTGATCTTC MS4A1 0 0 0 0 0 0 0 0 0 0 2 2 4 4 2 3 3 4 2 3 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 CD79B 1 0 0 0 0 0 0 0 0 1 2 4 3 3 2 3 1 2 2 5 0 0 0 0 0 0 0 0 0 1 1 0 2 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 1 1 2 2 0 0 3 0 0 0 0 4 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 CD79A 0 0 0 0 0 0 0 0 0 0 0 5 2 2 5 8 1 5 5 12 0 0 1 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 8 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 HLA-DRA 0 1 0 0 1 1 0 1 0 0 14 28 18 7 15 28 7 26 10 16 7 22 0 10 6 0 4 3 7 13 0 1 0 0 1 0 1 1 0 0 0 0 0 0 0 1 1 1 0 0 10 10 4 1 6 28 10 13 5 8 108 93 41 42 138 77 76 15 19 104 1 0 0 0 2 1 1 0 2 7 TCL1A 0 0 0 0 0 0 0 0 0 0 3 0 2 4 0 0 3 3 3 2 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 4 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 HLA-DQB1 1 0 0 0 0 0 0 0 0 0 1 6 2 2 2 8 2 2 1 2 0 3 0 0 0 0 1 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 0 2 0 0 1 1 0 21 21 3 5 11 11 10 1 2 11 0 0 0 0 0 0 0 0 0 1 HVCN1 0 0 0 0 0 0 0 0 0 0 3 1 0 0 2 0 2 1 1 2 1 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 HLA-DMB 0 0 0 0 0 0 0 0 0 0 0 4 1 1 2 2 1 2 0 1 0 1 0 1 0 0 0 0 2 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 0 0 1 0 1 3 2 1 4 5 2 1 1 1 5 0 0 0 0 0 0 0 0 0 0 LTB 3 7 11 13 3 4 6 4 2 21 2 9 2 4 4 0 3 6 5 7 1 0 0 1 1 0 0 0 0 1 1 0 0 1 0 0 0 0 0 0 0 1 1 1 7 1 0 1 5 3 1 2 0 0 1 1 1 1 2 1 0 1 0 5 0 0 0 0 1 4 0 0 1 0 0 0 0 0 0 0 LINC00926 0 0 0 0 0 0 0 0 0 0 0 2 0 1 1 1 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 FCER2 0 0 0 0 0 0 0 0 0 0 0 1 1 0 0 0 1 1 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 SP100 1 0 1 1 0 0 0 0 0 1 0 3 2 0 1 2 2 0 1 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 1 1 0 0 0 0 0 0 0 0 1 0 0 0 0 0 0 0 0 0 1 0 1 0 0 0 1 0 0 3 1 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 NCF1 0 0 0 0 0 0 0 0 0 0 1 0 1 0 0 0 1 2 2 0 0 0 1 0 1 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 PPP3CC 0 0 0 0 0 1 0 0 0 0 0 1 0 1 0 3 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 1 0 0 0 0 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 EAF2 0 0 0 0 0 0 0 0 0 0 3 0 1 0 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 1 0 0 0 0 0 1 0 0 0 1 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 PPAPDC1B 0 0 0 0 0 0 0 0 0 0 0 3 0 1 0 0 0 1 0 2 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 CD19 0 0 0 0 0 0 0 0 0 0 0 1 0 2 0 1 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 KIAA0125 0 0 0 0 0 0 0 0 0 0 0 1 0 0 1 0 0 1 0 3 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 CYB561A3 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 2 1 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 
0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 CD180 0 0 0 0 0 0 0 0 0 0 1 0 0 0 0 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 RP11-693J15.5 0 0 0 0 0 0 0 0 0 0 1 0 1 1 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 FAM96A 0 1 0 0 0 0 0 0 0 0 1 0 0 0 2 0 0 2 1 0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 1 1 1 1 0 0 2 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 CXCR4 1 1 0 6 0 2 4 1 0 4 2 0 4 1 0 0 4 2 6 2 3 0 0 0 0 0 1 0 0 0 0 0 0 0 0 1 0 0 4 0 7 1 3 0 6 1 0 1 0 1 0 1 0 1 0 0 0 0 1 2 12 3 1 3 0 1 2 0 0 2 0 0 0 0 0 0 0 0 0 0 STX10 0 0 1 0 0 1 0 1 0 0 2 0 0 0 2 0 0 0 1 1 0 0 0 1 0 0 0 1 0 0 0 1 0 1 0 0 0 0 0 0 0 0 0 0 1 0 0 0 0 0 1 0 0 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 SNHG7 0 2 0 0 0 0 0 0 0 1 0 1 1 0 2 3 0 1 0 1 0 0 0 0 0 0 0 0 0 1 0 0 0 0 0 0 0 1 0 0 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 2 1 1 0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 0 NT5C 0 0 0 0 0 0 0 0 0 0 2 2 1 0 0 0 1 0 1 1 0 0 0 0 0 0 0 0 0 0 0 0 1 0 1 0 0 0 0 1 0 0 0 0 0 3 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 1 1 1 0 0 0 1 0 0 0 0 0 0 0 0 0 0 1 BANK1 0 1 0 0 0 0 0 0 0 0 0 4 0 0 1 1 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 0 0 0 1 0 0 0 0 IGLL5 0 0 0 0 0 0 0 0 0 0 1 0 15 0 0 0 0 23 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 CD200 0 0 0 0 0 0 0 0 0 0 1 0 0 1 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 FCRLA 0 0 0 0 0 0 0 0 0 0 1 0 0 0 0 1 1 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 2 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 CD3D 4 4 4 5 4 4 3 2 2 2 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 0 0 0 7 0 0 0 0 1 0 1 0 0 2 3 0 3 15 1 3 6 4 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 NOSIP 0 3 2 2 3 1 1 3 2 1 0 0 0 0 0 2 0 0 0 0 0 2 0 0 1 0 0 0 0 0 1 0 1 1 0 0 0 2 0 0 1 0 0 0 0 0 1 0 0 1 0 2 0 0 0 1 1 0 0 0 1 1 0 0 0 1 0 1 0 1 0 0 0 0 0 0 0 0 0 0 SAFB2 0 1 0 1 0 1 1 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 CD2 1 0 2 2 0 1 0 1 2 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 2 0 0 0 0 0 0 0 2 0 0 0 0 0 0 2 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 2 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 IL7R 5 2 1 2 2 0 1 12 0 9 0 0 0 0 1 0 0 0 1 0 1 0 0 0 0 1 0 0 0 1 2 0 0 0 0 0 0 0 0 0 0 1 3 1 1 1 0 2 0 2 0 0 0 0 0 0 0 0 0 0 1 1 1 0 1 0 0 1 0 0 0 0 0 0 0 1 0 0 0 0 PIK3IP1 0 0 1 0 0 2 3 2 3 0 0 0 1 0 0 1 0 0 0 1 0 0 0 0 0 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 1 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 MPHOSPH6 1 1 0 0 0 0 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 1 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 KHDRBS1 0 1 1 1 36 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 1 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 2 0 0 0 0 0 0 0 1 0 1 0 1 0 0 0 1 0 1 0 0 2 0 0 0 0 0 0 0 0 0 0 MAL 1 1 0 1 0 0 0 1 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 CCR7 0 5 0 0 2 0 1 1 0 1 0 0 0 0 0 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 THYN1 0 2 
1 1 0 2 1 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 1 0 0 0 0 0 0 0 0 1 0 0 1 0 0 1 0 0 0 0 0 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 TAF7 0 2 0 2 1 2 0 2 3 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 0 0 0 0 0 0 0 0 2 3 0 0 1 0 0 1 1 1 1 0 1 0 0 0 0 0 0 0 0 0 0 0 1 LDHB 3 2 1 6 5 3 4 0 1 6 0 1 0 0 0 0 2 0 1 0 1 2 0 2 1 0 1 0 0 0 0 0 0 2 2 0 1 0 0 0 2 1 4 0 4 4 0 0 0 2 0 0 1 0 0 2 0 1 0 1 2 0 0 5 2 2 0 1 2 0 1 0 0 0 0 1 0 0 0 1 TMEM123 3 3 0 4 2 1 1 2 1 1 0 1 1 0 0 0 1 3 1 1 0 0 0 0 3 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 2 2 0 1 0 0 0 1 0 0 1 1 0 1 1 0 0 0 0 1 0 0 0 2 3 1 0 0 0 0 0 0 0 0 0 0 0 1 0 1 CCDC104 0 0 0 2 0 1 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 EPC1 1 0 1 0 0 1 0 1 1 1 0 0 0 0 1 1 0 0 1 0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 0 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 2 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 EIF4A2 3 1 2 5 2 4 3 2 3 0 0 2 1 1 5 0 0 1 0 0 0 0 0 0 1 0 1 1 0 1 2 2 0 2 0 0 0 1 3 1 1 0 0 1 2 0 2 3 0 1 0 2 0 0 2 0 1 0 2 1 4 0 0 4 2 4 1 0 0 1 0 0 0 0 0 0 0 0 0 0 CD3E 0 2 1 4 3 1 3 4 2 0 0 0 0 0 0 0 0 1 1 1 1 0 0 0 0 0 0 0 0 0 0 1 0 1 0 0 0 0 1 0 1 0 2 0 1 2 0 1 5 2 0 0 0 0 0 0 0 0 0 0 1 0 0 0 0 0 0 0 0 1 0 0 0 0 0 0 0 0 1 0 TMUB1 1 0 0 0 0 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 0 0 0 0 0 0 1 0 0 0 0 0 0 1 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 BLOC1S4 1 0 2 0 2 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 0 0 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 0 1 0 0 0 0 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 ACSM3 1 2 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 TMEM204 1 0 0 0 0 0 0 1 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 SRSF7 2 0 1 1 54 2 1 1 1 3 1 2 0 1 0 0 0 0 0 0 0 2 0 0 1 0 0 0 0 3 1 0 1 15 0 0 0 0 0 1 2 1 3 1 0 1 1 1 0 1 0 1 0 0 0 1 0 3 1 0 0 2 1 1 3 0 1 5 13 2 0 0 0 0 0 0 0 0 0 0 ACAP1 0 0 1 2 0 1 2 2 0 1 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 1 0 1 0 0 1 0 0 0 0 0 0 3 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 TNFAIP8 1 3 2 3 2 0 0 0 1 0 0 0 0 0 1 1 0 0 1 1 0 0 0 0 1 0 0 1 0 0 0 0 0 1 0 0 1 0 0 0 0 0 1 0 0 0 0 0 0 0 1 0 0 0 0 1 0 0 0 1 0 1 1 0 4 0 2 0 0 0 0 0 0 0 0 0 0 0 0 0 CD7 2 2 2 3 2 1 0 0 3 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 3 0 1 1 1 3 4 2 1 1 2 1 4 0 2 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 TAGAP 1 1 1 1 0 0 0 1 2 0 0 0 0 0 0 0 1 0 0 0 0 0 0 0 1 0 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 2 0 0 0 0 0 1 0 0 0 0 0 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 DNAJB1 2 0 0 2 0 0 2 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 0 1 0 0 2 0 0 0 0 1 0 0 0 0 0 0 0 1 1 1 1 0 0 0 1 0 0 1 0 2 0 2 0 1 0 0 1 0 0 0 0 0 0 0 0 0 0 0 ASNSD1 1 0 0 1 0 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 S1PR4 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 0 1 0 0 0 0 0 0 1 0 0 0 1 0 1 0 0 0 0 0 1 1 1 0 0 1 1 1 0 0 1 0 39 0 0 0 0 0 0 0 1 0 0 2 3 0 0 2 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 CTSW 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 1 3 2 3 2 4 8 6 1 11 1 4 1 2 1 2 2 1 5 1 0 0 0 0 0 0 0 0 0 0 0 0 1 0 1 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 GZMK 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 
0 0 0 0 0 0 0 0 0 1 0 2 1 2 0 0 2 0 3 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 NKG7 0 0 0 0 1 0 0 0 0 0 0 0 0 2 0 0 0 1 0 0 2 1 0 0 0 0 1 0 0 1 35 14 12 30 20 27 28 10 25 27 31 22 7 2 4 14 16 4 29 8 5 3 0 0 0 0 5 0 0 0 0 1 0 0 1 3 0 1 0 1 0 0 0 0 0 0 0 0 0 0 IL32 1 0 9 8 1 0 3 3 0 3 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 2 0 5 4 0 0 0 0 7 8 5 5 0 7 1 6 7 6 1 0 0 0 0 0 0 1 1 0 0 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 DNAJC2 0 0 0 0 0 1 0 0 0 0 0 0 1 0 0 0 0 1 0 0 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 0 1 1 1 1 0 0 0 0 0 0 1 0 0 1 0 0 0 1 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 LYAR 0 1 1 1 3 0 0 0 0 0 0 0 0 0 0 0 2 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 2 0 0 1 1 0 1 1 2 47 0 1 1 1 1 0 2 0 0 0 0 2 0 1 0 0 0 0 0 0 1 0 0 0 0 0 0 1 0 0 0 0 0 0 0 CST7 0 0 0 0 1 0 0 0 0 0 0 1 0 0 0 0 0 0 0 0 1 0 0 0 0 0 0 0 0 0 4 4 2 7 2 4 3 3 2 5 2 3 1 1 0 2 8 4 5 2 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 0 0 0 1 0 0 0 0 0 0 0 0 0 0 LCK 0 3 2 0 1 1 2 0 0 2 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 2 1 1 1 2 1 0 1 1 2 0 1 2 1 1 1 1 0 0 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 CCL5 0 0 0 2 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 0 0 0 0 1 0 2 5 14 0 29 1 7 5 25 0 14 27 3 13 17 7 3 16 12 3 1 0 0 0 0 0 0 0 0 0 1 0 0 1 1 0 0 0 0 8 5 4 10 11 30 8 5 9 2 HNRNPH1 0 0 0 0 0 0 0 1 0 0 0 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 0 0 0 1 0 1 0 0 1 0 1 1 0 0 0 0 0 0 0 0 0 0 0 2 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 SSR2 0 2 2 4 1 1 0 0 0 6 0 1 0 0 1 1 0 1 0 0 0 0 0 0 3 0 1 0 1 0 0 2 0 0 1 0 1 0 1 2 1 2 1 1 1 2 4 1 2 4 2 1 0 0 2 0 3 1 3 1 0 2 3 0 1 3 2 0 4 2 0 0 0 0 0 0 0 0 0 0 DLGAP1-AS1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 1 0 0 0 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 GIMAP1 0 2 0 0 0 0 0 1 0 2 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 0 0 0 0 0 0 0 1 1 0 2 0 1 1 1 1 0 2 1 0 0 1 1 17 0 0 0 1 0 1 0 1 0 2 0 1 0 0 1 1 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 MMADHC 0 0 0 0 1 0 0 0 0 0 0 2 0 0 0 0 0 0 0 1 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 1 1 2 0 1 1 1 0 1 1 0 1 0 0 0 0 0 0 0 0 0 1 0 0 1 2 2 0 0 0 0 0 0 0 0 0 0 0 0 0 0 ZNF76 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 1 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 CD8A 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 0 0 0 0 0 0 3 0 1 3 3 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 PTPN22 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 0 0 1 1 2 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 GYPC 1 2 2 0 0 1 0 0 2 1 0 0 0 0 0 0 0 0 0 2 0 0 0 0 0 0 0 0 1 0 1 1 0 1 1 0 0 1 0 1 0 1 3 0 1 0 0 7 0 1 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 0 0 0 2 0 0 0 0 0 0 0 0 0 0 HNRNPF 0 0 0 1 0 1 0 1 2 0 0 2 1 0 1 0 0 1 0 1 1 0 0 0 1 0 0 0 0 1 0 0 0 0 0 1 1 1 2 1 1 2 1 1 1 0 1 2 0 2 0 1 0 0 0 0 1 1 0 1 0 0 1 1 0 0 2 0 0 0 0 0 0 0 0 0 0 0 0 0 RPL7L1 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 0 0 0 0 0 1 0 0 0 0 1 0 0 0 0 0 1 0 0 0 0 0 0 0 0 1 0 1 0 1 0 1 1 0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 0 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 KLRG1 0 0 0 0 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 2 0 0 3 0 2 0 1 0 0 0 0 1 4 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 CRBN 1 0 0 0 0 0 0 0 0 0 0 1 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 0 0 0 0 0 0 2 1 1 0 1 1 1 1 0 1 0 0 0 0 0 0 0 0 0 2 0 1 0 1 0 1 0 0 0 0 0 0 0 1 0 0 0 0 0 0 0 SATB1 0 0 0 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 0 0 0 0 0 0 
13 0 0 0 0 0 0 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 SIT1 0 0 0 0 0 0 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 2 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 PMPCB 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 0 0 0 0 0 0 1 0 0 0 2 1 0 1 0 0 0 1 0 0 0 1 0 0 0 0 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 NRBP1 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 0 0 0 2 0 0 0 0 1 0 1 1 0 0 1 1 0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 0 0 0 TCF7 0 0 1 0 1 0 0 0 1 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 1 1 2 0 0 0 0 1 0 0 0 0 0 0 0 0 0 1 1 0 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 HNRNPA3 0 0 0 1 2 0 0 0 0 0 1 1 0 0 0 0 0 0 1 0 2 0 0 0 0 0 0 0 0 0 0 0 0 2 2 1 1 1 1 0 2 1 2 0 2 1 0 1 2 0 0 1 0 0 0 2 1 1 0 1 1 0 0 0 1 4 0 1 0 2 0 0 0 0 0 0 0 0 0 0 S100A8 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 18 5 25 5 25 6 24 40 16 11 1 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 2 2 0 4 3 0 1 1 2 0 0 2 0 2 1 9 1 23 4 0 0 1 0 0 0 1 0 0 0 2 S100A9 0 1 1 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 30 12 51 22 85 3 54 55 35 17 0 0 0 1 1 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 20 6 1 0 10 4 8 6 0 0 0 0 1 10 0 41 11 32 17 0 3 0 0 0 0 0 0 0 0 7 LYZ 1 1 1 0 0 1 0 0 1 0 1 4 0 1 0 0 0 1 1 0 50 29 25 49 98 11 59 28 34 16 0 0 1 0 2 0 0 1 0 0 0 1 1 0 0 0 0 1 0 0 41 4 3 3 14 17 7 6 9 6 76 20 24 79 53 53 87 76 42 114 3 1 1 0 1 0 0 0 0 22 CD14 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 2 2 4 1 0 1 1 3 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 0 0 0 0 2 2 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 FCN1 1 1 0 0 0 0 0 0 1 0 1 0 0 0 0 0 0 0 0 0 10 6 5 9 7 1 1 2 8 7 0 0 0 0 0 0 0 1 0 0 0 0 0 0 0 0 1 0 0 2 13 7 5 1 4 3 1 1 2 0 0 0 3 1 2 4 6 1 0 0 0 0 0 0 0 0 0 0 0 0 TYROBP 0 0 0 2 0 0 1 0 1 0 0 0 0 0 0 0 0 0 0 0 14 13 3 10 16 4 13 12 19 12 3 0 4 3 6 7 3 4 5 15 2 0 1 1 0 0 0 0 0 0 11 21 2 5 21 13 16 9 16 17 2 8 6 9 11 14 10 10 6 7 0 0 0 0 0 0 0 0 0 14 ASGR1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 3 2 0 0 1 0 1 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 1 0 0 1 1 0 0 0 0 0 0 0 0 0 0 NFKBIA 0 0 1 1 0 0 0 0 0 4 0 1 1 0 1 0 0 0 1 1 3 13 5 0 11 0 2 3 5 10 0 1 0 1 0 1 0 0 0 0 5 1 1 0 0 0 1 0 1 0 2 2 2 0 2 1 1 1 2 9 2 2 0 1 1 6 1 3 2 4 0 0 0 0 0 0 0 0 0 6 TYMP 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 4 7 1 6 5 1 6 4 5 1 0 0 0 2 0 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 6 5 1 1 6 4 3 2 4 5 1 3 2 5 14 11 3 4 8 4 0 0 0 0 0 0 0 0 0 2 CTSS 1 1 0 0 1 2 0 1 1 1 1 0 1 0 0 0 1 2 0 2 15 9 1 5 7 3 4 4 11 7 0 0 1 0 0 0 1 0 0 4 1 1 0 0 1 0 0 0 0 0 8 8 7 3 10 15 18 19 4 17 5 3 1 5 0 3 6 2 0 3 1 0 0 0 0 0 0 0 0 3 TSPO 0 0 0 0 1 1 1 0 0 1 0 1 0 0 0 1 1 0 0 0 1 2 6 0 36 1 5 0 3 5 1 0 0 0 0 0 1 1 1 0 1 1 0 0 2 0 1 0 0 0 2 4 0 1 2 3 6 4 2 5 1 0 0 4 2 5 10 6 4 2 0 0 0 0 0 2 0 0 0 3 RBP7 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 1 0 2 1 4 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 CTSB 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 4 1 1 7 1 1 2 0 0 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 1 0 4 0 1 0 1 0 3 0 0 2 1 1 0 2 0 2 2 1 0 0 0 0 0 0 1 0 0 0 0 1 LGALS1 1 0 1 2 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 2 14 10 8 11 4 6 7 22 37 3 4 9 6 1 3 14 2 1 4 1 3 0 0 0 0 1 0 1 0 5 12 4 2 16 10 6 2 12 16 8 13 21 9 20 10 23 5 28 13 0 0 0 0 1 0 0 0 0 10 FPR1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 0 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 2 0 1 0 2 1 0 0 0 0 0 2 1 0 0 0 0 0 0 0 0 0 0 0 0 0 VSTM1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 1 0 1 2 0 1 0 0 0 
0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 BLVRA 0 0 1 0 0 0 0 0 0 0 0 1 0 0 0 0 1 0 0 0 1 3 1 2 0 1 0 1 1 3 0 0 0 1 0 0 0 0 0 0 0 0 1 0 0 0 0 1 0 0 0 0 0 0 2 2 5 1 2 0 1 0 1 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 MPEG1 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 0 0 0 0 1 1 0 1 1 1 0 0 2 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 0 0 1 0 2 1 0 1 0 0 0 0 0 0 0 1 0 0 0 0 BID 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 2 27 0 1 1 1 0 0 1 1 0 0 0 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 0 3 6 1 2 2 4 2 2 2 2 0 0 3 1 0 2 0 0 0 0 0 0 0 0 0 0 SMCO4 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 0 2 1 0 1 0 2 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 0 1 2 1 1 0 0 0 0 0 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 CFD 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 3 4 2 1 1 0 0 2 15 2 0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 0 0 0 0 0 4 5 2 0 0 5 2 3 2 3 0 0 0 0 1 2 0 0 0 0 0 0 0 0 0 0 0 0 0 3 LINC00936 0 0 0 1 0 0 0 0 1 0 0 1 0 0 0 0 0 0 0 0 5 1 0 0 1 1 0 0 2 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 1 3 1 2 1 1 0 3 1 1 1 1 1 0 0 0 0 0 0 0 0 0 1 LGALS2 0 0 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 12 6 2 1 6 0 0 0 5 2 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 0 0 0 0 3 0 0 0 0 0 0 0 0 0 3 10 1 2 3 4 4 1 3 6 0 0 0 0 0 0 0 0 0 3 MS4A6A 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 2 2 1 3 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 0 0 0 0 4 1 0 7 7 0 2 1 2 0 0 0 0 0 0 0 0 0 0 1 FCGRT 0 1 0 0 0 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 2 0 0 1 14 1 2 0 1 2 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 3 2 0 1 3 1 1 0 1 1 3 1 0 2 2 3 3 1 4 3 0 0 0 0 0 0 0 0 0 2 LGALS3 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 4 0 4 4 1 3 0 2 0 0 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 1 0 0 2 2 0 0 4 2 0 2 1 0 0 5 1 0 6 2 7 2 2 0 0 0 0 0 0 1 0 0 0 1 NUP214 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 3 2 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 2 0 1 2 0 0 0 0 1 1 0 1 0 1 0 0 0 0 0 0 0 0 0 0 0 1 SCO2 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 0 0 0 0 2 0 5 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 1 1 0 0 1 2 0 0 2 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 IL17RA 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 IFI6 1 0 0 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 5 3 0 0 3 0 1 5 0 4 0 0 0 0 1 0 1 1 2 0 0 0 0 0 0 0 0 0 1 0 0 2 0 1 1 3 1 3 0 2 0 4 0 2 6 2 5 1 0 0 0 0 0 0 0 0 0 0 0 4 HLA-DPA1 0 0 0 0 0 0 0 0 0 0 3 8 2 2 5 9 0 5 1 5 0 13 2 1 0 1 0 0 7 6 0 1 0 2 0 0 1 0 0 0 0 0 0 0 1 3 0 0 1 0 12 4 2 1 5 5 7 14 5 11 75 52 11 19 54 23 45 10 23 37 0 0 0 0 0 0 0 0 0 5 FCER1A 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 16 1 2 4 8 5 8 4 7 0 0 0 0 0 0 0 0 0 0 0 CLEC10A 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 5 2 4 2 3 6 4 2 1 0 0 0 0 0 0 0 0 0 1 HLA-DMA 0 0 1 0 0 0 0 0 0 0 0 1 0 0 0 4 1 1 0 1 0 4 1 1 0 0 0 0 1 2 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 0 4 0 0 0 0 6 6 5 4 6 5 6 5 3 5 0 0 0 0 0 0 0 0 0 1 RGS1 0 0 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 3 3 1 3 0 1 3 0 1 2 0 0 0 0 0 0 0 0 0 0 HLA-DPB1 0 0 0 0 0 0 0 0 0 0 4 10 4 4 8 23 7 0 4 6 0 18 1 2 0 3 0 1 7 7 2 4 0 0 0 0 0 0 0 0 0 4 0 0 1 2 0 0 0 0 8 3 5 2 3 7 6 5 9 4 102 78 23 25 69 24 43 8 10 50 1 0 0 0 0 0 0 0 0 5 HLA-DQA1 0 0 0 1 0 0 0 0 0 0 0 4 4 1 0 8 1 5 0 1 1 5 0 0 0 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 1 0 
0 0 0 0 0 0 0 0 2 0 0 0 1 1 2 0 0 25 39 5 2 16 6 11 3 4 9 0 0 0 0 0 0 0 0 0 0 RNF130 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 2 0 0 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 1 1 2 2 2 2 0 1 1 1 6 3 5 1 0 0 0 0 0 0 0 0 0 0 HLA-DRB5 0 0 0 0 0 0 1 0 0 0 1 4 3 0 4 8 1 2 2 4 0 8 1 1 0 0 0 0 4 1 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 0 4 5 0 0 3 3 6 3 6 2 11 26 5 2 31 21 21 2 3 10 0 0 0 0 0 0 0 0 0 1 HLA-DRB1 0 0 0 0 0 0 0 0 0 0 2 10 6 1 5 16 5 11 5 8 2 12 1 5 1 0 3 0 5 3 0 2 0 1 0 0 0 0 1 0 0 0 0 0 0 3 0 0 0 0 8 4 0 0 7 7 13 6 6 4 50 53 10 9 68 36 49 3 9 26 0 0 0 0 0 0 0 0 0 4 CST3 0 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 1 1 13 28 15 11 13 7 37 5 20 18 1 0 0 0 0 0 1 1 0 0 0 0 0 0 0 0 0 1 0 0 16 32 7 9 11 17 33 10 15 25 61 31 25 14 58 112 37 18 29 125 5 1 0 0 5 1 3 0 0 16 IL1B 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 4 0 0 0 0 1 0 0 2 3 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 1 8 0 3 1 2 3 6 1 0 0 0 1 0 0 0 0 0 0 5 POP7 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 1 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 2 1 0 0 0 1 1 33 0 0 0 0 1 3 0 0 0 0 0 0 0 0 0 0 HLA-DQA2 0 0 0 0 0 0 0 0 0 0 0 2 0 0 1 0 1 1 0 0 0 2 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 2 0 0 7 9 1 0 6 1 4 1 0 5 0 0 0 0 0 0 0 0 0 0 CD1C 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 2 5 0 0 3 3 0 0 0 1 0 0 0 0 0 0 0 0 0 0 GSTP1 0 0 1 0 1 0 0 0 0 0 0 0 0 0 0 3 0 1 0 0 2 3 1 6 5 1 3 1 4 2 1 2 0 1 2 0 1 2 0 1 2 0 0 0 0 0 0 0 1 0 4 1 2 0 1 5 0 0 1 1 9 4 5 7 2 5 12 7 10 18 0 0 0 0 1 0 0 0 0 4 EIF3G 1 1 1 1 2 0 0 1 0 2 0 1 0 0 0 2 0 0 0 0 0 0 1 0 2 0 0 1 2 1 3 0 1 0 3 0 0 1 0 3 1 1 0 0 0 0 2 1 1 1 3 3 0 1 2 2 0 1 2 0 1 0 1 2 1 0 1 1 3 43 0 0 0 0 0 0 0 0 0 3 VPS28 0 0 0 3 0 0 0 0 1 0 0 0 1 0 2 0 0 0 0 0 0 0 0 1 2 0 1 1 1 0 0 0 1 1 1 0 1 1 1 1 0 0 0 0 0 0 0 0 0 0 1 1 0 1 0 0 0 2 3 0 4 3 0 1 0 1 38 0 0 1 0 0 1 0 0 2 0 0 0 2 LY86 0 0 0 0 0 0 0 0 0 0 1 1 0 1 0 0 0 2 1 1 0 0 0 0 2 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 2 0 0 2 0 3 2 3 1 2 0 1 8 1 0 0 0 0 0 0 0 0 0 ZFP36L1 0 0 1 0 1 1 0 0 0 0 0 0 0 0 1 0 0 0 0 1 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 0 0 1 0 0 0 0 0 0 0 1 0 0 0 0 0 0 0 1 1 0 21 0 1 0 0 0 0 0 0 0 0 0 0 0 ZNF330 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 0 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 2 0 0 0 0 0 0 1 32 0 0 0 0 0 0 0 0 0 0 0 ANXA2 0 0 0 0 1 1 0 0 0 1 0 1 1 0 0 1 0 0 0 0 1 3 0 3 1 1 1 0 2 3 1 0 0 4 1 0 4 1 0 1 0 0 1 0 0 0 1 1 0 2 9 3 1 0 4 2 3 2 0 6 5 1 5 1 22 10 9 1 3 3 0 0 0 0 0 0 0 0 0 4 GRN 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 4 1 1 0 1 0 5 1 0 2 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 2 0 1 1 2 3 0 1 1 3 6 1 0 2 5 4 8 2 4 5 0 1 0 0 0 0 0 0 0 0 CFP 0 0 0 0 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 7 1 1 1 0 2 0 2 1 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 2 1 3 0 0 1 1 0 3 2 4 0 2 0 1 39 1 3 5 1 0 0 0 0 0 0 0 0 0 1 HSP90AA1 2 0 1 2 3 2 2 1 0 3 0 0 1 0 0 2 4 0 0 1 0 0 0 0 0 0 0 0 3 3 1 4 5 1 1 0 1 0 0 0 0 0 0 2 0 1 0 1 3 0 3 1 0 0 0 1 0 0 1 1 3 1 0 2 64 2 3 1 1 1 0 0 0 0 0 0 0 0 0 0 FUOM 0 0 0 0 0 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 4 1 0 1 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 LST1 0 0 0 3 2 1 0 0 0 0 0 0 0 0 0 0 0 0 1 0 3 6 1 4 8 3 5 0 7 13 0 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 15 17 8 11 18 13 36 17 12 27 12 7 7 4 8 10 4 2 6 6 0 0 0 0 0 0 0 0 0 7 AIF1 2 0 1 0 0 0 2 1 0 0 0 0 0 0 1 0 0 0 0 1 5 7 6 5 4 3 1 2 10 12 1 0 0 0 0 0 0 
0 0 0 0 0 0 0 2 0 0 0 0 0 7 12 7 6 32 33 12 19 18 29 6 7 1 3 11 7 9 4 1 4 0 0 0 0 1 0 0 0 0 5 PSAP 0 0 2 0 3 2 0 0 0 0 0 0 0 0 2 0 0 0 0 0 6 5 1 5 3 2 1 1 6 4 0 1 2 0 1 0 1 1 0 0 3 1 0 0 1 0 0 1 2 1 8 8 6 2 9 9 10 8 5 10 1 2 1 6 6 4 4 2 2 7 0 0 1 1 1 0 0 0 0 1 YWHAB 0 0 0 1 1 0 0 1 0 1 0 0 2 0 1 0 0 1 1 0 1 0 0 1 2 0 0 1 2 0 2 0 1 1 0 1 0 1 0 2 0 1 1 0 0 1 2 2 1 1 2 2 1 0 50 1 1 1 3 1 5 0 0 0 2 5 4 0 1 3 0 0 0 0 0 0 0 0 0 1 MYO1G 0 0 2 1 0 1 0 0 0 0 0 1 1 0 1 0 0 0 0 0 0 1 1 0 0 0 0 0 2 0 1 0 1 0 0 0 0 0 1 1 1 1 0 0 1 1 0 0 1 0 0 1 0 0 3 3 1 27 1 1 0 0 0 0 0 0 2 2 1 2 0 0 0 0 0 0 0 0 0 1 SAT1 0 1 0 0 0 1 1 1 1 2 0 1 0 0 2 5 0 0 0 0 4 15 8 5 4 2 8 2 11 18 3 0 0 0 0 0 1 0 1 1 0 1 3 0 0 0 0 0 2 1 21 25 6 10 26 26 16 15 11 22 10 5 5 16 2 3 16 3 4 5 3 4 2 6 3 17 3 6 4 3 RGS2 0 0 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 3 0 0 0 3 0 1 1 1 1 0 1 0 0 0 0 0 0 0 0 1 0 1 0 0 0 1 0 0 0 2 3 16 0 1 11 3 5 4 6 8 1 1 0 0 0 1 1 1 2 0 0 0 0 0 0 0 0 0 0 SERPINA1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 0 6 4 0 2 0 0 1 0 3 3 0 0 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 2 1 1 1 3 4 5 5 3 6 1 1 0 3 0 1 0 0 0 1 0 0 0 0 0 0 0 0 0 2 IFITM3 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 5 0 0 0 2 4 1 2 7 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 5 3 4 1 11 9 2 5 7 10 0 12 2 1 3 4 4 0 0 1 0 0 0 0 0 0 0 0 0 1 FCGR3A 0 0 0 0 1 0 0 1 0 0 0 1 0 0 0 0 0 0 0 0 1 1 0 0 0 0 0 0 0 1 6 2 2 1 0 1 2 1 2 6 2 0 0 0 0 0 1 0 0 0 0 5 1 2 14 4 18 9 5 11 0 0 0 0 0 0 1 0 1 0 0 0 0 0 0 0 0 0 0 0 LILRA3 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 3 0 0 1 1 0 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 S100A11 2 0 1 2 1 0 0 1 1 0 0 1 0 0 0 0 0 0 0 0 2 10 4 2 2 2 1 6 5 6 6 0 0 0 0 0 1 1 0 0 1 0 0 0 1 1 0 1 0 0 17 13 1 2 9 12 14 8 7 13 5 4 5 3 11 9 9 4 5 2 0 0 0 0 0 0 0 0 0 1 FCER1G 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 0 6 3 4 6 1 2 4 4 9 8 8 0 3 1 2 5 6 6 1 6 3 0 0 0 0 0 0 0 0 0 12 12 2 4 35 16 24 9 9 30 8 8 3 3 13 8 7 5 8 3 0 0 0 1 0 0 0 1 0 4 TNFRSF1B 0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 0 0 0 0 0 0 1 0 0 1 0 0 0 0 0 0 0 0 0 0 0 1 0 1 0 0 0 0 0 0 0 0 0 0 0 0 2 1 0 3 4 1 2 1 1 0 0 0 0 0 1 0 0 0 2 0 0 0 0 0 0 0 0 0 0 IFITM2 3 0 3 3 1 3 3 1 0 3 0 1 2 0 1 0 0 0 2 3 6 4 0 0 1 1 0 1 3 6 8 2 3 5 2 1 5 1 3 2 7 4 2 2 5 1 1 4 1 2 5 10 1 4 17 8 33 8 14 19 4 7 4 3 2 2 0 1 6 4 0 0 0 0 1 1 0 0 0 1 WARS 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 2 0 0 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 0 1 0 2 2 1 1 0 3 1 1 2 0 0 0 0 0 2 1 0 1 0 0 0 0 0 0 0 0 0 0 0 IFI30 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 2 0 2 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 3 3 0 1 2 1 6 1 5 6 6 0 3 1 1 3 3 0 1 4 0 0 0 0 0 0 0 0 0 1 MS4A7 0 0 0 1 0 0 0 0 0 0 0 2 0 0 0 0 0 0 0 0 0 0 0 0 2 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 5 1 0 2 4 3 1 0 2 0 0 1 0 0 0 0 0 1 1 0 0 0 0 0 0 0 0 0 0 C5AR1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 0 0 0 0 0 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 2 4 2 1 1 3 0 1 0 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 HCK 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 0 1 0 3 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 2 0 1 0 3 1 2 3 5 0 1 0 0 1 1 0 0 1 4 0 0 0 0 0 0 0 0 0 1 COTL1 0 0 4 2 1 2 0 1 1 3 0 2 0 0 0 1 0 1 0 0 6 15 2 4 7 3 6 0 4 20 0 1 1 0 0 0 1 0 0 0 1 2 0 0 5 1 0 0 2 0 9 20 9 3 6 9 91 11 18 18 18 2 9 11 12 11 7 5 4 25 1 2 0 3 0 2 3 0 4 7 LGALS9 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 0 0 0 0 0 0 0 0 0 0 0 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 2 3 0 0 6 0 0 3 0 3 0 0 1 1 0 1 0 1 1 1 0 0 0 0 0 0 0 0 0 0 CD68 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 2 4 0 0 0 3 0 1 3 0 1 0 0 
0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 6 4 1 0 4 3 0 4 2 8 0 0 0 0 1 1 0 0 0 1 0 0 0 0 1 0 0 1 1 1 RP11-290F20.3 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 2 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 5 0 0 4 0 5 2 1 4 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 RHOC 0 0 0 1 1 0 0 0 0 0 0 0 0 0 0 1 0 0 0 0 0 1 0 0 0 0 0 0 0 1 2 0 0 1 0 1 3 1 1 2 0 0 0 0 0 0 1 1 0 0 1 6 0 1 1 2 7 2 6 3 2 0 1 0 2 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 CARD16 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 2 1 1 1 1 1 1 0 0 2 0 2 1 0 0 0 0 0 0 0 0 0 0 0 0 1 1 0 0 0 2 2 0 2 2 1 0 6 3 6 1 0 0 1 2 1 0 1 0 1 0 0 0 0 0 0 0 0 0 0 LRRC25 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 0 0 0 1 0 0 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 1 0 1 6 4 1 0 2 0 1 1 0 0 0 1 1 0 2 0 0 0 0 0 0 0 0 0 1 COPS6 0 0 1 0 0 0 1 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 1 0 0 3 0 1 0 0 0 0 0 0 1 0 0 26 0 0 2 2 1 0 0 1 0 0 0 1 1 1 1 0 1 0 0 0 0 0 0 0 0 0 0 1 ADAR 0 0 0 1 1 0 0 0 0 0 0 1 0 1 0 0 0 0 1 0 0 1 1 0 0 0 1 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 0 0 0 0 0 1 1 2 25 0 1 0 0 0 0 0 0 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 PPBP 0 0 1 0 0 0 0 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 43 41 36 55 58 54 66 34 30 6 GPX1 0 0 0 1 1 1 0 1 0 1 0 1 1 0 1 0 1 0 0 0 4 5 3 5 12 1 15 2 3 1 0 1 0 0 0 0 2 0 1 0 0 0 0 1 1 0 1 2 0 0 5 3 0 0 1 1 0 1 1 2 6 7 2 6 24 16 28 3 6 3 18 8 12 18 18 28 11 13 16 9 TPM4 0 0 0 0 1 0 0 1 0 0 0 0 0 0 0 0 0 1 0 0 0 0 0 0 1 0 1 0 0 0 2 1 0 0 0 0 1 1 1 0 0 0 0 0 0 0 0 0 0 0 2 0 0 0 0 0 0 0 0 1 1 0 0 0 1 2 1 0 1 1 4 4 2 2 2 15 2 1 3 2 PF4 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 0 0 0 0 14 11 14 18 23 62 9 14 6 0 SDPR 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 11 3 13 8 8 29 3 6 5 2 NRGN 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 0 0 0 0 0 2 0 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 2 0 0 0 0 1 5 3 3 2 7 3 1 1 2 SPARC 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 8 3 2 2 3 9 3 3 4 2 GNG11 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 6 5 9 10 7 23 12 6 11 1 CLU 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 14 5 8 11 15 6 4 3 5 2 HIST1H2AC 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 5 3 5 5 2 42 2 1 2 1 NCOA4 0 1 0 0 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 1 0 0 2 1 1 0 0 1 0 0 0 0 0 8 2 0 12 8 7 3 2 6 0 GP9 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 3 3 2 3 11 6 5 3 0 FERMT3 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 0 1 0 0 0 0 0 0 1 0 0 0 1 0 1 0 0 0 0 0 0 1 1 0 0 1 0 0 0 0 2 0 0 0 2 0 0 1 0 0 1 2 5 4 4 1 6 0 4 0 1 ODC1 1 0 0 0 0 1 0 0 0 0 0 0 0 0 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 0 0 1 0 0 0 0 0 0 0 2 0 1 0 0 0 3 0 1 2 1 14 2 0 4 1 CD9 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 3 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 
0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 6 4 4 3 4 3 4 20 5 0 RUFY1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 0 0 0 0 0 1 0 0 0 0 0 0 0 0 0 3 3 2 3 2 9 0 0 1 0 TUBB1 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 4 3 5 2 14 32 2 0 8 0 TALDO1 1 2 0 0 2 0 0 0 0 0 0 0 0 0 1 2 0 0 0 0 1 1 2 3 5 1 2 0 3 2 0 0 0 0 0 0 0 0 0 2 1 0 0 0 0 0 0 0 1 0 1 2 0 0 0 2 2 2 1 2 1 0 0 1 3 1 3 1 1 2 2 0 1 2 1 10 37 0 2 3 TREML1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 3 0 2 7 4 0 1 3 5 2 NGFRAP1 0 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 4 1 2 0 2 3 1 2 4 0 PGRMC1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 4 1 0 4 2 6 2 2 0 0 CA2 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 3 1 4 1 3 8 0 13 2 0 ITGA2B 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 5 1 4 2 4 1 4 1 0 0 MYL9 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 4 0 3 4 8 1 2 0 0 1 TMEM40 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 3 1 1 2 1 2 3 0 PARVB 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 1 0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 5 4 1 4 0 0 1 0 0 0 PTCRA 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 2 0 4 0 0 20 2 2 1 0 ACRBP 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 1 0 0 25 0 3 1 1 TSC22D1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 6 1 0 26 1 0 0 1 VDAC3 0 0 0 1 0 0 1 0 0 1 0 29 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 0 1 0 0 0 0 0 0 2 0 0 1 1 0 0 0 0 0 0 0 1 0 0 1 0 0 0 0 0 0 0 2 1 0 0 0 0 0 0 0 0 0 0 41 0 0 2 1 0 1 1 1 GZMB 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 1 0 0 0 0 0 27 2 1 10 8 5 10 7 4 11 3 0 0 0 0 0 6 0 2 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 GZMA 0 0 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 2 5 3 4 10 8 12 10 3 13 1 8 2 1 0 0 0 3 3 2 0 0 0 0 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 GNLY 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 35 0 15 3 29 11 22 15 18 18 10 0 0 3 0 0 4 1 3 0 0 0 0 0 0 0 1 0 0 1 0 0 1 0 0 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 FGFBP2 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 5 3 9 2 6 3 6 8 2 5 4 1 0 0 0 2 9 0 3 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 AKR1C3 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 7 0 1 1 0 1 5 4 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 CCL4 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 0 0 0 0 0 0 5 3 1 0 3 1 1 2 1 1 3 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 
0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 PRF1 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 14 1 4 9 7 10 10 2 4 7 6 13 0 0 0 0 6 0 5 3 0 0 0 0 0 0 0 0 1 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 0 0 GZMH 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 3 5 7 1 0 3 1 0 2 6 0 0 0 0 0 10 0 9 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 XBP1 1 0 1 1 2 0 0 1 0 0 0 1 0 0 1 0 0 0 0 0 1 0 0 0 0 0 0 0 0 0 1 1 2 2 4 1 0 2 1 3 1 0 0 0 0 0 1 1 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 1 0 0 2 0 0 0 0 0 0 0 0 0 0 0 0 0 GZMM 0 1 0 0 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 4 2 1 1 2 3 2 2 6 2 1 0 0 1 0 3 2 0 1 0 0 0 0 0 0 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 PTGDR 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 0 0 1 0 1 51 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 0 IGFBP7 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 1 0 4 0 0 3 0 1 7 4 0 3 1 0 0 0 0 0 0 0 0 0 1 0 0 1 0 0 0 0 1 1 0 0 0 1 3 2 0 0 1 0 0 0 0 0 0 0 0 0 0 0 TTC38 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 2 1 1 1 0 0 1 0 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 KLRD1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 1 0 1 1 0 1 2 2 1 0 1 0 1 0 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 ARHGDIA 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 1 0 0 0 0 0 0 0 1 1 1 0 1 1 1 1 1 0 1 0 1 25 1 0 0 0 0 0 0 0 0 0 2 0 1 0 0 3 2 1 0 0 1 0 1 1 1 2 4 1 0 0 0 0 0 0 0 1 0 0 0 0 IL2RB 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 2 1 1 1 0 3 0 0 0 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 CLIC3 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 3 0 4 0 1 2 3 0 1 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 PPP1R18 0 1 0 0 0 1 0 0 0 1 0 0 1 0 1 0 0 0 1 0 0 2 0 0 0 0 0 1 0 0 2 2 1 1 1 1 3 0 3 1 0 1 2 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 0 0 0 1 0 0 0 0 0 0 0 0 0 1 CD247 0 1 1 0 2 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 3 1 1 3 0 2 2 0 1 1 2 1 0 0 0 0 1 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 ALOX5AP 1 0 0 0 1 0 0 1 0 0 1 0 0 0 0 2 1 0 1 0 0 0 0 0 0 0 0 1 0 0 3 0 2 1 1 3 1 2 1 2 0 2 0 0 0 0 0 1 0 0 0 0 0 0 1 0 0 0 0 0 0 0 0 0 1 1 0 0 1 0 0 0 0 0 0 0 0 0 0 0 XCL2 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 3 2 0 0 0 0 1 2 0 1 0 0 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 C12orf75 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 4 1 0 1 0 0 4 2 1 2 0 1 0 0 0 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 1 0 0 0 0 0 1 0 0 0 0 0 0 2 0 0 0 0 0 0 RARRES3 1 0 0 3 0 1 1 0 1 0 0 2 0 0 1 0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 7 3 2 0 1 3 3 5 0 1 0 2 1 1 0 2 2 0 1 1 0 0 0 0 2 0 0 0 0 0 0 1 0 0 2 1 1 0 0 0 0 0 1 0 0 1 0 0 0 0 PCMT1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 1 0 0 0 2 1 0 58 0 0 1 0 2 1 0 0 1 0 0 0 0 1 0 0 0 0 1 0 0 2 0 1 0 0 0 2 0 0 0 4 2 1 0 3 1 0 0 0 0 0 0 0 0 0 0 LAMP1 1 0 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 2 1 3 2 1 2 1 0 1 0 1 0 0 1 2 0 1 0 1 0 0 0 0 0 0 0 3 0 0 0 0 0 0 1 0 0 0 0 0 0 0 0 1 0 0 0 0 SPON2 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 0 0 3 5 1 3 0 0 1 2 0 2 3 0 0 0 0 0 3 1 3 0 0 0 0 0 0 0 0 0 0 0 1 1 0 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 S100B 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 10 0 0 1 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 
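The rows above are the tail of what appears to be a plain-text gene-by-cell count table bundled with the package sources (one gene symbol per row followed by its per-cell counts). As a hedged sketch only, the snippet below shows how such a table could be read into a Seurat object; the file name "counts.txt", the header-less layout, and the uniqueness of the gene symbols are assumptions made for illustration, not details taken from the package.

library(Seurat)
library(Matrix)

# Hypothetical path; assumes a whitespace-delimited table whose first field is a
# unique gene symbol and whose remaining fields are integer counts, one cell per column.
counts <- read.table("counts.txt", header = FALSE, row.names = 1)
counts <- Matrix(as.matrix(counts), sparse = TRUE)  # store sparsely, as Seurat expects
obj <- CreateSeuratObject(counts = counts, project = "demo")
obj <- NormalizeData(obj)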
Seurat/inst/CITATION0000644000176200001440000000751214525500037013635 0ustar liggesuserscitHeader("To cite Seurat in publications, please use:") bibentry(bibtype = "article", author = c( as.person("Yuhan Hao"), as.person("Tim Stuart"), as.person("Madeline H Kowalski"), as.person("Saket Choudhary"), as.person("Paul Hoffman"), as.person("Austin Hartman"), as.person("Avi Srivastava"), as.person("Gesmira Molla"), as.person("Shaista Madad"), as.person("Carlos Fernandez-Granda"), as.person("Rahul Satija") ), title = "Dictionary learning for integrative, multimodal and scalable single-cell analysis", journal = "Nature Biotechnology", year = "2023", doi = "10.1038/s41587-023-01767-y", url = "https://doi.org/10.1038/s41587-023-01767-y", textVersion = "Hao et al. Dictionary learning for integrative, multimodal and scalable single-cell analysis. Nature Biotechnology (2023) [Seurat V5]" ) bibentry(bibtype = "article", author = c( as.person("Yuhan Hao"), as.person("Stephanie Hao"), as.person("Erica Andersen-Nissen"), as.person("William M. Mauck III"), as.person("Shiwei Zheng"), as.person("Andrew Butler"), as.person("Maddie J. Lee"), as.person("Aaron J. Wilk"), as.person("Charlotte Darby"), as.person("Michael Zagar"), as.person("Paul Hoffman"), as.person("Marlon Stoeckius"), as.person("Efthymia Papalexi"), as.person("Eleni P. Mimitou"), as.person("Jaison Jain"), as.person("Avi Srivastava"), as.person("Tim Stuart"), as.person("Lamar B. Fleming"), as.person("Bertrand Yeung"), as.person("Angela J. Rogers"), as.person("Juliana M. McElrath"), as.person("Catherine A. Blish"), as.person("Raphael Gottardo"), as.person("Peter Smibert"), as.person("Rahul Satija") ), title = "Integrated analysis of multimodal single-cell data", journal = "Cell", year = "2021", doi = "10.1016/j.cell.2021.04.048", url = "https://doi.org/10.1016/j.cell.2021.04.048", textVersion = "Hao and Hao et al. Integrated analysis of multimodal single-cell data. Cell (2021) [Seurat V4]" ) bibentry(bibtype = "article", author = c( as.person("Tim Stuart"), as.person("Andrew Butler"), as.person("Paul Hoffman"), as.person("Christoph Hafemeister"), as.person("Efthymia Papalexi"), as.person("William M Mauck III"), as.person("Yuhan Hao"), as.person("Marlon Stoeckius"), as.person("Peter Smibert"), as.person("Rahul Satija") ), title = "Comprehensive Integration of Single-Cell Data", journal = "Cell", year = "2019", volume = "177", pages = "1888-1902", doi = "10.1016/j.cell.2019.05.031", url = "https://doi.org/10.1016/j.cell.2019.05.031", textVersion = "Stuart and Butler et al. Comprehensive Integration of Single-Cell Data. Cell (2019) [Seurat V3]" ) bibentry(bibtype = "article", author = c( as.person("Andrew Butler"), as.person("Paul Hoffman"), as.person("Peter Smibert"), as.person("Efthymia Papalexi"), as.person("Rahul Satija") ), title = "Integrating single-cell transcriptomic data across different conditions, technologies, and species", journal = "Nature Biotechnology", year = "2018", volume = "36", pages = "411-420", doi = "10.1038/nbt.4096", url = "https://doi.org/10.1038/nbt.4096", textVersion = "Butler et al. Integrating single-cell transcriptomic data across different conditions, technologies, and species. 
Nat Biotechnol (2018) [Seurat V2]" ) bibentry(bibtype = "article", author = c( as.person("Rahul Satija"), as.person("Jeffrey A Farrell"), as.person("David Gennert"), as.person("Alexander F Schier"), as.person("Aviv Regev") ), title = "Spatial reconstruction of single-cell gene expression data", journal = "Nature Biotechnology", year = "2015", volume = "33", pages = "495-502", doi = "10.1038/nbt.3192", url = "https://doi.org/10.1038/nbt.3192", textVersion = "Satija and Farrell et al. Spatial reconstruction of single-cell gene expression data. Nat Biotechnol (2015) [Seurat V1]" )
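The CITATION file above is what R's citation() machinery reads for an installed package; both calls below are standard utils functions, so a user can retrieve these references directly:

citation("Seurat")             # formatted versions of the entries defined above
toBibtex(citation("Seurat"))   # the same entries rendered as BibTeX records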

[binary content omitted: zlib-compressed streams of a bundled PDF ("Seurat: Tools for Single Cell Genomics", Rahul Satija, pdfTeX, 2023-11-16) that do not render as text]
Seurat/tests/0000755000176200001440000000000014525500056012661 5ustar liggesusersSeurat/tests/testthat/0000755000176200001440000000000014525771316014532 5ustar liggesusersSeurat/tests/testthat/test_transferdata.R0000644000176200001440000001653614525500037020373 0ustar liggesusers# Tests for integration/transfer related fxns set.seed(42) pbmc_small <- suppressWarnings(UpdateSeuratObject(pbmc_small)) # Setup test objects ref <- pbmc_small query <- CreateSeuratObject( counts = as.sparse( GetAssayData( object = pbmc_small[['RNA']], layer = "counts") + rpois(n = ncol(pbmc_small), lambda = 1 ) ) ) query <- NormalizeData(object = query, verbose = FALSE) query <- FindVariableFeatures(object = query, verbose = FALSE, nfeatures = 100) ref <- FindVariableFeatures(object = ref, verbose = FALSE, nfeatures = 100) anchors <- FindTransferAnchors(reference = ref, query = query, k.filter = 50) # Tests for TransferData # ------------------------------------------------------------------------------ context("TransferData") preds.standard <- TransferData(anchorset = anchors, refdata = ref$RNA_snn_res.1, verbose = FALSE) test_that("TransferData default work", { # categorical metadata expect_equal(dim(preds.standard), c(80, 5)) expect_equal(colnames(preds.standard)[c(1, 5)], c("predicted.id", "prediction.score.max")) expect_equal(rownames(preds.standard), Cells(query)) expect_equal(preds.standard[1, 1], "1") expect_equal(preds.standard[1, 5], 0.4280746, tolerance = 1e-6) expect_equal(as.vector(rowSums(as.matrix(preds.standard[, 2:4]))), rep(1, times = ncol(query))) expect_true(inherits(preds.standard, "data.frame")) # continuous assay data pred.assay <- TransferData(anchorset = anchors, refdata = GetAssayData(ref[["RNA"]]), verbose = FALSE) expect_equal(dim(pred.assay), c(230, 80)) expect_equal(GetAssayData(pred.assay, layer = "counts"), new("matrix")) expect_equal(GetAssayData(pred.assay, layer = "scale.data"), new("matrix")) expect_equal(colnames(pred.assay), Cells(query)) expect_equal(rownames(pred.assay), rownames(ref[["RNA"]])) expect_equal(sum(GetAssayData(pred.assay)[1, ]), 64.46388, tolerance = 1e-6) expect_equal(sum(GetAssayData(pred.assay)[, 1]), 281.0306, tolerance = 1e-6) expect_true(inherits(pred.assay, "Assay")) expect_equal(pred.assay@var.features, logical(0)) expect_equal(ncol(pred.assay@meta.features), 0) }) test_that("TransferData can return predictions assay, ", { pred.assay <- TransferData(anchorset = anchors, refdata = ref$RNA_snn_res.1, prediction.assay = TRUE, verbose = FALSE) expect_true(inherits(pred.assay, "Assay")) expect_equal(dim(pred.assay), c(4, 80)) expect_equal(GetAssayData(pred.assay, layer = "counts"), new("matrix")) expect_equal(GetAssayData(pred.assay, layer = "scale.data"), new("matrix")) expect_equal(colnames(pred.assay), Cells(query)) expect_equal(pred.assay@var.features, logical(0)) expect_equal(ncol(pred.assay@meta.features), 0) expect_equal(sum(GetAssayData(pred.assay)[1, ]), 26.59365, tolerance = 1e-6) expect_equal(sum(GetAssayData(pred.assay)[, 1]), 1.428075, tolerance = 1e-6) expect_equal(as.vector(colSums(GetAssayData(pred.assay)[1:3, ])), rep(1, ncol(query))) }) test_that("TransferData 
handles weight.reduction properly, ", { skip_on_cran() # test for custom dimreduc custom.dr <- anchors@object.list[[1]][["pcaproject"]] custom.dr <- subset(x = custom.dr, cells = anchors@query.cells) custom.dr <- RenameCells(object = custom.dr, new.names = sapply(X = Cells(custom.dr), FUN = function(x){ x <- gsub(pattern = "_query", replacement = "", x = x) })) expect_error(TransferData(anchorset = anchors, refdata = ref$RNA_snn_res.1, weight.reduction = custom.dr, dims = 1:100)) preds <-TransferData(anchorset = anchors, refdata = ref$RNA_snn_res.1, verbose = FALSE) cdr.preds <- TransferData(anchorset = anchors, refdata = ref$RNA_snn_res.1, weight.reduction = custom.dr, verbose = FALSE, dims = 1:30) expect_equal(preds, cdr.preds) # weight.reduction = "pca pca.preds <- TransferData(anchorset = anchors, refdata = ref$RNA_snn_res.1, query = query, weight.reduction = "pca", verbose = FALSE) expect_true(inherits(pca.preds, "Seurat")) expect_equal(sum(GetAssayData(pca.preds[['prediction.score.id']])[1, ]), 27.83330252, tolerance = 1e-6) # weight.reduction = "cca" anchors.cca <- FindTransferAnchors(reference = ref, query = query, k.filter = 50, reduction = "cca") cca.preds <- TransferData(anchorset = anchors.cca, refdata = ref$RNA_snn_res.1, weight.reduction = "cca", verbose = FALSE) expect_true(inherits(cca.preds, "data.frame")) expect_equal(sum(cca.preds[, 2]), 43.61738383, tolerance = 1e-6) }) test_that("TransferData with l2.norm works", { skip_on_cran() preds <- TransferData(anchorset = anchors, refdata = ref$RNA_snn_res.1, l2.norm = TRUE, verbose = FALSE) expect_equal(dim(preds), c(80, 5)) expect_equal(colnames(preds)[c(1, 5)], c("predicted.id", "prediction.score.max")) expect_equal(rownames(preds), Cells(query)) expect_equal(preds[1, 1], "0") expect_equal(preds[1, 5], 0.3973124793, tolerance = 1e-6) expect_equal(as.vector(rowSums(as.matrix(preds[, 2:4]))), rep(1, times = ncol(query))) expect_true(inherits(preds, "data.frame")) }) test_that("TransferData with other k.weight works", { skip_on_cran() preds <- TransferData(anchorset = anchors, refdata = ref$RNA_snn_res.1, k.weight = 10, verbose = FALSE) expect_equal(dim(preds), c(80, 5)) expect_equal(colnames(preds)[c(1, 5)], c("predicted.id", "prediction.score.max")) expect_equal(rownames(preds), Cells(query)) expect_equal(preds[1, 1], "2") expect_equal(preds[1, 5], 0.6145459065, tolerance = 1e-6) expect_equal(as.vector(rowSums(as.matrix(preds[, 2:4]))), rep(1, times = ncol(query))) expect_true(inherits(preds, "data.frame")) }) test_that("TransferData with reference specified works", { skip_on_cran() pred2 <- TransferData(anchorset = anchors, refdata = "RNA_snn_res.1", reference = ref, verbose = FALSE) expect_equal(preds.standard, pred2) }) test_that("TransferData throws expected errors ", { expect_error(TransferData(anchorset = anchors, refdata = ref$RNA_snn_res.1, weight.reduction = "BAD")) # better message expect_error(TransferData(anchorset = anchors, refdata = ref$RNA_snn_res.1, weight.reduction = "cca")) # better message expect_error(TransferData(anchorset = anchors, refdata = ref$RNA_snn_res.1, dims = 1:100)) expect_error(ransferData(anchorset = anchors, refdata = ref$RNA_snn_res.1, k.weight = 1000)) expect_error(suppressWarnings(TransferData(anchorset = anchors, refdata = "RNA_snn_res.1"))) expect_error(suppressWarnings(TransferData(anchorset = anchors, refdata = ref$RNA_snn_res.1[1:10]))) expect_error(TransferData(anchorset = anchors, refdata = ref$RNA_snn_res.1, query = subset(x = query, cells = Cells(query)[1:10]))) }) 
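# For orientation, a condensed sketch of the label-transfer workflow these tests
# exercise, reusing the toy `ref` and `query` objects built at the top of this file.
# It is left commented out so that it neither advances the RNG nor modifies objects
# the remaining tests depend on; uncomment to run interactively.
# anchors <- FindTransferAnchors(reference = ref, query = query, k.filter = 50)
# preds   <- TransferData(anchorset = anchors, refdata = ref$RNA_snn_res.1)
# query   <- AddMetaData(query, metadata = preds)  # adds predicted.id and the prediction scores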
test_that("TransferData with multiple items to transfer works ", { skip_on_cran() preds <- TransferData(anchorset = anchors, refdata = list( ids = ref$RNA_snn_res.1, groups = ref$groups, dat = GetAssayData(ref[["RNA"]])), verbose = FALSE) expect_equal(length(preds), 3) expect_equal(preds[[1]], preds.standard) }) test_that("TransferData can return a modified query object ", { query <- TransferData(anchorset = anchors, refdata = ref$RNA_snn_res.1, query = query, verbose = FALSE) expect_true("prediction.score.id" %in% Assays(query)) expect_true("predicted.id" %in% colnames(query[[]])) expect_true("predicted.id.score" %in% colnames(query[[]])) query <- TransferData(anchorset = anchors, refdata = ref$RNA_snn_res.1, query = query, store.weights = TRUE, verbose = FALSE) expect_equal(dim(Tool(query, slot = "TransferData")$weights.matrix), c(128, 80)) }) Seurat/tests/testthat/test_differential_expression.R0000644000176200001440000006532314525500037022626 0ustar liggesusers# Tests for functions in differential_expression.R suppressWarnings(RNGversion(vstr = "3.5.3")) set.seed(seed = 42) # Tests for FindMarkers # -------------------------------------------------------------------------------- context("FindMarkers") clr.obj <- suppressWarnings(NormalizeData(pbmc_small, normalization.method = "CLR")) sct.obj <- suppressWarnings(suppressMessages(SCTransform(pbmc_small, vst.flavor = "v1"))) markers.0 <- suppressWarnings(FindMarkers(object = pbmc_small, ident.1 = 0, verbose = FALSE, base = exp(1),pseudocount.use = 1)) markers.01 <- suppressWarnings(FindMarkers(object = pbmc_small, ident.1 = 0, ident.2 = 1, verbose = FALSE, base = exp(1),pseudocount.use = 1)) results.clr <- suppressWarnings(FindMarkers(object = clr.obj, ident.1 = 0, ident.2 = 1, verbose = FALSE, base = exp(1), pseudocount.use = 1)) results.sct <- suppressWarnings(FindMarkers(object = sct.obj, ident.1 = 0, ident.2 = 1, verbose = FALSE, base = exp(1), pseudocount.use = 1)) test_that("Default settings work as expected with pseudocount = 1", { expect_error(FindMarkers(object = pbmc_small)) expect_error(FindMarkers(object = pbmc_small, ident.1 = "test")) expect_error(FindMarkers(object = pbmc_small, ident.1 = 0, ident.2 = "test")) expect_equal(colnames(x = markers.0), c("p_val", "avg_logFC", "pct.1", "pct.2", "p_val_adj")) expect_equal(markers.0[1, "p_val"], 9.572778e-13, tolerance = 1e-18) expect_equal(markers.0[1, "avg_logFC"], -4.180029, tolerance = 1e-6) expect_equal(markers.0[1, "pct.1"], 0.083) expect_equal(markers.0[1, "pct.2"], 0.909) expect_equal(markers.0[1, "p_val_adj"], 2.201739e-10, tolerance = 1e-15) expect_equal(nrow(x = markers.0), 228) expect_equal(rownames(markers.0)[1], "HLA-DPB1") expect_equal(markers.01[1, "p_val"], 1.702818e-11, tolerance = 1e-16) expect_equal(markers.01[1, "avg_logFC"], -2.638242, tolerance = 1e-6) expect_equal(markers.01[1, "pct.1"], 0.111) expect_equal(markers.01[1, "pct.2"], 1.00) expect_equal(markers.01[1, "p_val_adj"], 3.916481e-09, tolerance = 1e-14) expect_equal(nrow(x = markers.01), 222) expect_equal(rownames(x = markers.01)[1], "TYMP") # CLR normalization expect_equal(results.clr[1, "p_val"], 1.209462e-11, tolerance = 1e-16) expect_equal(results.clr[1, "avg_logFC"], -2.946633, tolerance = 1e-6) expect_equal(results.clr[1, "pct.1"], 0.111) expect_equal(results.clr[1, "pct.2"], 0.96) expect_equal(results.clr[1, "p_val_adj"], 2.781762e-09, tolerance = 1e-14) expect_equal(nrow(x = results.clr), 213) expect_equal(rownames(x = results.clr)[1], "S100A8") # SCT normalization expect_equal(results.sct[1, 
"p_val"], 6.225491e-11, tolerance = 1e-16) expect_equal(results.sct[1, "avg_logFC"], -2.545867, tolerance = 1e-6) expect_equal(results.sct[1, "pct.1"], 0.111) expect_equal(results.sct[1, "pct.2"], 0.96) expect_equal(results.sct[1, "p_val_adj"], 1.369608e-08, tolerance = 1e-13) expect_equal(nrow(x = results.sct), 214) expect_equal(rownames(x = results.sct)[1], "TYMP") }) tymp.results <- suppressWarnings(FindMarkers(object = pbmc_small, ident.1 = 0, features = "TYMP", verbose = FALSE, base = exp(1),pseudocount.use = 1)) vargenes.results <- suppressWarnings(FindMarkers(object = pbmc_small, ident.1 = 0, features = VariableFeatures(object = pbmc_small), verbose = FALSE, base = exp(1),pseudocount.use = 1)) test_that("features parameter behaves correctly ", { expect_equal(nrow(x = tymp.results), 1) expect_equal(tymp.results[1, "p_val"], 3.227445e-07, tolerance = 1e-12) expect_equal(tymp.results[1, "avg_logFC"], -2.188179, tolerance = 1e-6) expect_equal(tymp.results[1, "pct.1"], 0.111) expect_equal(tymp.results[1, "pct.2"], 0.682) expect_equal(tymp.results[1, "p_val_adj"], 7.423123e-05, tolerance = 1e-10) expect_equal(rownames(x = tymp.results)[1], "TYMP") expect_equal(nrow(x = vargenes.results), 20) expect_equal(vargenes.results[20, "p_val"], 4.225151e-01, tolerance = 1e-6) expect_equal(vargenes.results[20, "avg_logFC"], 1.796863, tolerance = 1e-6) expect_equal(vargenes.results[20, "pct.1"], 0.139) expect_equal(vargenes.results[20, "pct.2"], 0.091) expect_equal(vargenes.results[20, "p_val_adj"], 1.000000e+00) expect_equal(rownames(x = vargenes.results)[20], "PARVB") }) results <- suppressWarnings(FindMarkers(object = pbmc_small, ident.1 = Cells(x = pbmc_small)[1:40], ident.2 = Cells(x = pbmc_small)[41:80], verbose = FALSE, base = exp(1),pseudocount.use = 1)) test_that("passing cell names works", { expect_equal(nrow(x = results), 216) expect_equal(results[1, "p_val"], 0.0001690882) expect_equal(results[1, "avg_logFC"], -1.967123, tolerance = 1e-6) expect_equal(results[1, "pct.1"], 0.075) expect_equal(results[1, "pct.2"], 0.450) expect_equal(results[1, "p_val_adj"], 0.03889028) expect_equal(rownames(x = results)[1], "IFI30") }) results <- suppressWarnings(FindMarkers(object = pbmc_small, ident.1 = 0, ident.2 = 1, verbose = FALSE, base = exp(1), pseudocount.use = 0.1)) results.clr <- suppressWarnings(FindMarkers(object = clr.obj, ident.1 = 0, ident.2 = 1, verbose = FALSE, base = exp(1), pseudocount.use = 0.1)) results.sct <- suppressWarnings(FindMarkers(object = sct.obj, ident.1 = 0, ident.2 = 1, verbose = FALSE, base = exp(1), pseudocount.use = 0.1, vst.flavor = "v1")) test_that("setting pseudocount.use works", { expect_equal(nrow(x = results), 222) expect_equal(results[1, "avg_logFC"], -2.640848, tolerance = 1e-6) expect_equal(nrow(x = results.clr), 214) expect_equal(results.clr[1, "avg_logFC"], -3.322368, tolerance = 1e-6) expect_equal(nrow(results.sct), 215) expect_equal(results.sct[1, "avg_logFC"], -2.668866, tolerance = 1e-6) }) results <- suppressWarnings(FindMarkers(object = pbmc_small, ident.1 = 0, ident.2 = 1, verbose = FALSE, base = exp(1), pseudocount.use = 1, mean.fxn = rowMeans)) results.clr <- suppressWarnings(FindMarkers(object = clr.obj, ident.1 = 0, ident.2 = 1, verbose = FALSE, base = exp(1), pseudocount.use = 1, mean.fxn = rowMeans)) results.sct <- suppressWarnings(FindMarkers(object = sct.obj, ident.1 = 0, ident.2 = 1, verbose = FALSE, base = exp(1), pseudocount.use = 1, mean.fxn = rowMeans, vst.flaovr = "v1")) test_that("setting mean.fxn works", { expect_equal(nrow(x = 
results), 216) expect_equal(results[1, "avg_logFC"], -4.204346, tolerance = 1e-6) expect_equal(results.clr[1, "avg_logFC"], -1.353025, tolerance = 1e-6) expect_equal(results.sct[1, "avg_logFC"], -1.064042, tolerance = 1e-6) }) results <- suppressWarnings(FindMarkers(object = pbmc_small, ident.1 = 0, ident.2 = 1, logfc.threshold = 2, verbose = FALSE, base = exp(1), pseudocount.use = 1)) test_that("logfc.threshold works", { expect_equal(nrow(x = results), 139) expect_gte(min(abs(x = results$avg_logFC)), 2) }) results <- expect_warning(FindMarkers(object = pbmc_small, ident.1 = 0, ident.2 = 1, logfc.threshold = 100, verbose = FALSE, base = exp(1), pseudocount.use = 1)) test_that("logfc.threshold warns when none met", { expect_equal(nrow(x = results), 0) }) results <- suppressWarnings(FindMarkers(object = pbmc_small, ident.1 = 0, ident.2 = 1, min.pct = 0.5, verbose = FALSE, base = exp(1), pseudocount.use = 1)) test_that("min.pct works", { expect_equal(nrow(x = results), 66) expect_gte(min(apply(X = results, MARGIN = 1, FUN = function(x) max(x[3], x[4]))), 0.5) }) results <- expect_warning(FindMarkers(object = pbmc_small, ident.1 = 0, ident.2 = 1, min.pct = 2.0, verbose = FALSE, base = exp(1), pseudocount.use = 1)) test_that("min.pct warns when none met", { expect_equal(nrow(x = results), 0) }) results <- suppressWarnings(FindMarkers(object = pbmc_small, ident.1 = 0, ident.2 = 1, min.diff.pct = 0.5, verbose = FALSE, base = exp(1), pseudocount.use = 1)) test_that("min.diff.pct works", { expect_equal(nrow(x = results), 44) expect_gte(min(apply(X = results, MARGIN = 1, FUN = function(x) abs(x[4] - x[3]))), 0.5) }) results <- expect_warning(FindMarkers(object = pbmc_small, ident.1 = 0, ident.2 = 1, min.diff.pct = 1.0, verbose = FALSE, base = exp(1), pseudocount.use = 1)) test_that("min.diff.pct warns when none met", { expect_equal(nrow(x = results), 0) }) results <- suppressWarnings(FindMarkers(object = pbmc_small, ident.1 = 0, ident.2 = 1, only.pos = TRUE, verbose = FALSE, base = exp(1), pseudocount.use = 1)) test_that("only.pos works", { expect_equal(nrow(x = results), 127) expect_true(all(results$avg_logFC > 0)) }) results <- suppressWarnings(FindMarkers(object = pbmc_small, ident.1 = 0, ident.2 = 1, max.cells.per.ident = 20, verbose = FALSE, base = exp(1),pseudocount.use = 1)) test_that("max.cells.per.ident works", { expect_equal(nrow(x = results), 222) expect_equal(results[1, "p_val"], 3.428568e-08, tolerance = 1e-13) expect_equal(results[1, "avg_logFC"], -2.638242, tolerance = 1e-6) expect_equal(results[1, "pct.1"], 0.111) expect_equal(results[1, "pct.2"], 1) expect_equal(results[1, "p_val_adj"], 7.885706e-06) expect_equal(rownames(x = results)[1], "TYMP") }) results <- suppressWarnings(FindMarkers(object = pbmc_small, ident.1 = 0, ident.2 = 1, latent.vars= "groups", verbose = FALSE, test.use = 'LR', base = exp(1), pseudocount.use = 1)) test_that("latent.vars works", { expect_error(FindMarkers(object = pbmc_small, ident.1 = 0, ident.2 = 1, latent.vars= "fake", verbose = FALSE)) expect_warning(FindMarkers(object = pbmc_small, ident.1 = 0, ident.2 = 1, latent.vars= "groups", verbose = FALSE)) expect_equal(nrow(x = results), 222) expect_equal(results[1, "p_val"], 2.130202e-16, tolerance = 1e-21) expect_equal(results[1, "avg_logFC"], -3.102866, tolerance = 1e-6) expect_equal(results[1, "pct.1"], 0.417) expect_equal(results[1, "pct.2"], 1) expect_equal(results[1, "p_val_adj"], 4.899466e-14, tolerance = 1e-19) expect_equal(rownames(x = results)[1], "LYZ") }) results <- 
suppressWarnings(FindMarkers(object = pbmc_small, ident.1 = "g1", ident.2 = "g2", group.by= "groups", verbose = FALSE, base = exp(1), pseudocount.use = 1)) t2 <- pbmc_small Idents(object = t2) <- "groups" results2 <- suppressWarnings(FindMarkers(object = t2, ident.1 = "g1", ident.2 = "g2", verbose = FALSE, base = exp(1), pseudocount.use = 1)) test_that("group.by works", { expect_equal(nrow(x = results), 190) expect_equal(results, results2) expect_equal(results[1, "p_val"], 0.02870319) expect_equal(results[1, "avg_logFC"], 0.8473584, tolerance = 1e-6) expect_equal(results[1, "pct.1"], 0.455) expect_equal(results[1, "pct.2"], 0.194) expect_equal(results[1, "p_val_adj"], 1) expect_equal(rownames(x = results)[1], "NOSIP") }) results <- suppressWarnings(FindMarkers(object = pbmc_small, ident.1 = "g1", ident.2 = "g2", group.by= "groups", subset.ident = 0, verbose = FALSE, base = exp(1), pseudocount.use = 1)) t2 <- subset(x = pbmc_small, idents = 0) Idents(object = t2) <- "groups" results2 <- suppressWarnings(FindMarkers(object = t2, ident.1 = "g1", ident.2 = "g2", verbose = FALSE, base = exp(1), pseudocount.use = 1)) test_that("subset.ident works", { expect_equal(nrow(x = results), 183) expect_equal(results, results2) expect_equal(results[1, "p_val"], 0.01293720) expect_equal(results[1, "avg_logFC"], 1.912603, tolerance = 1e-6) expect_equal(results[1, "pct.1"], 0.50) expect_equal(results[1, "pct.2"], 0.125) expect_equal(results[1, "p_val_adj"], 1) expect_equal(rownames(x = results)[1], "TSPO") }) results <- suppressWarnings(FindMarkers(object = pbmc_small, ident.1 = 0, ident.2 = 1, reduction = "pca", verbose = FALSE, base = exp(1), pseudocount.use = 1)) test_that("reduction works", { expect_equal(results[1, "p_val"], 1.664954e-10, tolerance = 1e-15) expect_equal(results[1, "avg_diff"], -2.810453669, tolerance = 1e-6) expect_equal(results[1, "p_val_adj"], 3.163412e-09, tolerance = 1e-14) expect_equal(rownames(x = results)[1], "PC_2") }) results <- FindMarkers(object = pbmc_small, ident.1 = 0, ident.2 = 1, test.use = "bimod", verbose = FALSE, base = exp(1), pseudocount.use = 1) test_that("bimod test works", { expect_equal(nrow(x = results), 222) expect_equal(results[1, "p_val"], 4.751376e-17, tolerance = 1e-22) expect_equal(results[1, "avg_logFC"], -2.57219, tolerance = 1e-6) expect_equal(results[1, "pct.1"], 0.306) expect_equal(results[1, "pct.2"], 1.00) expect_equal(results[1, "p_val_adj"], 1.092816e-14, tolerance = 1e-19) expect_equal(rownames(x = results)[1], "CST3") }) results <- FindMarkers(object = pbmc_small, ident.1 = 0, ident.2 = 1, test.use = "roc", verbose = FALSE, base = exp(1), pseudocount.use = 1) test_that("roc test works", { expect_equal(nrow(x = results), 222) # expect_equal(colnames(x = results), c("myAUC", "avg_diff", "power", "pct.1", "pct.2")) expect_equal(colnames(x = results), c("myAUC", "avg_diff", "power", "avg_logFC", "pct.1", "pct.2")) expect_equal(results["CST3", "myAUC"], 0.018) expect_equal(results["CST3", "avg_diff"], -2.552769, tolerance = 1e-6) expect_equal(results["CST3", "power"], 0.964) expect_equal(results["CST3", "pct.1"], 0.306) expect_equal(results["CST3", "pct.2"], 1.00) expect_equal(rownames(x = results)[1], "LYZ") }) results <- FindMarkers(object = pbmc_small, ident.1 = 0, ident.2 = 1, test.use = "t", verbose = FALSE, base = exp(1), pseudocount.use = 1) test_that("t test works", { expect_equal(nrow(x = results), 222) expect_equal(results["CST3", "p_val"], 1.170112e-15, tolerance = 1e-20) expect_equal(results["CST3", "avg_logFC"], -2.57219, tolerance = 
1e-6) expect_equal(results["CST3", "pct.1"], 0.306) expect_equal(results["CST3", "pct.2"], 1.00) expect_equal(results["CST3", "p_val_adj"], 2.691258e-13, tolerance = 1e-18) expect_equal(rownames(x = results)[1], "TYMP") }) results <- suppressWarnings(FindMarkers(object = pbmc_small, ident.1 = 0, ident.2 = 1, test.use = "negbinom", verbose = FALSE, base = exp(1), pseudocount.use = 1)) test_that("negbinom test works", { expect_equal(nrow(x = results), 204) expect_equal(results["CST3", "p_val"], 1.354443e-17, tolerance = 1e-22) expect_equal(results["CST3", "avg_logFC"], -2.878123, tolerance = 1e-6) expect_equal(results["CST3", "pct.1"], 0.306) expect_equal(results["CST3", "pct.2"], 1.00) expect_equal(results["CST3", "p_val_adj"], 3.115218e-15, tolerance = 1e-20) expect_equal(rownames(x = results)[1], "LYZ") }) results <- suppressWarnings(FindMarkers(object = pbmc_small, ident.1 = 0, ident.2 = 1, test.use = "poisson", verbose = FALSE, base = exp(1), pseudocount.use = 1)) test_that("poisson test works", { expect_equal(nrow(x = results), 204) expect_equal(results["CST3", "p_val"], 3.792196e-78, tolerance = 1e-83) expect_equal(results["CST3", "avg_logFC"], -2.878123, tolerance = 1e-6) expect_equal(results["CST3", "pct.1"], 0.306) expect_equal(results["CST3", "pct.2"], 1.00) expect_equal(results["CST3", "p_val_adj"], 8.722050e-76, tolerance = 1e-81) expect_equal(rownames(x = results)[1], "LYZ") }) results <- suppressWarnings(FindMarkers(object = pbmc_small, ident.1 = 0, ident.2 = 1, test.use = "LR", verbose = FALSE, base = exp(1), pseudocount.use = 1)) test_that("LR test works", { expect_equal(nrow(x = results), 222) expect_equal(results["CST3", "p_val"], 3.990707e-16, tolerance = 1e-21) expect_equal(results["CST3", "avg_logFC"], -2.57219, tolerance = 1e-6) expect_equal(results["CST3", "pct.1"], 0.306) expect_equal(results["CST3", "pct.2"], 1.00) expect_equal(results["CST3", "p_val_adj"], 9.178625e-14, tolerance = 1e-19) expect_equal(rownames(x = results)[1], "LYZ") }) test_that("FindMarkers with wilcox_limma works", { skip_on_cran() skip_if_not_installed("limma") markers.0.limma <- suppressWarnings(FindMarkers(object = pbmc_small, ident.1 = 0, verbose = FALSE, base = exp(1),pseudocount.use = 1,test.use='wilcox_limma')) markers.01.limma <- suppressWarnings(FindMarkers(object = pbmc_small, ident.1 = 0, ident.2 = 1, verbose = FALSE, base = exp(1),pseudocount.use = 1,test.use='wilcox_limma')) results.clr.limma <- suppressWarnings(FindMarkers(object = clr.obj, ident.1 = 0, ident.2 = 1, verbose = FALSE, base = exp(1), pseudocount.use = 1,test.use='wilcox_limma')) results.sct.limma <- suppressWarnings(FindMarkers(object = sct.obj, ident.1 = 0, ident.2 = 1, verbose = FALSE, base = exp(1), pseudocount.use = 1,test.use='wilcox_limma')) expect_equal(colnames(x = markers.0.limma), c("p_val", "avg_logFC", "pct.1", "pct.2", "p_val_adj")) expect_equal(markers.0.limma[1, "p_val"], 9.572778e-13, tolerance = 1e-18) expect_equal(markers.0.limma[1, "avg_logFC"], -4.180029, tolerance = 1e-6) expect_equal(markers.0.limma[1, "pct.1"], 0.083) expect_equal(markers.0.limma[1, "pct.2"], 0.909) expect_equal(markers.0.limma[1, "p_val_adj"], 2.201739e-10, tolerance = 1e-15) expect_equal(nrow(x = markers.0.limma), 228) expect_equal(rownames(markers.0.limma)[1], "HLA-DPB1") expect_equal(markers.01.limma[1, "p_val"], 1.702818e-11, tolerance = 1e-16) expect_equal(markers.01.limma[1, "avg_logFC"], -2.638242, tolerance = 1e-6) expect_equal(markers.01.limma[1, "pct.1"], 0.111) expect_equal(markers.01.limma[1, "pct.2"], 1.00) 
expect_equal(markers.01.limma[1, "p_val_adj"], 3.916481e-09, tolerance = 1e-14) expect_equal(nrow(x = markers.01.limma), 222) expect_equal(rownames(x = markers.01.limma)[1], "TYMP") expect_equal(results.clr.limma[1, "p_val"], 1.209462e-11, tolerance = 1e-16) expect_equal(results.clr.limma[1, "avg_logFC"], -2.946633, tolerance = 1e-6) expect_equal(results.clr.limma[1, "pct.1"], 0.111) expect_equal(results.clr.limma[1, "pct.2"], 0.96) expect_equal(results.clr.limma[1, "p_val_adj"], 2.781762e-09, tolerance = 1e-14) expect_equal(nrow(x = results.clr.limma), 213) expect_equal(rownames(x = results.clr.limma)[1], "S100A8") expect_equal(results.sct.limma[1, "p_val"], 6.225491e-11, tolerance = 1e-16) expect_equal(results.sct.limma[1, "avg_logFC"], -2.545867, tolerance = 1e-6) expect_equal(results.sct.limma[1, "pct.1"], 0.111) expect_equal(results.sct.limma[1, "pct.2"], 0.96) expect_equal(results.sct.limma[1, "p_val_adj"], 1.369608e-08, tolerance = 1e-13) expect_equal(nrow(x = results.sct.limma), 214) expect_equal(rownames(x = results.sct.limma)[1], "TYMP") }) test_that("BPCells FindMarkers gives same results", { skip_on_cran() skip_if_not_installed("BPCells") library(BPCells) library(Matrix) mat_bpcells <- t(as(t(pbmc_small[['RNA']]$counts ), "IterableMatrix")) pbmc_small[['RNAbp']] <- CreateAssay5Object(counts = mat_bpcells) pbmc_small <- NormalizeData(pbmc_small, assay = "RNAbp") markers.bp <- suppressWarnings(FindMarkers(object = pbmc_small, assay = "RNAbp", ident.1 = 0, verbose = FALSE, base = exp(1),pseudocount.use = 1)) expect_equal(colnames(x = markers.bp), c("p_val", "avg_logFC", "pct.1", "pct.2", "p_val_adj")) expect_equal(markers.bp[1, "p_val"], 9.572778e-13) expect_equal(markers.bp[1, "avg_logFC"], -4.180029, tolerance = 1e-6) expect_equal(markers.bp[1, "pct.1"], 0.083) expect_equal(markers.bp[1, "pct.2"], 0.909) expect_equal(markers.bp[1, "p_val_adj"], 2.201739e-10) expect_equal(nrow(x = markers.bp), 228) expect_equal(rownames(markers.bp)[1], "HLA-DPB1") }) # Tests for FindAllMarkers # ------------------------------------------------------------------------------- results <- suppressMessages(suppressWarnings(FindAllMarkers(object = pbmc_small,pseudocount.use=1))) results.clr <- suppressMessages(suppressWarnings(FindAllMarkers(object = clr.obj, pseudocount.use=1))) results.sct <- suppressMessages(suppressWarnings(FindAllMarkers(object = sct.obj, pseudocount.use=1, vst.flavor = "v1"))) results.pseudo <- suppressMessages(suppressWarnings(FindAllMarkers(object = pbmc_small, pseudocount.use = 0.1))) test_that("FindAllMarkers works as expected", { expect_equal(colnames(x = results), c("p_val", "avg_log2FC", "pct.1", "pct.2", "p_val_adj", "cluster", "gene")) expect_equal(results[1, "p_val"], 9.572778e-13, tolerance = 1e-18) expect_equal(results[1, "avg_log2FC"], -6.030507, tolerance = 1e-6) expect_equal(results[1, "pct.1"], 0.083) expect_equal(results[1, "pct.2"], 0.909) expect_equal(results[1, "p_val_adj"], 2.201739e-10, tolerance = 1e-15) expect_equal(nrow(x = results), 222) expect_equal(rownames(results)[1], "HLA-DPB1") # CLR normalization expect_equal(results.clr[1, "p_val"], 1.338858e-12, tolerance = 1e-17) expect_equal(results.clr[1, "avg_log2FC"], -4.088546, tolerance = 1e-6) expect_equal(results.clr[1, "pct.1"], 0.083) expect_equal(results.clr[1, "pct.2"], 0.909) expect_equal(results.clr[1, "p_val_adj"], 3.079373e-10, tolerance = 1e-15) expect_equal(nrow(x = results.clr), 222) expect_equal(rownames(x = results.clr)[1], "HLA-DPB1") # SCT normalization expect_equal(results.sct[1, 
"p_val"], 4.25861e-12, tolerance = 1e-17) expect_equal(results.sct[1, "avg_log2FC"], -5.088014, tolerance = 1e-6) expect_equal(results.sct[1, "pct.1"], 0.167) expect_equal(results.sct[1, "pct.2"], 0.909) expect_equal(results.sct[1, "p_val_adj"], 9.368941e-10, tolerance = 1e-15) expect_equal(nrow(x = results.sct), 212) expect_equal(rownames(x = results.sct)[1], "HLA-DPB1") # pseudocount.use = 0.1 expect_equal(results.pseudo[1, "p_val"], 9.572778e-13, tolerance = 1e-18) expect_equal(results.pseudo[1, "avg_log2FC"], -6.036353, tolerance = 1e-6) expect_equal(results.pseudo[1, "pct.1"], 0.083) expect_equal(results.pseudo[1, "pct.2"], 0.909) expect_equal(results.pseudo[1, "p_val_adj"], 2.201739e-10, tolerance = 1e-15) expect_equal(nrow(x = results.pseudo), 222) expect_equal(rownames(results.pseudo)[1], "HLA-DPB1") }) # Tests for running FindMarkers post integration/transfer ref <- pbmc_small ref <- FindVariableFeatures(object = ref, verbose = FALSE, nfeatures = 100) query <- CreateSeuratObject(CreateAssayObject( counts = as.sparse(GetAssayData(object = pbmc_small[['RNA']], layer = "counts") + rpois(n = ncol(pbmc_small), lambda = 1)) )) query2 <- CreateSeuratObject(CreateAssayObject( counts = as.sparse(GetAssayData(object = pbmc_small[['RNA']], layer = "counts")[, 1:40] + rpois(n = ncol(pbmc_small), lambda = 1)) )) query.list <- list(query, query2) query.list <- lapply(X = query.list, FUN = NormalizeData, verbose = FALSE) query.list <- lapply(X = query.list, FUN = FindVariableFeatures, verbose = FALSE, nfeatures = 100) query.list <- lapply(X = query.list, FUN = ScaleData, verbose = FALSE) query.list <- suppressWarnings(lapply(X = query.list, FUN = RunPCA, verbose = FALSE, npcs = 20)) anchors <- suppressMessages(suppressWarnings(FindIntegrationAnchors(object.list = c(ref, query.list), k.filter = NA, verbose = FALSE))) object <- suppressMessages(IntegrateData(anchorset = anchors, k.weight = 25, verbose = FALSE)) object <- suppressMessages(ScaleData(object, verbose = FALSE)) object <- suppressMessages(RunPCA(object, verbose = FALSE)) object <- suppressMessages(FindNeighbors(object = object, verbose = FALSE)) object <- suppressMessages(FindClusters(object, verbose = FALSE)) markers <- FindMarkers(object = object, ident.1="0", ident.2="1",pseudocount.use = 1, verbose=FALSE) test_that("FindMarkers recognizes log normalization", { expect_equal(markers[1, "p_val"], 1.598053e-14, tolerance = 1e-19) expect_equal(markers[1, "avg_log2FC"], -2.634458, tolerance = 1e-6) }) test_that("BPCells FindAllMarkers gives same results", { skip_on_cran() skip_if_not_installed("BPCells") library(BPCells) library(Matrix) mat_bpcells <- t(as(t(pbmc_small[['RNA']]$counts ), "IterableMatrix")) pbmc_small[['RNAbp']] <- CreateAssay5Object(counts = mat_bpcells) pbmc_small <- NormalizeData(pbmc_small, assay = "RNAbp") results.bp <- suppressMessages(suppressWarnings(FindAllMarkers(object = pbmc_small, assay = "RNAbp", pseudocount.use=1))) expect_equal(colnames(x = results.bp), c("p_val", "avg_log2FC", "pct.1", "pct.2", "p_val_adj", "cluster", "gene")) expect_equal(results.bp[1, "p_val"], 9.572778e-13) expect_equal(results.bp[1, "avg_log2FC"], -6.030507, tolerance = 1e-6) expect_equal(results.bp[1, "pct.1"], 0.083) expect_equal(results.bp[1, "pct.2"], 0.909) expect_equal(results.bp[1, "p_val_adj"], 2.201739e-10) expect_equal(nrow(x = results.bp), 222) expect_equal(rownames(results.bp)[1], "HLA-DPB1") }) # Tests for FindConservedMarkers # ------------------------------------------------------------------------------- if 
(requireNamespace('metap', quietly = TRUE)) { context("FindConservedMarkers") pbmc_small$groups markers <- suppressWarnings(FindConservedMarkers(object = pbmc_small, ident.1 = 0, grouping.var = "groups", verbose = FALSE, base = exp(1), pseudocount.use = 1)) standard.names <- c("p_val", "avg_logFC", "pct.1", "pct.2", "p_val_adj") test_that("FindConservedMarkers works", { expect_equal(colnames(x = markers), c(paste0("g2_", standard.names), paste0("g1_", standard.names), "max_pval", "minimump_p_val")) expect_equal(markers[1, "g2_p_val"], 4.983576e-05) expect_equal(markers[1, "g2_avg_logFC"], -4.364959, tolerance = 1e-6) # expect_equal(markers[1, "g2_pct.1"], 0.062) expect_equal(markers[1, "g2_pct.2"], 0.75) expect_equal(markers[1, "g2_p_val_adj"], 0.0114622238) expect_equal(markers[1, "g1_p_val"], 3.946643e-08, tolerance = 1e-13) expect_equal(markers[1, "g1_avg_logFC"], -3.69215, tolerance = 1e-6) expect_equal(markers[1, "g1_pct.1"], 0.10) expect_equal(markers[1, "g1_pct.2"], 0.958) expect_equal(markers[1, "g1_p_val_adj"], 9.077279e-06) expect_equal(markers[1, "max_pval"], 4.983576e-05) expect_equal(markers[1, "minimump_p_val"], 7.893286e-08, tolerance = 1e-13) expect_equal(nrow(markers), 219) expect_equal(rownames(markers)[1], "HLA-DRB1") expect_equal(markers[, "max_pval"], unname(obj = apply(X = markers, MARGIN = 1, FUN = function(x) max(x[c("g1_p_val", "g2_p_val")])))) }) test_that("FindConservedMarkers errors when expected", { expect_error(FindConservedMarkers(pbmc_small)) expect_error(FindConservedMarkers(pbmc_small, ident.1 = 0)) expect_error(FindConservedMarkers(pbmc_small, ident.1 = 0, grouping.var = "groups", meta.method = "minimump")) }) pbmc.test <- pbmc_small Idents(object = pbmc.test) <- "RNA_snn_res.1" pbmc.test$id.group <- paste0(pbmc.test$RNA_snn_res.1, "_", pbmc.test$groups) pbmc.test <- subset(x = pbmc.test, id.group == "0_g1", invert = TRUE) markers.missing <- suppressWarnings(FindConservedMarkers(object = pbmc.test, ident.1 = 0, grouping.var = "groups", test.use = "t", verbose = FALSE, base = exp(1), pseudocount.use = 1)) test_that("FindConservedMarkers handles missing idents in certain groups", { expect_warning(FindConservedMarkers(object = pbmc.test, ident.1 = 0, grouping.var = "groups", test.use = "t")) expect_equal(colnames(x = markers.missing), paste0("g2_", standard.names)) expect_equal(markers.missing[1, "g2_p_val"], 1.672911e-13, tolerance = 1e-18) expect_equal(markers.missing[1, "g2_avg_logFC"], -4.796379, tolerance = 1e-6) # expect_equal(markers.missing[1, "g2_pct.1"], 0.062) expect_equal(markers.missing[1, "g2_pct.2"], 0.95) expect_equal(markers.missing[1, "g2_p_val_adj"], 3.847695e-11, tolerance = 1e-16) expect_equal(nrow(markers.missing), 226) expect_equal(rownames(markers.missing)[1], "HLA-DPB1") }) } Seurat/tests/testthat/test_integration.R0000644000176200001440000005235314525500037020235 0ustar liggesusers# Tests for integration/transfer related fxns set.seed(42) pbmc_small <- suppressWarnings(UpdateSeuratObject(pbmc_small)) # Setup test objects ref <- pbmc_small query <- CreateSeuratObject( counts = as.sparse( GetAssayData( object = pbmc_small[['RNA']], layer = "counts") + rpois(n = ncol(pbmc_small), lambda = 1 ) ) ) query <- NormalizeData(object = query, verbose = FALSE) query <- FindVariableFeatures(object = query, verbose = FALSE, nfeatures = 100) ref <- FindVariableFeatures(object = ref, verbose = FALSE, nfeatures = 100) # Tests for FindTransferAnchors # ------------------------------------------------------------------------------ 
context("FindTransferAnchors") test_that("FindTransferAnchors defaults work", { anchors <- FindTransferAnchors(reference = ref, query = query, k.filter = 50) co <- anchors@object.list[[1]] expect_equal(dim(co), c(100, 160)) expect_equal(Reductions(co), c("pcaproject", "pcaproject.l2")) expect_equal(GetAssayData(co[["RNA"]], layer ="data")[1, 3], 0) expect_equal(GetAssayData(co[["RNA"]], layer = "counts")[1, 3], 0) expect_equal(dim(co[['pcaproject']]), c(160, 30)) expect_equal(Embeddings(co[['pcaproject']])[1, 1], 0.4840944592, tolerance = 1e-7) expect_equal(Loadings(co[['pcaproject']], projected = T)[1, 1], 0.2103563963, tolerance = 1e-7) expect_equal(dim(co[['pcaproject.l2']]), c(160, 30)) expect_equal(Embeddings(co[['pcaproject.l2']])[1, 1], 0.05175486778, tolerance = 1e-7) expect_equal(Loadings(co[['pcaproject.l2']], projected = T)[1, 1], 0.2103563963, tolerance = 1e-7) ref.cells <- paste0(Cells(ref), "_reference") query.cells <- paste0(Cells(query), "_query") expect_equal(anchors@reference.cells, ref.cells) expect_equal(anchors@query.cells, query.cells) expect_equal(anchors@reference.objects, logical()) anchor.mat <- anchors@anchors expect_equal(dim(anchor.mat), c(128, 3)) expect_equal(as.vector(anchor.mat[1, ]), c(5, 5, 0.08361970218), tolerance = 1e-7) expect_equal(max(anchor.mat[, 2]), 80) expect_null(anchors@offsets) expect_equal(length(anchors@anchor.features), 100) expect_equal(anchors@anchor.features[1], "PPBP") expect_equal(anchors@neighbors, list()) }) test_that("FindTransferAnchors catches bad input", { expect_error(FindTransferAnchors(reference = ref, query = query, reference.assay = "BAD", k.filter = 50)) expect_error(FindTransferAnchors(reference = ref, query = query, query.assay = "BAD", k.filter = 50)) expect_error(FindTransferAnchors(reference = ref, query = query, normalization.method = "BAD", k.filter = 50)) expect_error(FindTransferAnchors(reference = ref, query = query, reduction = "BAD", k.filter = 50)) expect_error(FindTransferAnchors(reference = ref, query = query, npcs = NULL, k.filter = 50)) expect_error(FindTransferAnchors(reference = ref, query = query, npcs = NULL, reference.reduction = "BAD", k.filter = 50)) expect_error(suppressWarngings(FindTransferAnchors(reference = ref, query = query, dims = 1:100, k.filter = 50))) expect_error(suppressWarnings(FindTransferAnchors(reference = ref, query = query, dims = 1:100, project.query = TRUE, k.filter = 50))) expect_error(FindTransferAnchors(reference = ref, query = query, k.anchor = 80, k.filter = 50)) expect_warning(FindTransferAnchors(reference = ref, query = query, k.filter = 81)) expect_error(FindTransferAnchors(reference = ref, query = query, k.filter = 50, k.score = 80)) expect_error(suppressWarnings(FindTransferAnchors(reference = ref, query = query, k.filter = 50, features = "BAD"))) expect_error(FindTransferAnchors(reference = ref, query = query, k.filter = 50, reduction = "cca", project.query = TRUE)) expect_error(FindTransferAnchors(reference = ref, query = query, reference.reduction = "BAD", k.filter = 50)) expect_error(FindTransferAnchors(reference = ref, query = query, reference.reduction = "BAD", project.query = TRUE, k.filter = 50)) }) ref <- ScaleData(ref, verbose = FALSE) ref <- suppressWarnings(RunPCA(ref, npcs = 30, verbose = FALSE)) test_that("FindTransferAnchors allows reference.reduction to be precomputed", { skip_on_cran() anchors <- FindTransferAnchors(reference = ref, query = query, k.filter = 50, reference.reduction = "pca") expect_error(FindTransferAnchors(reference = ref, query = 
query, k.filter = 50, reference.reduction = "pca", reduction = "cca")) expect_error(FindTransferAnchors(reference = ref, query = query, k.filter = 50, reference.reduction = "pca", project.query = TRUE)) co <- anchors@object.list[[1]] expect_equal(dim(co), c(100, 160)) expect_equal(Reductions(co), c("pcaproject", "pcaproject.l2")) expect_equal(GetAssayData(co[["RNA"]])[1, 3], 0) expect_equal(GetAssayData(co[["RNA"]], layer = "counts")[1, 3], 0) expect_equal(dim(co[['pcaproject']]), c(160, 30)) expect_equal(Embeddings(co[['pcaproject']])[1, 1], 0.4840944592, tolerance = 1e-7) expect_equal(Loadings(co[['pcaproject']], projected = T)[1, 1], 0.2103563963, tolerance = 1e-7) expect_equal(dim(co[['pcaproject.l2']]), c(160, 30)) expect_equal(Embeddings(co[['pcaproject.l2']])[1, 1], 0.05175486778, tolerance = 1e-7) expect_equal(Loadings(co[['pcaproject.l2']], projected = T)[1, 1], 0.2103563963, tolerance = 1e-7) ref.cells <- paste0(Cells(ref), "_reference") query.cells <- paste0(Cells(query), "_query") expect_equal(anchors@reference.cells, ref.cells) expect_equal(anchors@query.cells, query.cells) expect_equal(anchors@reference.objects, logical()) anchor.mat <- anchors@anchors expect_equal(dim(anchor.mat), c(128, 3)) expect_equal(as.vector(anchor.mat[1, ]), c(5, 5, 0.08361970218), tolerance = 1e-7) expect_equal(max(anchor.mat[, 2]), 80) expect_null(anchors@offsets) expect_equal(length(anchors@anchor.features), 100) expect_equal(anchors@anchor.features[1], "PPBP") expect_equal(anchors@neighbors, list()) }) test_that("FindTransferAnchors with cca defaults work", { skip_on_cran() anchors <- FindTransferAnchors(reference = ref, query = query, reduction = "cca", k.filter = 50) co <- anchors@object.list[[1]] expect_equal(dim(co), c(100, 160)) expect_equal(Reductions(co), c("cca", "cca.l2")) expect_equal(GetAssayData(co[["RNA"]])["PPBP", 3], 0) expect_equal(GetAssayData(co[["RNA"]])["PPBP", 1], 0) expect_equal(GetAssayData(co[["RNA"]], layer = "counts")["PPBP", 3], 0) expect_equal(GetAssayData(co[["RNA"]], layer = "counts")["PPBP", 1], 0) expect_equal(dim(co[['cca']]), c(160, 30)) expect_equal(Embeddings(co[['cca']])[1, 1], 0.04611130861, tolerance = 1e-7) expect_equal(Loadings(co[['cca']], projected = T)["PPBP", 1], 12.32379661, tolerance = 1e-7) expect_equal(dim(co[['cca.l2']]), c(160, 30)) expect_equal(Embeddings(co[['cca.l2']])[1, 1], 0.06244169641, tolerance = 1e-7) expect_equal(Loadings(co[['cca.l2']], projected = T)["PPBP", 1], 12.32379661, tolerance = 1e-7) ref.cells <- paste0(Cells(ref), "_reference") query.cells <- paste0(Cells(query), "_query") expect_equal(anchors@reference.cells, ref.cells) expect_equal(anchors@query.cells, query.cells) expect_equal(anchors@reference.objects, logical()) anchor.mat <- anchors@anchors expect_equal(dim(anchor.mat), c(324, 3)) expect_equal(as.vector(anchor.mat[1, ]), c(1, 1, 0.8211091234), tolerance = 1e-7) expect_equal(max(anchor.mat[, 2]), 80) expect_null(anchors@offsets) expect_equal(length(anchors@anchor.features), 100) expect_equal(anchors@anchor.features[1], "PPBP") expect_equal(anchors@neighbors, list()) }) test_that("FindTransferAnchors with project.query defaults work", { skip_on_cran() anchors <- FindTransferAnchors(reference = ref, query = query, project.query = TRUE, k.filter = 50) co <- anchors@object.list[[1]] expect_equal(dim(co), c(100, 160)) expect_equal(Reductions(co), c("pcaproject", "pcaproject.l2")) expect_equal(GetAssayData(co[["RNA"]], layer = "data")["PPBP", 3], 0) expect_equal(GetAssayData(co[["RNA"]], layer = "data")["PPBP", 1], 0) 
expect_equal(GetAssayData(co[["RNA"]], layer = "counts")["PPBP", 3], 0) expect_equal(GetAssayData(co[["RNA"]], layer = "counts")["PPBP", 1], 0) expect_equal(dim(co[['pcaproject']]), c(160, 30)) expect_equal(Embeddings(co[['pcaproject']])[1, 1], 1.577959404, tolerance = 1e-7) expect_equal(Loadings(co[['pcaproject']], projected = T)["PPBP", 1], 0.1145472305, tolerance = 1e-7) expect_equal(dim(co[['pcaproject.l2']]), c(160, 30)) expect_equal(Embeddings(co[['pcaproject.l2']])[1, 1], 0.1358602536, tolerance = 1e-7) expect_equal(Loadings(co[['pcaproject.l2']], projected = T)["PPBP", 1], 0.1145472305, tolerance = 1e-7) ref.cells <- paste0(Cells(ref), "_reference") query.cells <- paste0(Cells(query), "_query") expect_equal(anchors@reference.cells, ref.cells) expect_equal(anchors@query.cells, query.cells) expect_equal(anchors@reference.objects, logical()) anchor.mat <- anchors@anchors expect_equal(dim(anchor.mat), c(208, 3)) expect_equal(as.vector(anchor.mat[1, ]), c(1, 10, 0.4984040128), tolerance = 1e-7) expect_equal(max(anchor.mat[, 2]), 80) expect_null(anchors@offsets) expect_equal(length(anchors@anchor.features), 100) expect_equal(anchors@anchor.features[1], "GZMA") expect_equal(anchors@neighbors, list()) }) query <- ScaleData(query, verbose = FALSE) query <- suppressWarnings(RunPCA(query, npcs = 30, verbose = FALSE)) test_that("FindTransferAnchors with project.query and reference.reduction works", { skip_on_cran() anchors <- FindTransferAnchors(reference = ref, query = query, k.filter = 50, reference.reduction = "pca", project.query = TRUE) co <- anchors@object.list[[1]] expect_equal(dim(co), c(100, 160)) expect_equal(Reductions(co), c("pcaproject", "pcaproject.l2")) expect_equal(GetAssayData(co[["RNA"]])["PPBP", 3], 0) expect_equal(GetAssayData(co[["RNA"]])["PPBP", 1], 0) expect_equal(GetAssayData(co[["RNA"]], layer = "counts")["PPBP", 3], 0) expect_equal(GetAssayData(co[["RNA"]], layer = "counts")["PPBP", 1], 0) expect_equal(dim(co[['pcaproject']]), c(160, 30)) expect_equal(Embeddings(co[['pcaproject']])[1, 1], 1.577959404, tolerance = 1e-7) expect_equal(Loadings(co[['pcaproject']], projected = T)["PPBP", 1], 0.1145472305, tolerance = 1e-7) expect_equal(dim(co[['pcaproject.l2']]), c(160, 30)) expect_equal(Embeddings(co[['pcaproject.l2']])[1, 1], 0.1358602536, tolerance = 1e-7) expect_equal(Loadings(co[['pcaproject.l2']], projected = T)["PPBP", 1], 0.1145472305, tolerance = 1e-7) ref.cells <- paste0(Cells(ref), "_reference") query.cells <- paste0(Cells(query), "_query") expect_equal(anchors@reference.cells, ref.cells) expect_equal(anchors@query.cells, query.cells) expect_equal(anchors@reference.objects, logical()) anchor.mat <- anchors@anchors expect_equal(dim(anchor.mat), c(208, 3)) expect_equal(as.vector(anchor.mat[1, ]), c(1, 10, 0.4984040128), tolerance = 1e-7) expect_equal(max(anchor.mat[, 2]), 80) expect_null(anchors@offsets) expect_equal(length(anchors@anchor.features), 100) expect_equal(anchors@anchor.features[1], "GZMA") expect_equal(anchors@neighbors, list()) }) ref <- FindNeighbors(object = ref, reduction = "pca", dims = 1:30, return.neighbor = TRUE, k.param = 31, verbose = FALSE, l2.norm = TRUE, nn.method = "annoy") test_that("FindTransferAnchors with reference.neighbors precomputed works", { skip_on_cran() anchors <- FindTransferAnchors(reference = ref, query = query, reference.neighbors = "RNA.nn", k.filter = 50) expect_error(FindTransferAnchors(reference = ref, query = query, reference.neighbors = "BAD", k.filter = 50)) expect_error(FindTransferAnchors(reference = ref, query = 
query, reference.neighbors = "RNA.nn", k.filter = 50, k.score = 31)) expect_error(FindTransferAnchors(reference = ref, query = query, reference.neighbors = "RNA.nn", k.filter = 50, k.anchor = 31)) co <- anchors@object.list[[1]] expect_equal(dim(co), c(100, 160)) expect_equal(Reductions(co), c("pcaproject", "pcaproject.l2")) expect_equal(GetAssayData(co[["RNA"]])[1, 3], 0) expect_equal(GetAssayData(co[["RNA"]], layer = "counts")[1, 3], 0) expect_equal(dim(co[['pcaproject']]), c(160, 30)) expect_equal(Embeddings(co[['pcaproject']])[1, 1], 0.4840944592, tolerance = 1e-7) expect_equal(Loadings(co[['pcaproject']], projected = T)[1, 1], 0.2103563963, tolerance = 1e-7) expect_equal(dim(co[['pcaproject.l2']]), c(160, 30)) expect_equal(Embeddings(co[['pcaproject.l2']])[1, 1], 0.05175486778, tolerance = 1e-7) expect_equal(Loadings(co[['pcaproject.l2']], projected = T)[1, 1], 0.2103563963, tolerance = 1e-7) ref.cells <- paste0(Cells(ref), "_reference") query.cells <- paste0(Cells(query), "_query") expect_equal(anchors@reference.cells, ref.cells) expect_equal(anchors@query.cells, query.cells) expect_equal(anchors@reference.objects, logical()) anchor.mat <- anchors@anchors expect_equal(dim(anchor.mat), c(128, 3)) expect_equal(as.vector(anchor.mat[1, ]), c(5, 5, 0.08361970218), tolerance = 1e-7) expect_equal(max(anchor.mat[, 2]), 80) expect_null(anchors@offsets) expect_equal(length(anchors@anchor.features), 100) expect_equal(anchors@anchor.features[1], "PPBP") expect_equal(anchors@neighbors, list()) }) test_that("FindTransferAnchors with no l2 works", { skip_on_cran() anchors <- FindTransferAnchors(reference = ref, query = query, l2.norm = FALSE, k.filter = 50) co <- anchors@object.list[[1]] expect_equal(dim(co), c(100, 160)) expect_equal(Reductions(co), c("pcaproject")) expect_equal(GetAssayData(co[["RNA"]])[1, 3], 0) expect_equal(GetAssayData(co[["RNA"]], layer = "counts")[1, 3], 0) expect_equal(dim(co[['pcaproject']]), c(160, 30)) expect_equal(Embeddings(co[['pcaproject']])[1, 1], 0.4840944592, tolerance = 1e-7) expect_equal(Loadings(co[['pcaproject']], projected = T)[1, 1], 0.2103563963, tolerance = 1e-7) ref.cells <- paste0(Cells(ref), "_reference") query.cells <- paste0(Cells(query), "_query") expect_equal(anchors@reference.cells, ref.cells) expect_equal(anchors@query.cells, query.cells) expect_equal(anchors@reference.objects, logical()) anchor.mat <- anchors@anchors expect_equal(dim(anchor.mat), c(115, 3)) expect_equal(as.vector(anchor.mat[1, ]), c(5, 5, 0.2950654582), tolerance = 1e-7) expect_equal(max(anchor.mat[, 2]), 80) expect_null(anchors@offsets) expect_equal(length(anchors@anchor.features), 100) expect_equal(anchors@anchor.features[1], "PPBP") expect_equal(anchors@neighbors, list()) }) # SCTransform tests V1 query <- suppressWarnings(SCTransform(object = query, verbose = FALSE,vst.flavor = 'v1')) ref <- suppressWarnings(SCTransform(object = ref, verbose = FALSE,vst.flavor = 'v1')) test_that("FindTransferAnchors with default SCT works", { skip_on_cran() anchors <- FindTransferAnchors(reference = ref, query = query, normalization.method = "SCT", k.filter = 50) co <- anchors@object.list[[1]] expect_equal(dim(co), c(220, 160)) expect_equal(Reductions(co), c("pcaproject", "pcaproject.l2")) expect_equal(DefaultAssay(co), "SCT") expect_equal(GetAssayData(co[["SCT"]], layer = "scale.data"), new(Class = "matrix")) expect_equal(GetAssayData(co[["SCT"]])[1, 1], 0) expect_equal(dim(co[['pcaproject']]), c(160, 30)) expect_equal(Embeddings(co[['pcaproject']])[1, 1], -1.852491719, tolerance = 1e-7) 
expect_equal(Loadings(co[['pcaproject']], projected = T)[1, 1], -0.1829401539, tolerance = 1e-7) expect_equal(dim(co[['pcaproject.l2']]), c(160, 30)) expect_equal(Embeddings(co[['pcaproject.l2']])[1, 1], -0.1971047407, tolerance = 1e-7) expect_equal(Loadings(co[['pcaproject.l2']], projected = T)[1, 1], -0.1829401539, tolerance = 1e-7) ref.cells <- paste0(Cells(ref), "_reference") query.cells <- paste0(Cells(query), "_query") expect_equal(anchors@reference.cells, ref.cells) expect_equal(anchors@query.cells, query.cells) expect_equal(anchors@reference.objects, logical()) anchor.mat <- anchors@anchors expect_equal(dim(anchor.mat), c(256, 3)) expect_equal(as.vector(anchor.mat[1, ]), c(1, 1, 0.688195991), tolerance = 1e-7) expect_equal(max(anchor.mat[, 2]), 80) expect_null(anchors@offsets) expect_equal(length(anchors@anchor.features), 220) expect_equal(anchors@anchor.features[1], "NKG7") expect_equal(anchors@neighbors, list()) }) test_that("Mixing SCT and non-SCT assays fails", { expect_error(FindTransferAnchors(reference = ref, query = query, reference.assay = "SCT", query.assay = "RNA", k.filter = 50)) ref.0 <- ref ref.2 <- ref ref.0[["SCT"]]@SCTModel.list <- list() ref.2[["SCT"]]@SCTModel.list$model2 <- ref.2[["SCT"]]@SCTModel.list$model1 expect_error(FindTransferAnchors(reference = ref.0, query = query, reference.assay = "SCT", query.assay = "RNA", k.filter = 50, normalization.method = "SCT")) expect_error(FindTransferAnchors(reference = ref.2, query = query, reference.assay = "SCT", query.assay = "RNA", k.filter = 50, normalization.method = "SCT")) expect_error(FindTransferAnchors(reference = ref, query = query, reference.assay = "RNA", query.assay = "SCT", k.filter = 50)) expect_error(FindTransferAnchors(reference = ref, query = query, reference.assay = "RNA", query.assay = "SCT", k.filter = 50, normalization.method = "SCT")) }) test_that("FindTransferAnchors with default SCT works", { skip_on_cran() anchors <- FindTransferAnchors(reference = ref, query = query, normalization.method = "SCT", reduction = "cca", k.filter = 50) co <- anchors@object.list[[1]] expect_equal(dim(co), c(220, 160)) expect_equal(Reductions(co), c("cca", "cca.l2")) expect_equal(DefaultAssay(co), "SCT") expect_equal(GetAssayData(co[["SCT"]])[1, 1], 0) expect_equal(dim(co[['cca']]), c(160, 30)) expect_equal(Embeddings(co[['cca']])[1, 1], 0.0459135444, tolerance = 1e-7) expect_equal(Loadings(co[['cca']], projected = T)["NKG7", 1], 8.51477973, tolerance = 1e-7) expect_equal(dim(co[['cca.l2']]), c(160, 30)) expect_equal(Embeddings(co[['cca.l2']])[1, 1], 0.0625989664, tolerance = 1e-7) expect_equal(Loadings(co[['cca.l2']], projected = T)["NKG7", 1], 8.51477973, tolerance = 1e-7) ref.cells <- paste0(Cells(ref), "_reference") query.cells <- paste0(Cells(query), "_query") expect_equal(anchors@reference.cells, ref.cells) expect_equal(anchors@query.cells, query.cells) expect_equal(anchors@reference.objects, logical()) anchor.mat <- anchors@anchors expect_equal(dim(anchor.mat), c(313, 3)) expect_equal(as.vector(anchor.mat[1, ]), c(1, 1, 0.616858238), tolerance = 1e-7) expect_equal(max(anchor.mat[, 2]), 80) expect_null(anchors@offsets) expect_equal(length(anchors@anchor.features), 220) expect_equal(anchors@anchor.features[1], "NKG7") expect_equal(anchors@neighbors, list()) }) test_that("FindTransferAnchors with SCT and project.query work", { skip_on_cran() anchors <- FindTransferAnchors(reference = ref, query = query, normalization.method = "SCT", project.query = TRUE, k.filter = 50, recompute.residuals = FALSE) co <- 
anchors@object.list[[1]] expect_equal(dim(co), c(220, 160)) expect_equal(Reductions(co), c("pcaproject", "pcaproject.l2")) expect_equal(DefaultAssay(co), "SCT") expect_equal(GetAssayData(co[["SCT"]])[1, 1], 0) expect_equal(GetAssayData(co[["SCT"]], slot = "scale.data"), new("matrix")) expect_equal(dim(co[['pcaproject']]), c(160, 30)) expect_equal(Embeddings(co[['pcaproject']])[1, 1], 0.3049308, tolerance = 1e-7) expect_equal(Loadings(co[['pcaproject']], projected = T)[1, 1], 0.05788217444, tolerance = 1e-7) expect_equal(dim(co[['pcaproject.l2']]), c(160, 30)) expect_equal(Embeddings(co[['pcaproject.l2']])[1, 1], 0.04334884, tolerance = 1e-7) expect_equal(Loadings(co[['pcaproject.l2']], projected = T)[1, 1], 0.05788217444, tolerance = 1e-7) ref.cells <- paste0(Cells(ref), "_reference") query.cells <- paste0(Cells(query), "_query") expect_equal(anchors@reference.cells, ref.cells) expect_equal(anchors@query.cells, query.cells) expect_equal(anchors@reference.objects, logical()) anchor.mat <- anchors@anchors expect_equal(dim(anchor.mat), c(290, 3)) expect_equal(as.vector(anchor.mat[1, ]), c(1, 1, 0.6315789), tolerance = 1e-7) expect_equal(max(anchor.mat[, 2]), 80) expect_null(anchors@offsets) expect_equal(length(anchors@anchor.features), 220) expect_equal(anchors@anchor.features[1], "PPBP") expect_equal(anchors@neighbors, list()) }) test_that("FindTransferAnchors with SCT and l2.norm FALSE work", { skip_on_cran() anchors <- FindTransferAnchors(reference = ref, query = query, normalization.method = "SCT", l2.norm = FALSE, k.filter = 50) co <- anchors@object.list[[1]] expect_equal(dim(co), c(220, 160)) expect_equal(Reductions(co), c("pcaproject")) expect_equal(DefaultAssay(co), "SCT") expect_equal(GetAssayData(co[["SCT"]])[1, 1], 0) expect_equal(GetAssayData(co[["SCT"]], layer = "scale.data"), new("matrix")) expect_equal(dim(co[['pcaproject']]), c(160, 30)) expect_equal(Embeddings(co[['pcaproject']])[1, 1], -1.852491719, tolerance = 1e-7) expect_equal(Loadings(co[['pcaproject']], projected = T)[1, 1], -0.1829401539, tolerance = 1e-7) ref.cells <- paste0(Cells(ref), "_reference") query.cells <- paste0(Cells(query), "_query") expect_equal(anchors@reference.cells, ref.cells) expect_equal(anchors@query.cells, query.cells) expect_equal(anchors@reference.objects, logical()) anchor.mat <- anchors@anchors expect_equal(dim(anchor.mat), c(249, 3)) expect_equal(as.vector(anchor.mat[1, ]), c(1, 1, 0.760589319), tolerance = 1e-7) expect_equal(max(anchor.mat[, 2]), 80) expect_null(anchors@offsets) expect_equal(length(anchors@anchor.features), 220) expect_equal(anchors@anchor.features[1], "NKG7") expect_equal(anchors@neighbors, list()) }) Seurat/tests/testthat/test_data_manipulation.R0000644000176200001440000002252214525500037021376 0ustar liggesusers# Tests for functions in data_manipulation.cpp # change in random number generation in R3.6, this ensures tests will pass under older and newer Rs suppressWarnings(RNGversion(vstr = "3.5.3")) set.seed(42) library(Matrix) # Tests for row merging # -------------------------------------------------------------------------------- context("Row Merging") m1 <- rsparsematrix(10, 10, 0.1) m2 <- rsparsematrix(10, 10, 0.1) m1.names <- paste0("row", sample(1:10, size = 10)) m2.names <- paste0("row", sample(1:20, size = 10)) all.names <- union(m1.names, m2.names) rownames(m1) <- m1.names rownames(m2) <- m2.names m1 <- as(m1, "RsparseMatrix") m2 <- as(m2, "RsparseMatrix") test_that("Row merging done correctly", { m3 <- RowMergeMatrices(mat1 = m1, mat2 = m2, mat1_rownames = 
m1.names, mat2_rownames = m2.names, all_rownames = all.names) expect_equal(m3[1, 14], -0.17) expect_equal(m3[3, 2], -1.4) expect_equal(m3[14, 18], -0.43) expect_equal(length(m3), 280) }) #test_that("Row merging with a list done correctly", { # m3 <- RowMergeMatricesList(mat_list = list(m1, m2), mat_rownames = list(m1.names, m2.names), all_rownames = all.names) # expect_equal(m3[1, 14], -0.17) # expect_equal(m3[3, 2], -1.4) # expect_equal(m3[14, 18], -0.43) # expect_equal(length(m3), 280) #}) # Tests for log normalization # -------------------------------------------------------------------------------- context("Log Normalization") mat <- as(matrix(1:16, ncol = 4, nrow = 4), "sparseMatrix") test_that("Log Normalization returns expected values", { mat.norm.r <- log1p(sweep(mat, 2, Matrix::colSums(mat), FUN = "/") * 1e4) mat.norm <- LogNorm(mat, 1e4, display_progress = F) expect_equal(mat.norm[1, ], mat.norm.r[1, ]) expect_equal(mat.norm[4, 4], mat.norm.r[4, 4]) }) # Tests for scaling data # -------------------------------------------------------------------------------- context("Fast Scale Data Functions") mat <- matrix(rnorm(n = 10*15), nrow = 10, ncol = 15) # should be the equivalent of t(scale(t(mat))) test_that("Fast implementation of row scaling returns expected values", { expect_equal(t(scale(t(mat)))[1:10, 1:15], FastRowScale(mat)) expect_equal(t(scale(t(mat), center = FALSE))[1:10, 1:15], FastRowScale(mat, center = FALSE)) expect_equal(t(scale(t(mat), scale = FALSE))[1:10, 1:15], FastRowScale(mat, scale = FALSE)) expect_equal(t(scale(t(mat), scale = FALSE, center = F))[1:10, 1:15], FastRowScale(mat, scale = FALSE, center = F)) mat.clipped <- FastRowScale(mat, scale_max = 0.2) expect_true(max(mat.clipped, na.rm = T) >= 0.2) }) # should be the equivalent of scale(mat, TRUE, apply(mat, 2, sd)) test_that("Standardize returns expected values", { expect_equal(Standardize(mat, display_progress = FALSE), scale(mat, TRUE, apply(mat, 2, sd)), check.attributes = FALSE) }) # should be the equivalent of t(scale(t(mat))) mat <- rsparsematrix(10, 15, 0.1) test_that("Fast implementation of row scaling returns expected values", { expect_equal(t(scale(t(as.matrix(mat))))[1:10, 1:15], FastSparseRowScale(mat, display_progress = FALSE), check.attributes = FALSE) expect_equal(t(scale(t(as.matrix(mat)), center = FALSE))[1:10, 1:15], FastSparseRowScale(mat, center = FALSE, display_progress = FALSE), check.attributes = FALSE) expect_equal(t(scale(t(as.matrix(mat)), scale = FALSE))[1:10, 1:15], FastSparseRowScale(mat, scale = FALSE, display_progress = FALSE), check.attributes = FALSE) expect_equal(t(scale(t(as.matrix(mat)), scale = FALSE, center = F))[1:10, 1:15], FastSparseRowScale(mat, scale = FALSE, center = F, display_progress = FALSE), check.attributes = FALSE) mat.clipped <- FastSparseRowScale(mat, scale_max = 0.2, display_progress = F) expect_true(max(mat.clipped, na.rm = T) >= 0.2) }) mat <- as.sparse(x = matrix(rnorm(100), nrow = 10, ncol = 10)) test_that("Row scaling with known stats works", { mat.rowmeans <- rowMeans(x = mat) mat.sd <- apply(X = mat, MARGIN = 1, FUN = sd) expect_equal( t(scale(t(as.matrix(mat)), center = mat.rowmeans, scale = mat.sd)), FastSparseRowScaleWithKnownStats(mat = mat, mu = mat.rowmeans, sigma = mat.sd, scale = TRUE, center = TRUE, scale_max = 10, display_progress = FALSE), check.attributes = FALSE ) expect_equal( t(scale(t(as.matrix(mat)), center = FALSE, scale = mat.sd)), FastSparseRowScaleWithKnownStats(mat = mat, mu = mat.rowmeans, sigma = mat.sd, scale = TRUE, center 
= FALSE, scale_max = 10, display_progress = FALSE), check.attributes = FALSE ) expect_equal( t(scale(t(as.matrix(mat)), center = mat.rowmeans, scale = FALSE)), FastSparseRowScaleWithKnownStats(mat = mat, mu = mat.rowmeans, sigma = mat.sd, scale = FALSE, center = TRUE, scale_max = 10, display_progress = FALSE), check.attributes = FALSE ) mat.clipped <- FastSparseRowScaleWithKnownStats(mat = mat, mu = mat.rowmeans, sigma = mat.sd, scale = FALSE, center = TRUE, scale_max = 0.2, display_progress = FALSE) expect_true(max(mat.clipped, na.rm = T) >= 0.2) }) # Tests for fast basic stats functions # -------------------------------------------------------------------------------- context("Fast Basic Stats Functions") set.seed(42) mat <- replicate(10, rchisq(10, 4)) fcv <- FastCov(mat) cv <- cov(mat) test_that("Fast implementation of covariance returns expected values", { expect_equal(fcv[1,1], 9.451051142) expect_equal(fcv[10,10], 5.6650068) expect_equal(fcv, cv) }) mat2 <- replicate(10, rchisq(10, 4)) fcv <- FastCovMats(mat1 = mat, mat2 = mat2) cv <- cov(mat, mat2) test_that("Fast implementation of covariance returns expected values for matrices", { expect_equal(fcv[1,1], 1.523417, tolerance = 1e-6) expect_equal(fcv[10,10], -0.6031694, tolerance = 1e-6) expect_equal(fcv, cv) }) merged.mat <- FastRBind(mat, fcv) test_that("Fast implementation of rbind returns expected values", { expect_equal(merged.mat, rbind(mat, fcv)) expect_equal(mat[1,1], merged.mat[1,1]) expect_equal(fcv[10,10], merged.mat[20,10]) }) mat <- as.sparse(mat) test_that("Fast implementation of ExpMean returns expected values",{ expect_equal(ExpMean(mat[1,]), FastExpMean(mat, display_progress = F)[1]) expect_equal(ExpMean(mat[5,]), FastExpMean(mat, display_progress = F)[5]) expect_equal(ExpMean(mat[10,]), FastExpMean(mat, display_progress = F)[10]) expect_equal(length(FastExpMean(mat, display_progress = F)), nrow(mat)) expect_error(FastExpMean(mat[1, ], display_progress = F)) expect_equal(FastExpMean(mat[1, ,drop = F], display_progress = F), ExpMean(mat[1,])) expect_equal(FastExpMean(mat, display_progress = F)[1], 6.493418, tolerance = 1e-6) expect_equal(FastExpMean(mat, display_progress = F)[5], 6.255206, tolerance = 1e-6) expect_equal(FastExpMean(mat, display_progress = F)[10], 7.84965, tolerance = 1e-6) }) test_that("Fast implementation of LogVMR returns expected values", { expect_equal(LogVMR(mat[1,]), FastLogVMR(mat, display_progress = F)[1]) expect_equal(LogVMR(mat[5,]), FastLogVMR(mat, display_progress = F)[5]) expect_equal(LogVMR(mat[10,]), FastLogVMR(mat, display_progress = F)[10]) expect_equal(length(FastExpMean(mat, display_progress = F)), nrow(mat)) expect_error(FastLogVMR(mat[1, ], display_progress = F)) expect_equal(FastLogVMR(mat[1, ,drop = F], display_progress = F), LogVMR(mat[1,])) expect_equal(FastLogVMR(mat, display_progress = F)[1], 7.615384, tolerance = 1e-6) expect_equal(FastLogVMR(mat, display_progress = F)[5], 7.546768, tolerance = 1e-6) expect_equal(FastLogVMR(mat, display_progress = F)[10], 10.11755, tolerance = 1e-6) }) test_that("Row variance calculations for sparse matrices work", { expect_equal(apply(X = mat, MARGIN = 1, FUN = var), SparseRowVar(mat = mat, display_progress = FALSE), tolerance = 1e-6) expect_equal(apply(X = mat2, MARGIN = 1, FUN = var), SparseRowVar(mat = as.sparse(x = mat2), display_progress = FALSE), tolerance = 1e-6) }) # Tests for data structure manipulations # -------------------------------------------------------------------------------- context("Data structure manipulations") mat 
<- rsparsematrix(nrow = 10, ncol = 100, density = 0.1) mat2 <- rsparsematrix(nrow = 10, ncol = 10, density = 0.1) cols.to.replace1 <- 1:10 cols.to.replace2 <- 10:1 cols.to.replace3 <- 91:100 cols.to.replace4 <- c(10, 15, 33, 2, 6, 99, 55, 30, 25, 42) ReplaceCols <- function(mat, cols, replace){ mat[, cols] <- replace return(mat) } test_that("Replacing columns works", { expect_equal(ReplaceColsC(mat = mat, col_idx = cols.to.replace1 - 1, replacement = mat2), ReplaceCols(mat = mat, cols = cols.to.replace1, replace = mat2)) expect_equal(ReplaceColsC(mat = mat, col_idx = cols.to.replace2 - 1, replacement = mat2), ReplaceCols(mat = mat, cols = cols.to.replace2, replace = mat2)) expect_equal(ReplaceColsC(mat = mat, col_idx = cols.to.replace3 - 1, replacement = mat2), ReplaceCols(mat = mat, cols = cols.to.replace3, replace = mat2)) expect_equal(ReplaceColsC(mat = mat, col_idx = cols.to.replace4 - 1, replacement = mat2), ReplaceCols(mat = mat, cols = cols.to.replace4, replace = mat2)) }) test_that("Cpp implementation of row variance is correct", { expect_equal(apply(X = mat, MARGIN = 1, FUN = var), RowVar(as.matrix(mat))) expect_equal(apply(X = merged.mat, MARGIN = 1, FUN = var), RowVar(as.matrix(merged.mat))) }) Seurat/tests/testthat/test_utilities.R0000644000176200001440000001446214525500037017724 0ustar liggesusersset.seed(42) pbmc.file <- system.file('extdata', 'pbmc_raw.txt', package = 'Seurat') pbmc.test <- as.sparse(x = as.matrix(read.table(pbmc.file, sep = "\t", row.names = 1))) meta.data <- data.frame( a = rep(as.factor(c('a', 'b', 'c')), length.out = ncol(pbmc.test)), row.names = colnames(pbmc.test) ) object <- CreateSeuratObject( counts = pbmc.test, min.cells = 10, min.features = 30, meta.data = meta.data ) object <- NormalizeData(object) object <- SetIdent(object, value = 'a') group.by = "a" data <- FetchData(object = object, vars = rev(x = group.by)) data <- data[which(rowSums(x = is.na(x = data)) == 0), , drop = F] category.matrix.avg <- CreateCategoryMatrix(labels = data, method = 'average') category.matrix.sum <- CreateCategoryMatrix(labels = data, method = 'aggregate') test_that("CreateCategoryMatrix works for average and aggregate", { expect_equal(unname(colSums(category.matrix.avg)), c(1, 1, 1)) expect_equal(unname(colSums(category.matrix.sum)), c(27, 26, 24)) }) test_that("AverageExpression works for different layers", { #average expression on data layer is equal to log of average exponentiated data suppressWarnings(average.expression <- AverageExpression(object, layer = 'data')$RNA) counts.from.data.avg <- expm1(object[['RNA']]$data) %*% category.matrix.avg expect_equivalent( log1p(counts.from.data.avg), average.expression, tolerance = 1e-6 ) #average expression on counts layer is equal to average of counts suppressWarnings(average.counts <- AverageExpression(object, layer = 'counts')$RNA) avg.counts <- object[['RNA']]$data %*% category.matrix.avg expect_equivalent( avg.counts, average.counts, tolerance = 1e-6 ) #average expression on scale.data layer is equal to average of scale.data object <- ScaleData(object, features = rownames(object[['RNA']]$data)) suppressWarnings(average.scale.data <- AverageExpression(object, layer = 'scale.data')$RNA) avg.scale <- object[['RNA']]$scale.data %*% category.matrix.avg expect_equivalent( average.scale.data, avg.scale, tolerance = 1e-6 ) }) test_that("AverageExpression handles features properly", { features <- rownames(x = object)[1:10] average.expression <- AverageExpression(object, layer = 'data', features = features)$RNA 
expect_equal(rownames(x = average.expression), features) expect_warning(AverageExpression(object, layer = 'data', features = "BAD")) expect_warning(AverageExpression(object, layer = "data", features = c(features, "BAD"))) }) test_that("AverageExpression with return.seurat", { # counts avg.counts <- AverageExpression(object, layer = "counts", return.seurat = TRUE, verbose = FALSE) avg.counts.calc <- object[['RNA']]$counts %*% category.matrix.avg #test that counts are indeed equal to average counts expect_equivalent( as.matrix(avg.counts[['RNA']]$counts), as.matrix(avg.counts.calc), tolerance = 1e-6 ) expect_s4_class(object = avg.counts, "Seurat") avg.counts.mat <- AverageExpression(object, layer = 'counts')$RNA expect_equal(unname(as.matrix(LayerData(avg.counts[["RNA"]], layer = "counts"))), unname(as.matrix(avg.counts.mat))) avg.data <- LayerData(avg.counts[["RNA"]], layer = "data") #test that data returned is log1p of average counts expect_equivalent( as.matrix(log1p(avg.counts.mat)), as.matrix(avg.data), tolerance = 1e-6 ) #test that scale.data returned is scaled data avg.scale <- LayerData(avg.counts[["RNA"]], layer = "scale.data") expect_equal( avg.scale, ScaleData(avg.counts)[['RNA']]$scale.data, tolerance = 1e-6 ) # data avg.data <- AverageExpression(object, layer = "data", return.seurat = TRUE, verbose = FALSE) expect_s4_class(object = avg.data, "Seurat") avg.data.mat <- AverageExpression(object, layer = 'data')$RNA expect_equal(unname(as.matrix(LayerData(avg.data[["RNA"]], layer = "counts"))), unname(as.matrix(avg.data.mat))) expect_equal(unname(as.matrix(LayerData(avg.data[["RNA"]], layer = "data"))), as.matrix(unname(log1p(x = avg.data.mat)))) avg.scale <- LayerData(avg.data[["RNA"]], layer = "scale.data") expect_equal( avg.scale['MS4A1', ], c(a = -0.07823997, b = 1.0368218, c = -0.9585818), tolerance = 1e-6 ) expect_equal( avg.scale['SPON2', ], c(a = 0.1213127, b = 0.9338096, c = -1.0551222), tolerance = 1e-6 ) # scale.data object <- ScaleData(object = object, verbose = FALSE) avg.scale <- AverageExpression(object, layer = "scale.data", return.seurat = TRUE, verbose = FALSE) expect_s4_class(object = avg.scale, "Seurat") avg.scale.mat <- AverageExpression(object, layer = 'scale.data')$RNA expect_equal(unname(as.matrix(LayerData(avg.scale[["RNA"]], layer = "scale.data"))), unname(as.matrix(avg.scale.mat))) }) test.dat <- LayerData(object = object, layer = "data") rownames(x = test.dat) <- paste0("test-", rownames(x = test.dat)) object[["TEST"]] <- CreateAssayObject(data = test.dat) test_that("AverageExpression with multiple assays", { avg.test <- AverageExpression(object = object, assays = "TEST", layer = "data") expect_equal(names(x = avg.test), "TEST") expect_equal(length(x = avg.test), 1) expect_equivalent( avg.test[[1]]['test-KHDRBS1', 1:3], c(a = 10.329153, b = 92.287109, c = 5.620942), tolerance = 1e-6 ) expect_equivalent( avg.test[[1]]['test-DNAJB1', 1:3] , c(a = 42.32240, b = 15.94807, c = 15.96319), tolerance = 1e-6 ) avg.all <- AverageExpression(object = object, layer = "data") expect_equal(names(x = avg.all), c("RNA", "TEST")) expect_equal(length(x = avg.all), 2) }) meta.data.2 <- data.frame( b = rep(as.factor(c('c', 'd', 'e')), length.out = ncol(pbmc.test)), row.names = colnames(pbmc.test) ) object <- AddMetaData(object, meta.data.2) if(class(object[['RNA']]) == "Assay5") { test_that("AggregateExpression works with multiple layers", { object.split <- split(object, f = object$b) aggregate.split <- AggregateExpression(object.split, assay = "RNA") aggregate <- 
AggregateExpression(object, assay = "RNA") expect_equivalent( aggregate.split$RNA, aggregate$RNA, tolerance = 1e-6 ) avg.split <- AverageExpression(object.split, assay = "RNA") avg <- AverageExpression(object, assay = "RNA") expect_equivalent( avg.split$RNA, avg$RNA, tolerance = 1e-6 ) }) } Seurat/tests/testthat/test_objects.R0000644000176200001440000000173714525500037017343 0ustar liggesusers# Tests for functions in objects.R # Tests for SCE conversion # ------------------------------------------------------------------------------ test_that("as.SingleCellExperiment works", { skip_on_cran() if (requireNamespace('SingleCellExperiment', quietly = TRUE)) { mat <- pbmc_small[["RNA"]]$counts seuratObj <- Seurat::CreateSeuratObject(mat) sce <- suppressWarnings(as.SingleCellExperiment(seuratObj)) expect_equal(ncol(sce), 80) expect_equal(nrow(sce), 230) # expect_equal(length(SingleCellExperiment::altExps(sce)), 0) # expect_equal(SingleCellExperiment::mainExpName(sce), 'RNA') seuratObj <- Seurat::CreateSeuratObject(mat) seuratObj[['ADT']] <- CreateAssayObject(mat) sce <- suppressWarnings(as.SingleCellExperiment(seuratObj)) expect_equal(ncol(sce), 80) expect_equal(nrow(sce), 230) # expect_equal(names(SingleCellExperiment::altExps(sce)), 'ADT') # expect_equal(SingleCellExperiment::mainExpName(sce), 'RNA') } }) Seurat/tests/testthat/test_read_mtx.R0000644000176200001440000000275414525500037017515 0ustar liggesuserscontext("ReadMtx") test_that("skip.cell and skip.feature work", { skip_on_cran() mtx <- "ftp://ftp.ncbi.nlm.nih.gov/geo/series/GSE126nnn/GSE126836/suppl/GSE126836_SN_MD5828_matrix.mtx.gz" features <- "ftp://ftp.ncbi.nlm.nih.gov/geo/series/GSE126nnn/GSE126836/suppl/GSE126836_SN_MD5828_genes.csv.gz" cells <- "ftp://ftp.ncbi.nlm.nih.gov/geo/series/GSE126nnn/GSE126836/suppl/GSE126836_SN_MD5828_barcodes.csv.gz" counts1 <- ReadMtx(mtx = mtx, cells = cells, features = features, feature.column = 1, skip.cell = 1, skip.feature = 1) expect_is(counts1, "dgCMatrix") expect_equal(ncol(counts1), 1436) expect_equal(nrow(counts1), 29445) expect_equal(colnames(counts1)[5], "MD5828a_GGGCATCCAATGAAAC-1") expect_equal(rownames(counts1)[2], "A1BG-AS1") }) test_that("ReadMtx works", { skip_on_cran() mtx <- "https://www.ncbi.nlm.nih.gov/geo/download/?acc=GSE127774&format=file&file=GSE127774%5FACC%5FB%5Fmatrix%2Emtx%2Egz" cells <- "https://www.ncbi.nlm.nih.gov/geo/download/?acc=GSE127774&format=file&file=GSE127774%5FACC%5FB%5Fbarcodes%2Etsv%2Egz" features <- "https://www.ncbi.nlm.nih.gov/geo/download/?acc=GSE127774&format=file&file=GSE127774%5FACC%5FB%5Fgenes%2Etsv%2Egz" counts2 <- ReadMtx(mtx = mtx, cells = cells, features = features, feature.column = 1) expect_is(counts2, "dgCMatrix") expect_equal(ncol(counts2), 22063) expect_equal(nrow(counts2), 22530) expect_equal(colnames(counts2)[1], "AAACCTGAGCAATCTC-1") expect_equal(rownames(counts2)[2], "ENSPPAG00000040697") }) Seurat/tests/testthat/test_integratedata.R0000644000176200001440000002251314525500037020521 0ustar liggesusers# Tests for integration related fxns set.seed(42) pbmc_small <- suppressWarnings(UpdateSeuratObject(pbmc_small)) # Setup test objects ref <- pbmc_small ref <- FindVariableFeatures(object = ref, verbose = FALSE, nfeatures = 100) query <- CreateSeuratObject( counts = as.sparse( GetAssayData( object = pbmc_small[['RNA']], layer = "counts") + rpois(n = ncol(pbmc_small), lambda = 1 ) ) ) query2 <- CreateSeuratObject( counts = as.sparse( LayerData( object = pbmc_small[['RNA']], layer = "counts")[, 1:40] + rpois(n = ncol(pbmc_small), lambda = 1 ) 
) ) query.list <- list(query, query2) query.list <- lapply(X = query.list, FUN = NormalizeData, verbose = FALSE) query.list <- lapply(X = query.list, FUN = FindVariableFeatures, verbose = FALSE, nfeatures = 100) query.list <- lapply(X = query.list, FUN = ScaleData, verbose = FALSE) query.list <- suppressWarnings(lapply(X = query.list, FUN = RunPCA, verbose = FALSE, npcs = 20)) anchors2 <- suppressMessages(suppressWarnings(FindIntegrationAnchors(object.list = c(ref, query.list[[1]]), k.filter = NA, verbose = FALSE))) anchors3 <- suppressMessages(suppressWarnings(FindIntegrationAnchors(object.list = c(ref, query.list), k.filter = NA, verbose = FALSE))) # Tests for IntegrateEmbeddings # ------------------------------------------------------------------------------ # context("IntegrateEmbeddings") # test_that("IntegrateEmbeddings validates properly", { # expect_error(IntegrateEmbeddings(anchorset = anchors2)) # expect_error(IntegrateEmbeddings(anchorset = anchors2, reduction = "pca", k.weight = 100)) # expect_error(IntegrateEmbeddings(anchorset = anchors2, reduction = c("pca", "pca2"), k.weight = 40)) # expect_error(IntegrateEmbeddings(anchorset = anchors2, reduction = "pca", k.weight = 40, weight.reduction = c(ref[['pca']]))) # pca3 <- RenameCells(object = ref[['pca']], new.names = paste0(Cells(ref), "_test")) # expect_error(IntegrateEmbeddings(anchorset = anchors2, reduction = "pca", k.weight = 40, # weight.reduction = c(pca3, ref[['pca']]))) # }) # # test_that("IntegrateEmbeddings with two objects default works", { # skip_on_cran() # int2 <- IntegrateEmbeddings(anchorset = anchors2, reduction = "pca", k.weight = 40, verbose = FALSE) # expect_equal(Reductions(int2), "integrated_pca") # expect_equal(sum(Embeddings(int2[['integrated_pca']])[1,]), -3.13050872287, tolerance = 1e-6) # expect_equal(sum(Embeddings(int2[['integrated_pca']])[,1]), -5.78790844887, tolerance = 1e-6) # }) # # test_that("IntegrateEmbeddings with three objects default works", { # skip_on_cran() # int3 <- IntegrateEmbeddings(anchorset = anchors3, reduction = "pca", k.weight = 40, verbose = FALSE) # expect_equal(Reductions(int3), "integrated_pca") # expect_equal(sum(Embeddings(int3[['integrated_pca']])[1,]), 0.221867815987, tolerance = 1e-6) # expect_equal(sum(Embeddings(int3[['integrated_pca']])[,1]), -16.7881409595, tolerance = 1e-6) # }) # # test_that("IntegrateEmbeddings works with specified reference objects", { # skip_on_cran() # anchors4 <- suppressMessages(suppressWarnings(FindIntegrationAnchors(object.list = c(ref, query.list), k.filter = NA, verbose = FALSE, reference = 1))) # int4 <- IntegrateEmbeddings(anchorset = anchors4, reduction = "pca", k.weight = 40, verbose = FALSE) # expect_equal(Reductions(int4), "integrated_pca") # expect_equal(sum(Embeddings(int4[['integrated_pca']])[1,]), -3.13050872287, tolerance = 1e-6) # expect_equal(sum(Embeddings(int4[['integrated_pca']])[,1]), 13.1180105492, tolerance = 1e-6) # }) # Tests for IntegrateData # ------------------------------------------------------------------------------ context("IntegrateData") test_that("IntegrateData with two objects default work", { expect_error(IntegrateData(anchorset = anchors2)) int2 <- IntegrateData(anchorset = anchors2, k.weight = 50, verbose = FALSE) expect_true(all(Assays(int2) %in% c("integrated", "RNA"))) expect_equal(Tool(int2), "Integration") expect_equal(dim(int2[["integrated"]]), c(133, 160)) expect_equal(length(VariableFeatures(int2)), 133) expect_equal(GetAssayData(int2[["integrated"]], layer = "counts"), new("dgCMatrix")) 
expect_equal(GetAssayData(int2[['integrated']], layer = "scale.data"), matrix()) expect_equal(sum(GetAssayData(int2[["integrated"]], layer = "data")[1, ]), 44.97355, tolerance = 1e-3) expect_equal(sum(GetAssayData(int2[["integrated"]], layer = "data")[, 1]), 78.8965706046, tolerance = 1e-6) expect_equal(Tool(object = int2, slot = "Integration")@sample.tree, matrix(c(-1, -2), nrow = 1)) }) test_that("IntegrateData with three objects default work", { expect_error(IntegrateData(anchorset = anchors3, k.weight = 50)) int3 <- IntegrateData(anchorset = anchors3, k.weight = 25, verbose = FALSE) expect_true(all(Assays(int3) %in% c("integrated", "RNA"))) expect_equal(Tool(int3), "Integration") expect_equal(dim(int3[["integrated"]]), c(169, 200)) expect_equal(length(VariableFeatures(int3)), 169) expect_equal(GetAssayData(int3[["integrated"]], layer = "counts"), new("dgCMatrix")) expect_equal(GetAssayData(int3[['integrated']], layer = "scale.data"), matrix()) expect_equal(sum(GetAssayData(int3[["integrated"]], layer = "data")[1, ]), 372.829, tolerance = 1e-6) expect_equal(sum(GetAssayData(int3[["integrated"]], layer = "data")[, 1]), 482.5009, tolerance = 1e-6) expect_equal(Tool(object = int3, slot = "Integration")@sample.tree, matrix(c(-2, -3, 1, -1), nrow = 2, byrow = TRUE)) }) test_that("Input validates correctly ", { expect_error(IntegrateData(anchorset = anchors2, k.weight = 50, features.to.integrate = "BAD")) expect_error(IntegrateData(anchorset = anchors2, k.weight = 50, normalization.method = "BAD")) expect_error(IntegrateData(anchorset = anchors2, k.weight = 50, weight.reduction = "BAD")) expect_error(IntegrateData(anchorset = anchors2, reductions.to.integrate = "pca")) skip_on_cran() #expect_warning(IntegrateData(anchorset = anchors2, k.weight = 50, features = c(rownames(ref), "BAD"))) #expect_warning(IntegrateData(anchorset = anchors2, k.weight = 50, dims = 1:1000)) }) # Tests for IntegrateLayers # ------------------------------------------------------------------------------ context("IntegrateLayers") pbmc_small[['RNAv5']] <- CreateAssay5Object(counts = LayerData(pbmc_small[['RNA']], layer = "counts")) pbmc_small[["RNAv5"]] <- split(pbmc_small[["RNAv5"]], f = pbmc_small$groups) DefaultAssay(pbmc_small) <- "RNAv5" pbmc_small <- NormalizeData(pbmc_small) pbmc_small <- FindVariableFeatures(pbmc_small) pbmc_small <- ScaleData(pbmc_small) pbmc_small <- suppressMessages(suppressWarnings(RunPCA(pbmc_small))) test_that("IntegrateLayers does not work on a v3 assay ", { expect_error(IntegrateLayers(object = pbmc_small, method = CCAIntegration, orig.reduction = "pca", assay = "RNA", new.reduction = "integrated.cca")) }) test_that("IntegrateLayers errors out if incorrect input ", { expect_error(IntegrateLayers(object = pbmc_small, method = CCAIntegration, orig.reduction = "pca", assay = "DNA", new.reduction = "integrated.cca")) expect_error(IntegrateLayers(object = pbmc_small, method = CCAIntegration, orig.reduction = "lda", new.reduction = "integrated.cca")) }) # integration methods int_cca <- suppressMessages(suppressWarnings(IntegrateLayers( object = pbmc_small, method = CCAIntegration, orig.reduction = "pca", new.reduction = "integrated.cca", k.weight=25, verbose = FALSE ))) int_rpca <- suppressMessages(suppressWarnings(IntegrateLayers( object = pbmc_small, method = RPCAIntegration, orig.reduction = "pca", new.reduction = "integrated.rpca", dims = 1:10, k.anchor = 10, k.weight=10, verbose = FALSE ))) # int_mnn <- suppressMessages(suppressWarnings(IntegrateLayers( # object = pbmc_small, method = FastMNNIntegration, 
# new.reduction = "integrated.mnn", # k.weight=25, # verbose = FALSE # ))) test_that("IntegrateLayers returns embeddings with correct dimensions ", { expect_equal(dim(int_cca[["integrated.cca"]]), c(80, 50)) expect_equal(dim(int_rpca[["integrated.rpca"]]), c(80, 50)) int_rpca expect_equal(int_cca[["integrated.cca"]]@assay.used, "RNAv5") #expect_equal(int_cca[['integrated.cca']]@cell.embeddings, c(3, 4, 5)) }) test_that("IntegrateLayers works with harmony", { skip_on_cran() skip_if_not_installed("harmony") int_harmony <- suppressMessages(suppressWarnings(IntegrateLayers( object = pbmc_small, method = HarmonyIntegration, orig.reduction = "pca", new.reduction = "harmony", k.weight=25, verbose = FALSE ))) expect_equal(dim(int_harmony[["harmony"]]), c(80, 50)) }) test_that("group.by ", { expect_equal(dim(int_cca[["integrated.cca"]]), c(80, 50)) expect_equal(int_cca[["integrated.cca"]]@assay.used, "RNAv5") }) #Harmony integration # int_2 <- IntegrateLayers(object = pbmc_small, method = CCAIntegration, # group.by = "letter.idents", # orig.reduction = "pca", # assay = "RNAv5", # k.weight = 20, # new.reduction = "integrated.cca") # # head(int_2[['integrated.cca']]@cell.embeddings[1:5,1:5]) # head(int_cca[['integrated.cca']]@cell.embeddings[1:5,1:5]) Seurat/tests/testthat/test_preprocessing.R0000644000176200001440000006044714525500037020600 0ustar liggesusers# Tests for functions dependent on a seurat object set.seed(42) pbmc.file <- system.file('extdata', 'pbmc_raw.txt', package = 'Seurat') pbmc.test <- as.sparse(x = as.matrix(read.table(pbmc.file, sep = "\t", row.names = 1))) # Tests for object creation (via CreateSeuratObject) # -------------------------------------------------------------------------------- context("Object creation") fake.meta.data <- data.frame(rep(1, ncol(pbmc.test))) rownames(fake.meta.data) <- colnames(pbmc.test) colnames(fake.meta.data) <- "FMD" object <- CreateSeuratObject(counts = pbmc.test, meta.data = fake.meta.data) test_that("object initialization actually creates seurat object", { expect_is(object, "Seurat") }) #this should be moved to seurat object # test_that("meta.data slot generated correctly", { # expect_equal(dim(object[[]]), c(80, 4)) # expect_equal(colnames(object[[]]), c("orig.ident", "nCount_RNA", "nFeature_RNA", "FMD")) # expect_equal(rownames(object[[]]), colnames(object)) # expect_equal(object[["nFeature_RNA"]][1:5, ], c(47, 52, 50, 56, 53)) # expect_equal(object[["nCount_RNA"]][75:80, ], c(228, 527, 202, 157, 150, 233)) # }) object.filtered <- CreateSeuratObject( counts = pbmc.test, min.cells = 10, min.features = 30 ) test_that("Filtering handled properly", { expect_equal(nrow(x = LayerData(object = object.filtered, layer = "counts")), 163) expect_equal(ncol(x = LayerData(object = object.filtered, layer = "counts")), 77) }) #this should be moved to seurat object # test_that("Metadata check errors correctly", { # pbmc.md <- pbmc_small[[]] # pbmc.md.norownames <- as.matrix(pbmc.md) # rownames(pbmc.md.norownames) <- NULL # expect_error(CreateSeuratObject(counts = pbmc.test, meta.data = pbmc.md.norownames), # "Row names not set in metadata. 
Please ensure that rownames of metadata match column names of data matrix") # }) # Tests for NormalizeData # -------------------------------------------------------------------------------- context("NormalizeData") test_that("NormalizeData error handling", { expect_error(NormalizeData(object = object, assay = "FAKE")) expect_equal( object = LayerData( object = NormalizeData( object = object, normalization.method = NULL, verbose = FALSE ), layer = "data" ), expected = LayerData(object = object, layer = "counts") ) }) object <- NormalizeData(object = object, verbose = FALSE, scale.factor = 1e6) test_that("NormalizeData scales properly", { expect_equal(LayerData(object = object, layer = "data")[2, 1], 9.567085, tolerance = 1e-6) expect_equal(LayerData(object = object, layer = "data")[161, 55], 8.415309, tolerance = 1e-6) expect_equal(Command(object = object, command = "NormalizeData.RNA", value = "scale.factor"), 1e6) expect_equal(Command(object = object, command = "NormalizeData.RNA", value = "normalization.method"), "LogNormalize") }) normalized.data <- LogNormalize(data = GetAssayData(object = object[["RNA"]], layer = "counts"), verbose = FALSE) test_that("LogNormalize normalizes properly", { expect_equal( as.matrix(LogNormalize(data = GetAssayData(object = object[["RNA"]], layer = "counts"), verbose = FALSE)), as.matrix(LogNormalize(data = as.data.frame(as.matrix(GetAssayData(object = object[["RNA"]], layer = "counts"))), verbose = FALSE)) ) }) clr.counts <- NormalizeData(object = pbmc.test, normalization.method = "CLR", verbose = FALSE) test_that("CLR normalization returns expected values", { expect_equal(dim(clr.counts), c(dim(pbmc.test))) expect_equal(clr.counts[2, 1], 0.5517828, tolerance = 1e-6) expect_equal(clr.counts[228, 76], 0.5971381, tolerance = 1e-6) expect_equal(clr.counts[230, 80], 0) }) rc.counts <- NormalizeData(object = pbmc.test, normalization.method = "RC", verbose = FALSE) test_that("Relative count normalization returns expected values", { expect_equal(rc.counts[2, 1], 142.8571, tolerance = 1e-6) expect_equal(rc.counts[228, 76], 18.97533, tolerance = 1e-6) expect_equal(rc.counts[230, 80], 0) rc.counts <- NormalizeData(object = pbmc.test, normalization.method = "RC", verbose = FALSE, scale.factor = 1e6) expect_equal(rc.counts[2, 1], 14285.71, tolerance = 1e-6) }) # Tests for v5 NormalizeData # -------------------------------------------------------------------------------- context("v5 NormalizeData") if(class(object[['RNA']]) == "Assay5") { fake.groups <- c(rep(1, floor(ncol(pbmc.test)/2)), rep(2, ncol(pbmc.test) - (floor(ncol(pbmc.test)/2))) ) object$groups <- fake.groups object.split <- CreateSeuratObject(split(object[["RNA"]], f = object$groups)) object.split <- NormalizeData(object = object.split) group1 <- subset(object, groups==1) group1 <- NormalizeData(group1) test_that("Normalization is performed for each layer", { expect_equal(Layers(object.split),c("counts.1", "counts.2", "data.1", "data.2")) expect_equal(group1[['RNA']]$data, LayerData(object.split, layer="data.1")) }) object.split <- NormalizeData(object = object.split, normalization.method = "CLR", verbose = FALSE) group1 <- NormalizeData(object = group1, normalization.method = "CLR", verbose = FALSE) test_that("CLR normalization works with multiple layers", { expect_equal(Layers(object.split),c("counts.1", "counts.2", "data.1", "data.2")) expect_equal(group1[['RNA']]$data, LayerData(object.split, layer="data.1")) }) object.split <- NormalizeData(object = object.split, normalization.method = "RC", verbose 
= FALSE) group1 <- NormalizeData(object = group1, normalization.method = "RC", verbose = FALSE) test_that("RC normalization works with multiple layers", { expect_equal(Layers(object.split),c("counts.1", "counts.2", "data.1", "data.2")) expect_equal(group1[['RNA']]$data, LayerData(object.split, layer="data.1")) }) } test_that("NormalizeData scales properly for BPcells", { # Tests for BPCells NormalizeData # -------------------------------------------------------------------------------- skip_on_cran() library(Matrix) skip_if_not_installed("BPCells") library(BPCells) mat_bpcells <- t(as(t(object[['RNA']]$counts ), "IterableMatrix")) object[['RNAbp']] <- CreateAssay5Object(counts = mat_bpcells) object <- NormalizeData(object = object, verbose = FALSE, scale.factor = 1e6, assay = "RNAbp") object <- NormalizeData(object = object, verbose = FALSE, scale.factor = 1e6, assay = "RNA") expect_equal(as.matrix(object[['RNAbp']]$data), as.matrix(object[['RNA']]$data), tolerance = 1e-6) expect_equal(Command(object = object, command = "NormalizeData.RNAbp", value = "scale.factor"), 1e6) expect_equal(Command(object = object, command = "NormalizeData.RNAbp", value = "normalization.method"), "LogNormalize") }) test_that("LogNormalize normalizes properly for BPCells", { skip_on_cran() library(Matrix) skip_if_not_installed("BPCells") library(BPCells) mat_bpcells <- t(as(t(object[['RNA']]$counts ), "IterableMatrix")) object[['RNAbp']] <- CreateAssay5Object(counts = mat_bpcells) object <- NormalizeData(object = object, verbose = FALSE, scale.factor = 1e6, assay = "RNAbp") object <- NormalizeData(object = object, verbose = FALSE, scale.factor = 1e6, assay = "RNA") normalized.data.bp <- LogNormalize(data = GetAssayData(object = object[["RNAbp"]], layer = "counts"), verbose = FALSE) normalized.data <- LogNormalize(data = GetAssayData(object = object[["RNA"]], layer = "counts"), verbose = FALSE) expect_equal( as.matrix(normalized.data.bp), as.matrix(normalized.data), tolerance = 1e-6 ) }) # Tests for ScaleData # -------------------------------------------------------------------------------- context("ScaleData") object <- ScaleData(object, verbose = FALSE) test_that("ScaleData returns expected values when input is a sparse matrix", { expect_equal(GetAssayData(object = object[["RNA"]], layer = "scale.data")[1, 1], -0.4148587, tolerance = 1e-6) expect_equal(GetAssayData(object = object[["RNA"]], layer = "scale.data")[75, 25], -0.2562305, tolerance = 1e-6) expect_equal(GetAssayData(object = object[["RNA"]], layer = "scale.data")[162, 59], -0.4363939, tolerance = 1e-6) }) new.data <- as.matrix(GetAssayData(object = object[["RNA"]], layer = "data")) new.data[1, ] <- rep(x = 0, times = ncol(x = new.data)) object2 <- object object2 <- SetAssayData( object = object, assay = "RNA", slot = "data", new.data = new.data ) object2 <- ScaleData(object = object2, verbose = FALSE) object <- ScaleData(object = object, verbose = FALSE) test_that("ScaleData returns expected values when input is not sparse", { expect_equal(GetAssayData(object = object[["RNA"]], layer = "scale.data")[75, 25], -0.2562305, tolerance = 1e-6) expect_equal(GetAssayData(object = object[["RNA"]], layer = "scale.data")[162, 59], -0.4363939, tolerance = 1e-6) }) test_that("ScaleData handles zero variance features properly", { expect_equal(GetAssayData(object = object2[["RNA"]], layer = "scale.data")[1, 1], 0) expect_equal(GetAssayData(object = object2[["RNA"]], layer = "scale.data")[1, 80], 0) }) ng1 <- rep(x = "g1", times = round(x = ncol(x = object) / 2)) 
object$group <- c(ng1, rep(x = "g2", times = ncol(x = object) - length(x = ng1))) g1 <- subset(x = object, group == "g1") g1 <- ScaleData(object = g1, features = rownames(x = g1), verbose = FALSE) g2 <- subset(x = object, group == "g2") g2 <- ScaleData(object = g2, features = rownames(x = g2), verbose = FALSE) object <- ScaleData(object = object, features = rownames(x = object), verbose = FALSE, split.by = "group") #move to SeuratObject # test_that("split.by option works", { # expect_equal(GetAssayData(object = object, layer = "scale.data")[, Cells(x = g1)], # GetAssayData(object = g1, layer = "scale.data")) # expect_equal(GetAssayData(object = object, layer = "scale.data")[, Cells(x = g2)], # GetAssayData(object = g2, layer = "scale.data")) # }) g1 <- ScaleData(object = g1, features = rownames(x = g1), vars.to.regress = "nCount_RNA", verbose = FALSE) g2 <- ScaleData(object = g2, features = rownames(x = g2), vars.to.regress = "nCount_RNA", verbose = FALSE) object <- ScaleData(object = object, features = rownames(x = object), verbose = FALSE, split.by = "group", vars.to.regress = "nCount_RNA") test_that("split.by option works with regression", { expect_equal(LayerData(object = object, layer = "scale.data")[, Cells(x = g1)], LayerData(object = g1, layer = "scale.data")) expect_equal(LayerData(object = object, layer = "scale.data")[, Cells(x = g2)], LayerData(object = g2, layer = "scale.data")) }) # Tests for various regression techniques context("Regression") suppressWarnings({ object <- ScaleData( object = object, vars.to.regress = "nCount_RNA", features = rownames(x = object)[1:10], verbose = FALSE, model.use = "linear") }) test_that("Linear regression works as expected", { expect_equal(dim(x = GetAssayData(object = object[["RNA"]], layer = "scale.data")), c(10, 80)) expect_equal(GetAssayData(object = object[["RNA"]], layer = "scale.data")[1, 1], -0.6436435, tolerance = 1e-6) expect_equal(GetAssayData(object = object[["RNA"]], layer = "scale.data")[5, 25], -0.09035383, tolerance = 1e-6) expect_equal(GetAssayData(object = object[["RNA"]], layer = "scale.data")[10, 80], -0.2723782, tolerance = 1e-6) }) object <- ScaleData( object, vars.to.regress = "nCount_RNA", features = rownames(x = object)[1:10], verbose = FALSE, model.use = "negbinom") test_that("Negative binomial regression works as expected", { expect_equal(dim(x = GetAssayData(object = object[["RNA"]], layer = "scale.data")), c(10, 80)) expect_equal(GetAssayData(object = object[["RNA"]], layer = "scale.data")[1, 1], -0.5888811, tolerance = 1e-6) expect_equal(GetAssayData(object = object[["RNA"]], layer = "scale.data")[5, 25], -0.2553394, tolerance = 1e-6) expect_equal(GetAssayData(object = object[["RNA"]], layer = "scale.data")[10, 80], -0.1921429, tolerance = 1e-6) }) test_that("Regression error handling checks out", { expect_error(ScaleData(object, vars.to.regress = "nCount_RNA", model.use = "not.a.model", verbose = FALSE)) }) object <- ScaleData( object, vars.to.regress = "nCount_RNA", features = rownames(x = object)[1:10], verbose = FALSE, model.use = "poisson") test_that("Poisson regression works as expected", { expect_equal(dim(x = GetAssayData(object = object[["RNA"]], layer = "scale.data")), c(10, 80)) expect_equal(GetAssayData(object = object[["RNA"]], layer = "scale.data")[1, 1], -1.011717, tolerance = 1e-6) expect_equal(GetAssayData(object = object[["RNA"]], layer = "scale.data")[5, 25], 0.05575307, tolerance = 1e-6) expect_equal(GetAssayData(object = object[["RNA"]], layer = "scale.data")[10, 80], -0.1662119, tolerance = 
1e-6) }) #Tests for SampleUMI #-------------------------------------------------------------------------------- context("SampleUMI") downsampled.umis <- SampleUMI( data = LayerData(object = object, layer = "counts"), max.umi = 100, verbose = FALSE ) downsampled.umis.p.cell <- SampleUMI( data = LayerData(object = object, layer = "counts"), max.umi = seq(50, 1640, 20), verbose = FALSE, upsample = TRUE ) test_that("SampleUMI gives reasonable downsampled/upsampled UMI counts", { expect_true(!any(colSums(x = downsampled.umis) < 30, colSums(x = downsampled.umis) > 120)) expect_error(SampleUMI(data = LayerData(object = object, layer = "counts"), max.umi = rep(1, 5))) expect_true(!is.unsorted(x = colSums(x = downsampled.umis.p.cell))) expect_error(SampleUMI( data = LayerData(object = object, layer = "counts"), max.umi = seq(50, 900, 10), verbose = FALSE, upsample = TRUE )) }) # Tests for FindVariableFeatures # -------------------------------------------------------------------------------- context("FindVariableFeatures") object <- FindVariableFeatures(object = object, selection.method = "mean.var.plot", verbose = FALSE) test_that("mean.var.plot selection option returns expected values", { expect_equal(VariableFeatures(object = object)[1:4], c("PTGDR", "SATB1", "ZNF330", "S100B")) expect_equal(length(x = VariableFeatures(object = object)), 20) hvf_info <- HVFInfo(object = object[["RNA"]], method = 'mvp') expect_equal(hvf_info[[grep("mean$", colnames(hvf_info), value = TRUE)]][1:2], c(8.328927, 8.444462), tolerance = 1e-6) expect_equal(hvf_info[[grep("dispersion$", colnames(hvf_info), value = TRUE)]][1:2], c(10.552507, 10.088223), tolerance = 1e-6) expect_equal(as.numeric(hvf_info[[grep("dispersion.scaled$", colnames(hvf_info), value = TRUE)]][1:2]), c(0.1113214, -0.1332181523), tolerance = 1e-6) }) object <- FindVariableFeatures(object, selection.method = "dispersion", verbose = FALSE) test_that("dispersion selection option returns expected values", { expect_equal(VariableFeatures(object = object)[1:4], c("PCMT1", "PPBP", "LYAR", "VDAC3")) expect_equal(length(x = VariableFeatures(object = object)), 230) hvf_info <- HVFInfo(object = object[["RNA"]], method = 'mvp') expect_equal(hvf_info[[grep("mean$", colnames(hvf_info), value = TRUE)]][1:2], c(8.328927, 8.444462), tolerance = 1e-6) expect_equal(hvf_info[[grep("dispersion$", colnames(hvf_info), value = TRUE)]][1:2], c(10.552507, 10.088223), tolerance = 1e-6) expect_equal(as.numeric(hvf_info[[grep("dispersion.scaled$", colnames(hvf_info), value = TRUE)]][1:2]), c(0.1113214, -0.1332181523), tolerance = 1e-6) expect_true(!is.unsorted(rev(hvf_info[VariableFeatures(object = object), "dispersion"]))) }) object <- FindVariableFeatures(object, selection.method = "vst", verbose = FALSE) test_that("vst selection option returns expected values", { expect_equal(VariableFeatures(object = object)[1:4], c("PPBP", "IGLL5", "VDAC3", "CD1C")) expect_equal(length(x = VariableFeatures(object = object)), 230) hvf_info <- HVFInfo(object = object[["RNA"]], method = 'vst') expect_equal(hvf_info[[grep("variance$", colnames(hvf_info), value = TRUE)]][1:2], c(1.0251582, 1.2810127), tolerance = 1e-6) expect_equal(hvf_info[[grep("variance.standardized$", colnames(hvf_info), value = TRUE)]][1:2], c(0.8983463, 0.4731134), tolerance = 1e-6) expect_true(!is.unsorted(rev(hvf_info[VariableFeatures(object = object), grep("variance.standardized$", colnames(hvf_info))]))) }) #object <- FindVariableFeatures(object, assay = "RNAbp") #this breaks currently # Tests for internal functions # 
# Tests for internal functions
# ------------------------------------------------------------------------------
norm.fxn <- function(x) {x / mean(x)}
test_that("CustomNormalize works as expected", {
  expect_equal(
    CustomNormalize(data = pbmc.test, custom_function = norm.fxn, margin = 2),
    apply(X = pbmc.test, MARGIN = 2, FUN = norm.fxn)
  )
  expect_equal(
    CustomNormalize(data = as.matrix(pbmc.test), custom_function = norm.fxn, margin = 2),
    apply(X = pbmc.test, MARGIN = 2, FUN = norm.fxn)
  )
  expect_equal(
    CustomNormalize(data = as.data.frame(as.matrix(pbmc.test)), custom_function = norm.fxn, margin = 2),
    apply(X = pbmc.test, MARGIN = 2, FUN = norm.fxn)
  )
  expect_equal(
    CustomNormalize(data = pbmc.test, custom_function = norm.fxn, margin = 1),
    t(apply(X = pbmc.test, MARGIN = 1, FUN = norm.fxn))
  )
  expect_error(CustomNormalize(data = pbmc.test, custom_function = norm.fxn, margin = 10))
})

# Tests for SCTransform
# --------------------------------------------------------------------------------
context("SCTransform")
object <- suppressWarnings(SCTransform(object = object, verbose = FALSE, vst.flavor = "v1", seed.use = 1448145))
test_that("SCTransform v1 works as expected", {
  expect_true("SCT" %in% names(object))
  expect_equal(as.numeric(colSums(GetAssayData(object = object[["SCT"]], layer = "scale.data"))[1]), 11.40288448)
  expect_equal(as.numeric(rowSums(GetAssayData(object = object[["SCT"]], layer = "scale.data"))[5]), 0)
  expect_equal(as.numeric(colSums(GetAssayData(object = object[["SCT"]], layer = "data"))[1]), 57.7295742, tolerance = 1e-6)
  expect_equal(as.numeric(rowSums(GetAssayData(object = object[["SCT"]], layer = "data"))[5]), 11.74403719, tolerance = 1e-6)
  expect_equal(as.numeric(colSums(GetAssayData(object = object[["SCT"]], layer = "counts"))[1]), 129)
  expect_equal(as.numeric(rowSums(GetAssayData(object = object[["SCT"]], layer = "counts"))[5]), 28)
  expect_equal(length(VariableFeatures(object[["SCT"]])), 220)
  fa <- SCTResults(object = object, assay = "SCT", slot = "feature.attributes")
  expect_equal(fa["MS4A1", "detection_rate"], 0.15)
  expect_equal(fa["MS4A1", "gmean"], 0.2027364, tolerance = 1e-6)
  expect_equal(fa["MS4A1", "variance"], 1.025158, tolerance = 1e-6)
  expect_equal(fa["MS4A1", "residual_mean"], 0.2362887, tolerance = 1e-6)
  expect_equal(fa["MS4A1", "residual_variance"], 2.875761, tolerance = 1e-6)
})
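# The checks above and below exercise the layers SCTransform writes into the
# "SCT" assay -- "counts" (corrected counts), "data" (log1p of the corrected
# counts) and "scale.data" (residuals for the variable features) -- plus the
# per-feature model attributes exposed through SCTResults(). A minimal sketch of
# inspecting them directly (kept as comments; illustrative only, values not
# asserted here):
# sct.residuals <- GetAssayData(object = object[["SCT"]], layer = "scale.data")
# fa <- SCTResults(object = object, assay = "SCT", slot = "feature.attributes")
# head(fa[, c("detection_rate", "gmean", "variance", "residual_mean", "residual_variance")])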
test_that("SCTransform v2 works as expected", {
  skip_on_cran()
  skip_if_not_installed("glmGamPoi")
  object <- suppressWarnings(SCTransform(object = object, verbose = FALSE, vst.flavor = "v2", seed.use = 1448145))
  expect_true("SCT" %in% names(object))
  expect_equal(as.numeric(colSums(GetAssayData(object = object[["SCT"]], layer = "scale.data"))[1]), 24.5183, tolerance = 1e-2)
  expect_equal(as.numeric(rowSums(GetAssayData(object = object[["SCT"]], layer = "scale.data"))[5]), 0)
  expect_equal(as.numeric(colSums(GetAssayData(object = object[["SCT"]], layer = "data"))[1]), 58.65829, tolerance = 1e-6)
  expect_equal(as.numeric(rowSums(GetAssayData(object = object[["SCT"]], layer = "data"))[5]), 13.75449, tolerance = 1e-6)
  expect_equal(as.numeric(colSums(GetAssayData(object = object[["SCT"]], layer = "counts"))[1]), 141)
  expect_equal(as.numeric(rowSums(GetAssayData(object = object[["SCT"]], layer = "counts"))[5]), 40)
  expect_equal(length(VariableFeatures(object[["SCT"]])), 220)
  fa <- SCTResults(object = object, assay = "SCT", slot = "feature.attributes")
  expect_equal(fa["MS4A1", "detection_rate"], 0.15)
  expect_equal(fa["MS4A1", "gmean"], 0.2027364, tolerance = 1e-6)
  expect_equal(fa["MS4A1", "variance"], 1.025158, tolerance = 1e-6)
  expect_equal(fa["MS4A1", "residual_mean"], 0.2763993, tolerance = 1e-6)
  expect_equal(fa["MS4A1", "residual_variance"], 3.023062, tolerance = 1e-6)
})

suppressWarnings(RNGversion(vstr = "3.5.0"))
object <- suppressWarnings(SCTransform(object = object, vst.flavor = "v1", ncells = 80, verbose = FALSE, seed.use = 42))
test_that("SCTransform ncells param works", {
  expect_true("SCT" %in% names(object))
  expect_equal(as.numeric(colSums(GetAssayData(object = object[["SCT"]], layer = "scale.data"))[1]), 11.40288, tolerance = 1e-6)
  expect_equal(as.numeric(rowSums(GetAssayData(object = object[["SCT"]], layer = "scale.data"))[5]), 0)
  expect_equal(as.numeric(colSums(GetAssayData(object = object[["SCT"]], layer = "data"))[1]), 57.72957, tolerance = 1e-6)
  expect_equal(as.numeric(rowSums(GetAssayData(object = object[["SCT"]], layer = "data"))[5]), 11.74404, tolerance = 1e-6)
  expect_equal(as.numeric(colSums(GetAssayData(object = object[["SCT"]], layer = "counts"))[1]), 129)
  expect_equal(as.numeric(rowSums(GetAssayData(object = object[["SCT"]], layer = "counts"))[5]), 28)
  expect_equal(length(VariableFeatures(object[["SCT"]])), 220)
  fa <- SCTResults(object = object, assay = "SCT", slot = "feature.attributes")
  expect_equal(fa["MS4A1", "detection_rate"], 0.15)
  expect_equal(fa["MS4A1", "gmean"], 0.2027364, tolerance = 1e-6)
  expect_equal(fa["MS4A1", "variance"], 1.025158, tolerance = 1e-6)
  expect_equal(fa["MS4A1", "residual_mean"], 0.2362887, tolerance = 1e-3)
  expect_equal(fa["MS4A1", "residual_variance"], 2.875761, tolerance = 1e-3)
})

suppressWarnings(object[["SCT_SAVE"]] <- object[["SCT"]])
object[["SCT"]] <- suppressWarnings({
  SetAssayData(object = object[["SCT"]], slot = "scale.data", new.data = GetAssayData(object = object[["SCT"]], layer = "scale.data")[1:100, ])
})
object <- GetResidual(object = object, features = rownames(x = object), verbose = FALSE)
test_that("GetResidual works", {
  expect_equal(dim(GetAssayData(object = object[["SCT"]], layer = "scale.data")), c(220, 80))
  expect_equal(
    GetAssayData(object = object[["SCT"]], layer = "scale.data"),
    GetAssayData(object = object[["SCT_SAVE"]], layer = "scale.data")
  )
  expect_warning(GetResidual(object, features = "asd"))
})
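# GetResidual() recomputes SCT residuals for the requested features from the
# model stored in the SCT assay and merges them back into "scale.data", which is
# why the test above can truncate "scale.data" to 100 features and still recover
# the full 220 x 80 matrix. A minimal sketch for a single gene known to be
# present (kept as comments; illustrative only):
# object <- GetResidual(object = object, features = "MS4A1", verbose = FALSE)
# GetAssayData(object = object[["SCT"]], layer = "scale.data")["MS4A1", 1:5]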
test_that("SCTransform v2 works as expected", {
  skip_on_cran()
  skip_if_not_installed("glmGamPoi")
  object <- suppressWarnings(SCTransform(object = object, verbose = FALSE, vst.flavor = "v2", seed.use = 1448145))
  expect_true("SCT" %in% names(object))
  expect_equal(as.numeric(colSums(GetAssayData(object = object[["SCT"]], layer = "scale.data"))[1]), 24.5813, tolerance = 1e-4)
  expect_equal(as.numeric(rowSums(GetAssayData(object = object[["SCT"]], layer = "scale.data"))[5]), 0)
  expect_equal(as.numeric(colSums(GetAssayData(object = object[["SCT"]], layer = "data"))[1]), 58.65829, tolerance = 1e-6)
  expect_equal(as.numeric(rowSums(GetAssayData(object = object[["SCT"]], layer = "data"))[5]), 13.75449, tolerance = 1e-6)
  expect_equal(as.numeric(colSums(GetAssayData(object = object[["SCT"]], layer = "counts"))[1]), 141)
  expect_equal(as.numeric(rowSums(GetAssayData(object = object[["SCT"]], layer = "counts"))[5]), 40)
  expect_equal(length(VariableFeatures(object[["SCT"]])), 220)
  fa <- SCTResults(object = object, assay = "SCT", slot = "feature.attributes")
  expect_equal(fa["MS4A1", "detection_rate"], 0.15)
  expect_equal(fa["MS4A1", "gmean"], 0.2027364, tolerance = 1e-6)
  expect_equal(fa["MS4A1", "variance"], 1.025158, tolerance = 1e-6)
  expect_equal(fa["MS4A1", "residual_mean"], 0.2763993, tolerance = 1e-6)
  expect_equal(fa["MS4A1", "residual_variance"], 3.023062, tolerance = 1e-6)
  expect_equal(fa["FCER2", "theta"], Inf)
})

test_that("SCTransform is equivalent for BPcells ", {
  skip_on_cran()
  skip_on_cran()
  skip_if_not_installed("glmGamPoi")
  library(Matrix)
  skip_if_not_installed("BPCells")
  library(BPCells)
  mat_bpcells <- t(as(t(object[['RNA']]$counts ), "IterableMatrix"))
  object[['RNAbp']] <- CreateAssay5Object(counts = mat_bpcells)
  object <- suppressWarnings(SCTransform(object = object, assay = "RNA", new.assay.name = "SCT", verbose = FALSE, vst.flavor = "v2", seed.use = 1448145))
  object <- suppressWarnings(SCTransform(object = object, assay = "RNAbp", new.assay.name = "SCTbp", verbose = FALSE, vst.flavor = "v2", seed.use = 1448145))
  expect_equal(as.matrix(LayerData(object = object[["SCT"]], layer = "data")), as.matrix(LayerData(object = object[["SCTbp"]], layer = "data")), tolerance = 1e-6)
})
Seurat/tests/testthat/test_visualization.R0000644000176200001440000000157714525500037020615 0ustar liggesusers
# Tests for functions in visualization.R
set.seed(42)

# Tests for visualization utilities
# ------------------------------------------------------------------------------
pbmc_small[["tsne_new"]] <- CollapseEmbeddingOutliers(pbmc_small, reduction = "tsne", reduction.key = 'tsne_', outlier.sd = 0.5)
test_that("CollapseEmbeddingOutliers works", {
  expect_equal(Embeddings(pbmc_small[["tsne_new"]])[1, 1], -12.59713, tolerance = 1e-6)
  expect_equal(colSums(x = Embeddings(object = pbmc_small[["tsne_new"]])), c(-219.9218, 182.9215), check.attributes = FALSE, tolerance = 1e-5)
})

test_that("DiscretePalette works", {
  isColors <- function(x) {
    all(grepl("#[0-9A-Fa-f]{6}", x))
  }
  expect_true(isColors(DiscretePalette(26)))
  expect_true(isColors(DiscretePalette(32)))
  expect_true(isColors(DiscretePalette(36)))
  expect_warning(DiscretePalette(50), "Not enough colours")
})
Seurat/tests/testthat/test_modularity_optimizer.R0000644000176200001440000001117714525500037022204 0ustar liggesusers
# Tests to verify the RCpp version of ModularityOptimizer produces the same
# results as the java version.
# Equivalent java commands are given above each test.
context("ModularityOptimizer")

# The "karate club" network available from the ModularityOptimizer website at:
# http://www.ludowaltman.nl/slm/
node1 <- c(0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 2, 2, 2, 2, 2, 2, 2, 2, 3, 3, 3, 4, 4, 5, 5, 5, 6, 8, 8, 8, 9, 13, 14, 14, 15, 15, 18, 18, 19, 20, 20, 22, 22, 23, 23, 23, 23, 23, 24, 24, 24, 25, 26, 26, 27, 28, 28, 29, 29, 30, 30, 31, 31, 32)
node2 <- c(1, 2, 3, 4, 5, 6, 7, 8, 10, 11, 12, 13, 17, 19, 21, 31, 2, 3, 7, 13, 17, 19, 21, 30, 3, 7, 8, 9, 13, 27, 28, 32, 7, 12, 13, 6, 10, 6, 10, 16, 16, 30, 32, 33, 33, 33, 32, 33, 32, 33, 32, 33, 33, 32, 33, 32, 33, 25, 27, 29, 32, 33, 25, 27, 31, 31, 29, 33, 33, 31, 33, 32, 33, 32, 33, 32, 33, 33)
dim_s <- max(max(node1), max(node2)) + 1
# Note we want to represent network in the lower diagonal.
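# The edge list above is 0-based (the java ModularityOptimizer convention) and
# every edge is stored with the larger node index in node2, so adding 1 to both
# vectors and using node2 as the row index places all weights strictly below the
# diagonal -- the lower-triangular layout the note above refers to. A quick
# sanity check (kept as comments; illustrative only):
# all(node2 > node1)  # TRUE: the larger node index is always in node2
# Matrix::isTriangular(connections, upper = FALSE)  # TRUE once `connections` is built below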
connections <- sparseMatrix(i = node2 + 1, j = node1 + 1, x = 1.0)

# Result from equivalent command to
# java -jar ModularityOptimizer.jar karate_club_network.txt communities.txt 1 1.0 1 1 1 564 0
test_that("Algorithm 1", {
  expected <- c(1, 1, 1, 1, 2, 2, 2, 1, 0, 1, 2, 1, 1, 1, 0, 0, 2, 1, 0, 1, 0, 1, 0, 0, 3, 3, 0, 0, 3, 0, 0, 3, 0, 0)
  s <- Seurat:::RunModularityClusteringCpp(
    SNN = connections,
    modularityFunction = 1,
    resolution = 1.0,
    algorithm = 1,
    nRandomStarts = 1,
    nIterations = 1,
    randomSeed = 564,
    printOutput = 0,
    ""
  )
  expect_equal(expected, s)
})

# java -jar ModularityOptimizer.jar karate_club_network.txt communities.txt 1 1.0 2 1 1 2 0
test_that("Algorithm 2", {
  expected <- c(1, 1, 1, 1, 3, 3, 3, 1, 0, 0, 3, 1, 1, 1, 0, 0, 3, 1, 0, 1, 0, 1, 0, 2, 2, 2, 0, 2, 2, 0, 0, 2, 0, 0)
  s <- Seurat:::RunModularityClusteringCpp(
    SNN = connections,
    modularityFunction = 1,
    resolution = 1.0,
    algorithm = 2,
    nRandomStarts = 1,
    nIterations = 1,
    randomSeed = 2,
    printOutput = 0,
    ""
  )
  expect_equal(expected, s)
})

# java -jar ModularityOptimizer.jar karate_club_network.txt communities.txt 1 1.0 3 1 1 56464 0
test_that("Algorithm 3", {
  expected <- c(1, 1, 1, 1, 3, 3, 3, 1, 0, 0, 3, 1, 1, 1, 0, 0, 3, 1, 0, 1, 0, 1, 0, 2, 2, 2, 0, 2, 2, 0, 0, 2, 0, 0)
  s <- Seurat:::RunModularityClusteringCpp(
    SNN = connections,
    modularityFunction = 1,
    resolution = 1.0,
    algorithm = 3,
    nRandomStarts = 1,
    nIterations = 1,
    randomSeed = 56464,
    printOutput = 0,
    ""
  )
  expect_equal(expected, s)
})

test_that("Low Resolution", {
  e1 <- rep(0, 34)
  # java -jar ModularityOptimizer.jar karate_club_network.txt outjava.txt 1 0.05 3 1 10 10 0
  s <- Seurat:::RunModularityClusteringCpp(
    SNN = connections,
    modularityFunction = 1,
    resolution = 0.05,
    algorithm = 3,
    nRandomStarts = 1,
    nIterations = 10,
    randomSeed = 10,
    printOutput = 0,
    ""
  )
  expect_equal(s, e1)
  # java -jar ModularityOptimizer.jar karate_club_network.txt outjava.txt 2 0.05 3 1 10 10 0
  s2 <- Seurat:::RunModularityClusteringCpp(
    SNN = connections,
    modularityFunction = 2,
    resolution = 0.05,
    algorithm = 3,
    nRandomStarts = 1,
    nIterations = 10,
    randomSeed = 10,
    printOutput = 0,
    ""
  )
  e2 = c(0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 1, 1, 0, 0, 1, 0, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1)
  expect_equal(s2, e2)
})

test_that("EdgeWeights", {
  # Make 1, 4, 5 and 20 a community by weighting them
  c2 <- connections
  c2[5, 4] <- 3.0
  c2[5, 1] <- 5.0
  c2[4, 1] <- 8.0
  c2[20, 5] <- 8.0
  c2[20, 4] <- 5.0
  c2[20, 1] <- 5.0
  # java -jar ModularityOptimizer.jar weighted_karate_club_network.txt outjava.txt 1 1.0 3 1 10 40 1
  s2 <- Seurat:::RunModularityClusteringCpp(
    SNN = c2,
    modularityFunction = 1,
    resolution = 1.0,
    algorithm = 3,
    nRandomStarts = 1,
    nIterations = 10,
    randomSeed = 40,
    printOutput = 0,
    ""
  )
  exp <- c(2, 1, 1, 2, 2, 3, 3, 1, 0, 1, 3, 2, 2, 1, 0, 0, 3, 1, 0, 2, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0)
  expect_equal(s2, exp)
})

# test_that("pbmc_small network", {
#   observed <- as.numeric(FindClusters(
#     object = pbmc_small,
#     reduction.type = "pca",
#     dims.use = 1:10,
#     resolution = 1.1,
#     save.SNN = TRUE,
#     print.output = 0)@ident)
#   expected = c(1,1,1,1,1,1,1,1,1,1,6,1,6,1,2,2,1,6,2,1,2,2,2,2,2,2,2,2,2,6,3,5,3,3,3,3,3,3,3,3,5,1,1,1,1,1,3,1,3,1,2,1,2,2,6,2,3,2,1,3,5,2,5,5,2,2,2,2,5,3,4,4,4,4,4,4,4,4,4,4)
#   expect_equal(observed, expected)
# })
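# The calls above go through the internal entry point directly; the user-facing
# path is FindNeighbors() followed by FindClusters(), whose `algorithm` argument
# selects the same optimizers exercised here (1 = original Louvain, 2 = Louvain
# with multilevel refinement, 3 = SLM). A minimal sketch against the bundled
# pbmc_small object (kept as comments; illustrative only -- the resulting cluster
# assignments are not asserted anywhere in this file):
# pbmc_small <- FindNeighbors(object = pbmc_small, dims = 1:10, verbose = FALSE)
# pbmc_small <- FindClusters(object = pbmc_small, resolution = 1.1, algorithm = 3, verbose = FALSE)
# table(Idents(object = pbmc_small))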
Seurat/tests/testthat/test_load_10X.R0000644000176200001440000000334614525500037017257 0ustar liggesusers
context("Read10X")

# These tests were added to ensure Seurat was forwards and backwards compatible for 3.0 data
dname = "../testdata/cr3.0"
test.data <- Read10X(dname)
test.data2 <- Read10X(c(dname, dname))
test_that("Cell Ranger 3.0 Data Parsing", {
  expect_is(test.data, "list")
  expect_equal(ncol(test.data$`Gene Expression`), .5 * ncol(test.data2$`Gene Expression`))
  expect_equal(ncol(test.data$`Antibody Capture`), .5 * ncol(test.data2$`Antibody Capture`))
  expect_equal(colnames(test.data2[[1]])[6], "2_AAAGTAGCACAGTCGC-1")
  expect_equal(test.data$`Gene Expression`[2, 2], 1000)
})

# Tests of Pre-3.0 Data
test.data3 <- Read10X("../testdata/")
test_that("Read10X creates sparse matrix", {
  expect_is(test.data3, "dgCMatrix")
  expect_equal(colnames(test.data3)[1], "ATGCCAGAACGACT-1")
  expect_equal(rownames(test.data3)[1], "MS4A1")
})

test_that("Read10X handles missing files properly", {
  expect_error(Read10X("."))
  expect_error(Read10X("./notadir/"))
  expect_error(Read10X(dname, gene.column = 10))
})

# Tests for reading in spatial 10x data
if (requireNamespace("hdf5r", quietly = TRUE)) {
  context("Load10X_Spatial")
  dname <- "../testdata/visium"
  txsp <- Load10X_Spatial(data.dir = '../testdata/visium')
  test_that("10x Spatial Data Parsing", {
    expect_is(txsp, "Seurat")
    expect_equal(ncol(x = txsp), 2695)
    expect_equal(nrow(x = txsp), 100)
    expect_equal(Cells(x = txsp)[1], "AAACAAGTATCTCCCA-1")
    expect_equal(Assays(object = txsp), "Spatial")
    expect_equal(GetAssayData(object = txsp[["Spatial"]], layer = "counts")[5, 9], 1)
  })
  test_that("Read10X_Spatial handles missing files properly", {
    expect_error(Load10X_Spatial(data.dir = "."))
    expect_error(Load10X_Spatial(data.dir = "./notadir/"))
  })
}
Seurat/tests/testthat/test_dimensional_reduction.R0000644000176200001440000000460314525500037022263 0ustar liggesusers
context("test-dimensional_reduction")

set.seed(seed = 1)
dummyexpMat <- matrix(
  data = sample(x = c(1:50), size = 1e4, replace = TRUE),
  ncol = 100, nrow = 100
)
colnames(x = dummyexpMat) <- paste0("cell", seq(ncol(x = dummyexpMat)))
row.names(x = dummyexpMat) <- paste0("gene", seq(nrow(x = dummyexpMat)))

# Create Seurat object for testing
obj <- CreateSeuratObject(counts = as.sparse(dummyexpMat))

test_that("different ways of passing distance matrix", {
  # Manually make a distance object to test
  distMat <- dist(t(dummyexpMat))
  expect_equivalent(
    suppressWarnings(expr = RunTSNE(obj, distance.matrix = distMat)),
    suppressWarnings(expr = RunTSNE(obj, distance.matrix = as.matrix(distMat)))
  )
  expect_equivalent(
    suppressWarnings(expr = RunTSNE(obj, distance.matrix = distMat)@reductions$tsne),
    suppressWarnings(expr = RunTSNE(distMat, assay = "RNA"))
  )
  expect_equivalent(
    suppressWarnings(expr = RunTSNE(obj, distance.matrix = distMat)@reductions$tsne),
    suppressWarnings(expr = RunTSNE(as.matrix(distMat), assay = "RNA", is_distance = TRUE))
  )
})

# Normalize, scale, and compute PCA, using RunPCA
obj <- NormalizeData(object = obj, verbose = FALSE)
obj <- ScaleData(object = obj, verbose = FALSE)
pca_result <- suppressWarnings(expr = RunPCA(
  object = obj,
  features = rownames(obj[['RNA']]$counts),
  verbose = FALSE
))

test_that("pca returns total variance (see #982)", {
  # Using stats::prcomp
  scaled_data <- LayerData(object = obj, layer = "scale.data")
  prcomp_result <- stats::prcomp(scaled_data, center = FALSE, scale.
= FALSE) # Compare expect_equivalent(slot(object = pca_result[["pca"]], name = "misc")$total.variance, sum(prcomp_result$sdev^2)) }) test_that("pca is equivalent for BPCells", { skip_on_cran() library(Matrix) library(BPCells) mat_bpcells <- t(x = as(object = t(x = obj[['RNA']]$counts ), Class = "IterableMatrix")) obj[['RNAbp']] <- CreateAssay5Object(counts = mat_bpcells) DefaultAssay(obj) <- "RNAbp" obj <- NormalizeData(object = obj, verbose = FALSE) obj <- ScaleData(object = obj, verbose=FALSE) pca_result_bp <- suppressWarnings(expr = RunPCA( object = obj, features = rownames(obj[['RNAbp']]$counts), assay = "RNAbp")) expect_equivalent(abs(pca_result_bp[['pca']]@cell.embeddings), abs(pca_result[['pca']]@cell.embeddings), tolerance = 1e-5) }) Seurat/tests/testdata/0000755000176200001440000000000014525500037014471 5ustar liggesusersSeurat/tests/testdata/cr3.0/0000755000176200001440000000000014525500037015316 5ustar liggesusersSeurat/tests/testdata/cr3.0/barcodes.tsv.gz0000644000176200001440000000012714525500037020255 0ustar liggesusers?Fw[barcodes.tsv% ./Zx& DCB}{Ņo]+5j{P`ܡC0VPh3T:r|V{̤ҧ(.s>?5nY0'2K ?Seurat/tests/testdata/matrix.mtx0000644000176200001440000012076214525500037016537 0ustar liggesusers%%MatrixMarket matrix coordinate integer general 240 80 4814 2 1 1 6 1 1 9 1 3 12 1 1 23 1 1 31 1 4 33 1 3 35 1 1 36 1 5 38 1 1 40 1 1 44 1 3 45 1 3 47 1 1 48 1 3 50 1 1 51 1 1 52 1 1 53 1 1 54 1 2 56 1 1 57 1 2 58 1 1 59 1 2 60 1 1 65 1 1 78 1 4 80 1 1 84 1 1 93 1 1 95 1 1 100 1 1 104 1 1 109 1 1 120 1 1 126 1 1 133 1 1 141 1 1 149 1 2 152 1 2 159 1 1 163 1 2 166 1 3 167 1 1 177 1 1 194 1 1 198 1 1 208 1 1 222 1 1 233 1 1 236 1 1 238 1 1 4 2 1 9 2 7 22 2 1 23 2 1 25 2 2 27 2 1 31 2 4 32 2 3 33 2 7 34 2 1 36 2 2 38 2 1 39 2 1 40 2 1 41 2 5 42 2 2 43 2 2 44 2 2 45 2 3 48 2 1 49 2 2 52 2 2 56 2 3 57 2 2 58 2 1 67 2 1 69 2 3 72 2 2 74 2 2 77 2 1 78 2 4 80 2 2 92 2 1 93 2 1 95 2 1 100 2 1 111 2 1 115 2 1 116 2 1 117 2 1 126 2 1 128 2 1 141 2 1 147 2 1 156 2 1 159 2 1 177 2 1 191 2 1 193 2 1 197 2 1 198 2 2 219 2 1 223 2 1 228 2 1 231 2 1 232 2 1 239 2 1 9 3 11 12 3 1 24 3 1 31 3 4 32 3 2 33 3 11 35 3 2 36 3 1 37 3 1 39 3 1 42 3 1 44 3 1 47 3 1 48 3 2 49 3 1 51 3 2 54 3 1 55 3 1 56 3 2 57 3 2 58 3 1 65 3 9 67 3 1 69 3 2 72 3 2 78 3 4 80 3 2 89 3 1 92 3 1 93 3 1 98 3 1 104 3 1 107 3 1 124 3 1 126 3 1 140 3 1 141 3 1 144 3 1 149 3 1 152 3 1 153 3 2 155 3 2 157 3 1 160 3 1 163 3 1 164 3 1 166 3 3 172 3 4 175 3 1 179 3 1 181 3 1 222 3 1 232 3 1 9 4 13 12 4 1 23 4 6 31 4 5 32 4 2 33 4 13 34 4 1 35 4 2 36 4 2 39 4 1 40 4 1 42 4 1 43 4 2 44 4 6 45 4 4 46 4 2 48 4 5 49 4 4 54 4 1 55 4 2 56 4 3 57 4 3 58 4 1 59 4 2 60 4 1 65 4 8 67 4 1 70 4 2 72 4 4 76 4 1 78 4 5 81 4 1 88 4 1 90 4 1 96 4 2 98 4 1 104 4 2 112 4 1 130 4 1 141 4 1 142 4 3 149 4 2 151 4 3 154 4 1 155 4 1 157 4 1 163 4 2 166 4 3 169 4 1 172 4 2 173 4 1 176 4 1 180 4 1 182 4 1 210 4 1 214 4 1 222 4 1 236 4 3 4 5 1 9 5 3 31 5 4 32 5 3 33 5 3 36 5 2 39 5 36 41 5 2 43 5 1 44 5 5 45 5 2 48 5 2 49 5 3 51 5 2 52 5 1 54 5 54 56 5 2 57 5 2 64 5 1 65 5 1 67 5 3 68 5 1 69 5 1 72 5 1 75 5 1 78 5 4 89 5 1 90 5 2 99 5 1 100 5 1 101 5 1 104 5 1 128 5 1 131 5 1 137 5 1 140 5 1 141 5 2 144 5 1 146 5 1 149 5 3 151 5 2 153 5 3 154 5 1 159 5 1 161 5 1 163 5 1 166 5 1 172 5 1 176 5 1 180 5 1 182 5 1 183 5 1 198 5 2 211 5 1 217 5 1 222 5 2 223 5 1 232 5 2 233 5 1 4 6 1 9 6 4 14 6 1 23 6 2 24 6 1 31 6 4 32 6 1 33 6 4 34 6 1 35 6 1 37 6 2 42 6 2 43 6 2 44 6 3 45 6 1 46 6 1 47 6 1 48 6 4 49 6 1 50 6 1 54 6 2 55 6 1 57 6 1 60 6 1 66 6 1 69 6 1 72 6 1 78 6 4 80 6 1 81 6 1 93 
6 1 100 6 2 101 6 1 126 6 1 128 6 1 135 6 1 144 6 1 146 6 1 149 6 2 151 6 1 153 6 2 155 6 1 156 6 1 159 6 2 166 6 3 172 6 2 182 6 1 194 6 1 200 6 1 223 6 1 231 6 1 232 6 1 236 6 1 9 7 6 23 7 4 31 7 3 32 7 1 33 7 6 34 7 1 36 7 1 37 7 3 38 7 1 41 7 1 42 7 1 44 7 4 45 7 1 48 7 3 49 7 3 50 7 1 54 7 1 55 7 2 59 7 2 60 7 1 65 7 3 69 7 2 70 7 1 78 7 3 96 7 1 101 7 1 113 7 1 125 7 1 127 7 1 132 7 1 149 7 2 152 7 2 156 7 1 166 7 3 179 7 1 210 7 1 213 7 1 236 7 1 238 7 1 4 8 1 9 8 4 23 8 1 24 8 1 31 8 2 32 8 3 33 8 4 35 8 1 36 8 12 37 8 2 38 8 1 40 8 1 41 8 1 43 8 2 45 8 2 46 8 1 47 8 1 48 8 2 49 8 4 50 8 1 53 8 1 54 8 1 55 8 2 58 8 1 59 8 1 65 8 3 71 8 1 74 8 1 78 8 2 81 8 1 85 8 1 92 8 1 100 8 1 120 8 1 128 8 1 141 8 1 149 8 1 152 8 1 154 8 1 156 8 1 159 8 1 161 8 1 163 8 1 166 8 1 172 8 1 182 8 1 183 8 1 222 8 1 233 8 1 9 9 2 31 9 2 32 9 2 33 9 2 35 9 2 37 9 3 38 9 1 42 9 1 43 9 3 44 9 1 45 9 1 47 9 1 48 9 3 49 9 2 51 9 1 54 9 1 56 9 1 57 9 3 58 9 2 59 9 1 78 9 2 80 9 2 81 9 2 83 9 1 89 9 1 93 9 1 95 9 1 96 9 1 100 9 1 112 9 1 126 9 1 142 9 1 148 9 1 156 9 1 159 9 1 163 9 1 172 9 1 179 9 1 191 9 1 236 9 1 2 10 1 9 10 21 12 10 1 23 10 4 25 10 1 31 10 2 32 10 1 33 10 21 34 10 1 35 10 1 36 10 9 38 10 1 40 10 1 41 10 1 43 10 1 44 10 6 45 10 1 47 10 1 53 10 1 54 10 3 55 10 1 59 10 1 65 10 3 69 10 2 72 10 6 74 10 2 78 10 2 80 10 1 98 10 4 100 10 1 101 10 1 115 10 1 141 10 2 146 10 1 149 10 3 150 10 1 154 10 1 156 10 2 159 10 1 166 10 3 172 10 3 182 10 1 210 10 1 231 10 1 1 11 2 2 11 2 4 11 14 5 11 3 6 11 1 7 11 3 9 11 2 13 11 1 15 11 3 20 11 1 21 11 1 22 11 1 23 11 2 24 11 2 26 11 2 28 11 1 29 11 1 30 11 1 33 11 2 43 11 1 54 11 1 86 11 1 90 11 1 93 11 1 95 11 1 100 11 1 121 11 3 126 11 1 128 11 14 129 11 4 132 11 1 133 11 1 134 11 2 143 11 1 159 11 1 233 11 1 1 12 2 2 12 4 3 12 5 4 12 28 6 12 6 7 12 1 8 12 4 9 12 9 10 12 2 11 12 1 12 12 3 14 12 1 16 12 3 17 12 1 18 12 1 25 12 1 26 12 2 27 12 4 33 12 9 43 12 1 44 12 1 45 12 1 48 12 2 54 12 2 55 12 1 57 12 1 68 12 1 72 12 1 75 12 2 81 12 2 84 12 1 89 12 1 90 12 1 93 12 4 98 12 1 101 12 1 107 12 1 112 12 1 121 12 8 124 12 1 126 12 4 128 12 28 129 12 10 130 12 4 132 12 4 133 12 6 134 12 10 138 12 2 141 12 1 143 12 1 146 12 1 155 12 1 156 12 1 161 12 1 163 12 1 165 12 1 166 12 1 169 12 2 172 12 2 180 12 1 181 12 1 182 12 1 210 12 29 217 12 1 222 12 1 236 12 2 1 13 4 2 13 3 3 13 2 4 13 18 5 13 2 6 13 2 8 13 1 9 13 2 11 13 1 12 13 2 13 13 1 15 13 1 21 13 1 23 13 4 25 13 1 26 13 1 28 13 15 33 13 2 37 13 1 45 13 1 48 13 1 66 13 1 81 13 1 82 13 1 98 13 1 100 13 1 108 13 1 121 13 2 128 13 18 129 13 4 130 13 4 132 13 3 133 13 2 134 13 6 142 13 1 146 13 1 149 13 1 154 13 2 155 13 1 159 13 1 166 13 2 182 13 1 215 13 1 230 13 1 231 13 1 1 14 4 2 14 3 3 14 2 4 14 7 5 14 4 6 14 2 8 14 1 9 14 4 10 14 1 14 14 1 16 14 1 17 14 2 21 14 1 23 14 1 29 14 1 33 14 4 48 14 1 54 14 1 64 14 2 93 14 1 121 14 2 126 14 1 128 14 7 129 14 4 130 14 1 133 14 2 134 14 1 143 14 1 178 14 1 180 14 1 211 14 2 1 15 2 2 15 2 3 15 5 4 15 15 6 15 2 7 15 2 8 15 2 9 15 4 10 15 1 12 15 1 15 15 1 18 15 1 22 15 2 24 15 2 25 15 2 27 15 1 33 15 4 36 15 1 47 15 1 48 15 5 56 15 1 61 15 1 72 15 1 81 15 1 84 15 1 98 15 1 121 15 5 128 15 15 129 15 8 132 15 4 133 15 2 134 15 5 138 15 1 142 15 2 144 15 1 152 15 1 153 15 2 154 15 1 155 15 1 156 15 2 166 15 1 182 15 1 194 15 1 196 15 1 198 15 1 222 15 1 231 15 1 236 15 1 1 16 3 2 16 3 3 16 8 4 16 28 6 16 8 8 16 2 10 16 1 12 16 2 14 16 3 15 16 1 17 16 1 19 16 1 20 16 1 25 16 3 27 16 1 30 16 1 32 16 2 37 16 1 47 16 1 56 16 1 61 16 1 71 16 1 72 16 1 79 16 1 101 16 1 
121 16 9 124 16 4 128 16 28 129 16 23 130 16 8 132 16 8 133 16 8 134 16 16 140 16 3 141 16 2 146 16 1 149 16 2 156 16 5 158 16 1 164 16 1 172 16 1 176 16 1 194 16 1 198 16 2 228 16 1 233 16 2 1 17 3 2 17 1 3 17 1 4 17 7 5 17 3 6 17 2 7 17 2 8 17 1 9 17 3 11 17 1 12 17 2 13 17 1 19 17 2 20 17 1 23 17 4 26 17 1 30 17 1 33 17 3 44 17 2 45 17 1 58 17 1 67 17 2 100 17 1 101 17 1 107 17 1 124 17 1 128 17 7 129 17 7 130 17 1 132 17 1 133 17 2 134 17 5 138 17 1 149 17 4 159 17 1 182 17 1 233 17 1 1 18 4 2 18 2 3 18 5 4 18 26 5 18 3 6 18 2 7 18 1 8 18 2 9 18 6 11 18 1 13 18 2 16 18 1 18 18 1 19 18 1 20 18 1 22 18 2 23 18 2 25 18 1 27 18 1 28 18 23 29 18 1 33 18 6 45 18 3 48 18 1 49 18 1 61 18 1 64 18 1 66 18 1 72 18 1 81 18 1 93 18 1 100 18 2 121 18 5 124 18 1 126 18 1 128 18 26 130 18 5 132 18 2 133 18 2 134 18 11 138 18 1 140 18 1 143 18 2 154 18 1 159 18 2 172 18 1 175 18 1 183 18 1 190 18 1 211 18 1 229 18 1 1 19 2 2 19 2 3 19 5 4 19 10 5 19 3 6 19 1 7 19 1 9 19 5 10 19 1 12 19 1 13 19 2 14 19 1 17 19 1 21 19 1 22 19 1 23 19 6 24 19 1 26 19 1 33 19 5 36 19 1 44 19 1 45 19 1 47 19 1 49 19 1 56 19 1 62 19 1 63 19 1 90 19 1 93 19 1 98 19 1 102 19 1 121 19 1 126 19 1 128 19 10 129 19 4 132 19 2 133 19 1 134 19 5 135 19 1 143 19 1 151 19 1 154 19 1 166 19 2 180 19 1 221 19 1 228 19 1 231 19 1 233 19 1 1 20 3 2 20 5 3 20 12 4 20 16 5 20 2 6 20 2 7 20 2 8 20 1 9 20 7 11 20 1 16 20 2 18 20 3 19 20 1 23 20 2 24 20 1 25 20 1 26 20 1 30 20 1 33 20 7 37 20 1 41 20 1 45 20 1 49 20 1 56 20 1 75 20 1 80 20 2 81 20 1 98 20 1 100 20 2 121 20 5 124 20 1 128 20 16 129 20 6 130 20 1 132 20 4 133 20 2 134 20 8 135 20 1 143 20 1 144 20 1 149 20 1 152 20 1 159 20 2 166 20 3 196 20 1 4 21 7 7 21 1 9 21 1 23 21 3 33 21 1 36 21 1 44 21 1 49 21 1 50 21 1 64 21 2 68 21 1 81 21 1 90 21 2 91 21 18 92 21 30 93 21 50 94 21 1 95 21 10 96 21 14 97 21 3 98 21 3 99 21 4 100 21 15 101 21 1 104 21 2 105 21 1 107 21 1 108 21 1 109 21 2 110 21 1 111 21 3 112 21 5 113 21 12 115 21 2 119 21 1 120 21 5 125 21 12 126 21 50 128 21 7 130 21 1 134 21 2 135 21 13 136 21 4 140 21 2 146 21 1 147 21 4 151 21 3 152 21 5 153 21 6 154 21 1 156 21 4 157 21 3 158 21 6 159 21 15 160 21 1 161 21 1 163 21 2 164 21 6 166 21 6 170 21 1 172 21 6 173 21 1 177 21 2 178 21 1 182 21 4 186 21 1 198 21 1 211 21 2 212 21 1 217 21 1 218 21 1 222 21 1 4 22 22 6 22 3 8 22 1 32 22 2 44 22 2 54 22 2 64 22 1 70 22 1 74 22 1 76 22 1 82 22 1 91 22 5 92 22 12 93 22 29 94 22 2 95 22 6 96 22 13 97 22 2 98 22 13 99 22 7 100 22 9 101 22 2 103 22 1 104 22 14 105 22 1 106 22 1 107 22 3 108 22 1 109 22 27 110 22 1 111 22 4 112 22 1 113 22 6 116 22 4 118 22 1 120 22 3 121 22 13 124 22 4 125 22 6 126 22 29 128 22 22 129 22 18 130 22 5 131 22 1 132 22 8 133 22 3 134 22 12 135 22 28 138 22 2 140 22 3 144 22 1 146 22 3 147 22 1 148 22 7 151 22 6 152 22 7 153 22 5 155 22 1 156 22 15 158 22 4 159 22 9 160 22 5 161 22 1 163 22 10 164 22 3 165 22 1 166 22 4 167 22 2 170 22 1 171 22 1 172 22 15 173 22 1 174 22 2 176 22 1 177 22 1 180 22 1 182 22 5 198 22 1 211 22 1 231 22 2 3 23 1 7 23 1 12 23 1 13 23 1 74 23 1 75 23 1 91 23 25 92 23 51 93 23 25 94 23 2 95 23 5 96 23 3 98 23 5 99 23 1 100 23 1 101 23 6 102 23 1 104 23 10 105 23 1 107 23 1 111 23 2 113 23 2 114 23 1 118 23 1 121 23 2 124 23 1 125 23 2 126 23 25 129 23 1 132 23 1 134 23 1 135 23 15 137 23 1 140 23 1 141 23 1 147 23 1 148 23 1 151 23 1 152 23 6 153 23 1 155 23 1 156 23 8 159 23 1 163 23 4 164 23 4 172 23 2 174 23 4 177 23 1 180 23 1 182 23 3 198 23 2 4 24 10 8 24 1 9 24 1 24 24 1 33 24 1 44 24 2 74 24 1 91 24 5 92 24 22 93 
24 49 94 24 4 95 24 9 96 24 10 99 24 6 100 24 5 103 24 4 104 24 8 107 24 2 108 24 1 109 24 1 110 24 2 111 24 1 113 24 1 114 24 1 115 24 1 116 24 4 121 24 1 124 24 1 125 24 1 126 24 49 128 24 10 129 24 2 132 24 1 134 24 5 135 24 11 140 24 6 142 24 1 146 24 3 148 24 1 151 24 4 152 24 5 153 24 5 154 24 1 156 24 5 158 24 2 159 24 5 163 24 2 164 24 6 172 24 4 177 24 1 182 24 5 195 24 3 198 24 3 237 24 1 1 25 1 4 25 6 9 25 1 13 25 1 31 25 1 32 25 1 33 25 1 42 25 1 44 25 1 45 25 3 48 25 1 54 25 1 56 25 1 58 25 1 61 25 1 66 25 1 72 25 3 74 25 1 78 25 1 81 25 1 91 25 25 92 25 85 93 25 98 94 25 1 95 25 7 96 25 16 97 25 1 98 25 11 99 25 5 100 25 7 101 25 36 102 25 2 103 25 1 104 25 11 105 25 1 106 25 1 108 25 1 109 25 1 110 25 1 111 25 1 112 25 1 113 25 6 114 25 2 115 25 14 116 25 4 117 25 1 119 25 1 120 25 3 125 25 6 126 25 98 128 25 6 131 25 2 134 25 1 135 25 13 140 25 5 141 25 2 142 25 2 143 25 2 146 25 1 147 25 1 148 25 1 151 25 8 152 25 4 153 25 3 154 25 2 156 25 4 157 25 3 159 25 7 163 25 2 164 25 1 165 25 1 166 25 1 168 25 1 169 25 2 172 25 7 177 25 1 181 25 1 182 25 12 183 25 1 198 25 5 210 25 1 212 25 1 239 25 1 36 26 1 91 26 6 92 26 3 93 26 11 95 26 1 96 26 4 99 26 1 100 26 3 101 26 1 102 26 1 103 26 1 104 26 4 105 26 1 107 26 1 108 26 1 109 26 1 112 26 1 114 26 2 115 26 1 116 26 1 117 26 1 119 26 1 121 26 1 126 26 11 129 26 3 135 26 7 136 26 1 140 26 1 146 26 1 151 26 3 152 26 3 153 26 2 156 26 2 159 26 3 160 26 2 163 26 2 164 26 2 166 26 1 172 26 3 177 26 1 182 26 1 191 26 1 198 26 1 237 26 1 4 27 4 6 27 1 23 27 1 38 27 1 44 27 1 48 27 1 58 27 1 64 27 1 72 27 1 82 27 1 91 27 24 92 27 54 93 27 59 94 27 1 95 27 1 96 27 13 97 27 1 98 27 2 99 27 6 100 27 4 101 27 5 102 27 4 103 27 7 104 27 6 105 27 1 106 27 1 110 27 1 114 27 1 115 27 2 116 27 3 117 27 3 119 27 1 120 27 1 126 27 59 128 27 4 133 27 1 134 27 3 135 27 37 140 27 3 142 27 1 146 27 1 147 27 5 148 27 2 151 27 5 152 27 1 153 27 1 156 27 8 157 27 1 158 27 1 159 27 4 160 27 4 163 27 1 164 27 4 171 27 1 172 27 6 174 27 3 177 27 1 180 27 1 182 27 15 183 27 1 198 27 2 206 27 1 211 27 1 215 27 1 225 27 1 228 27 1 236 27 1 3 28 1 4 28 3 24 28 1 48 28 1 56 28 1 58 28 1 91 28 40 92 28 55 93 28 28 94 28 1 95 28 2 96 28 12 98 28 3 99 28 4 100 28 4 102 28 1 103 28 1 104 28 7 105 28 1 106 28 2 107 28 1 111 28 2 114 28 3 117 28 2 118 28 2 120 28 5 126 28 28 128 28 3 129 28 1 131 28 1 135 28 5 140 28 1 141 28 1 142 28 1 147 28 1 152 28 2 153 28 1 154 28 1 156 28 2 157 28 1 159 28 4 160 28 1 163 28 6 164 28 4 166 28 1 168 28 2 170 28 1 178 28 1 182 28 2 193 28 1 228 28 1 231 28 1 233 28 1 4 29 7 6 29 1 8 29 2 39 29 1 43 29 1 59 29 1 61 29 1 72 29 1 79 29 1 80 29 1 91 29 16 92 29 35 93 29 34 94 29 3 95 29 8 96 29 19 97 29 1 98 29 5 99 29 5 100 29 11 101 29 3 102 29 1 103 29 1 104 29 22 107 29 1 108 29 2 109 29 1 110 29 2 111 29 15 112 29 2 113 29 5 115 29 1 116 29 2 117 29 1 121 29 7 124 29 1 125 29 5 126 29 34 128 29 7 129 29 7 131 29 1 132 29 4 133 29 1 134 29 5 135 29 20 136 29 2 137 29 1 140 29 4 141 29 2 142 29 1 143 29 1 146 29 2 148 29 2 149 29 3 151 29 7 152 29 10 153 29 6 154 29 2 155 29 2 156 29 11 157 29 1 158 29 3 159 29 11 160 29 2 162 29 1 163 29 5 164 29 9 166 29 3 167 29 1 170 29 1 171 29 3 172 29 4 174 29 1 175 29 2 182 29 3 191 29 1 193 29 1 198 29 3 225 29 1 227 29 1 228 29 1 2 30 1 4 30 13 9 30 1 13 30 1 25 30 1 33 30 1 36 30 1 37 30 1 45 30 1 48 30 1 51 30 1 54 30 3 64 30 1 70 30 1 81 30 1 84 30 1 88 30 1 91 30 11 92 30 17 93 30 16 95 30 7 96 30 12 98 30 10 99 30 1 100 30 7 101 30 5 103 30 2 104 30 37 106 30 1 107 30 3 108 30 1 
109 30 1 110 30 1 111 30 2 112 30 1 113 30 2 114 30 1 115 30 2 118 30 5 120 30 4 121 30 6 124 30 2 125 30 2 126 30 16 128 30 13 129 30 7 130 30 1 132 30 1 134 30 3 135 30 18 136 30 3 140 30 2 141 30 1 146 30 3 147 30 2 148 30 1 149 30 3 151 30 13 152 30 12 153 30 4 156 30 18 157 30 1 158 30 3 159 30 7 160 30 7 161 30 1 163 30 6 164 30 8 166 30 6 168 30 2 172 30 20 174 30 3 175 30 1 176 30 1 177 30 2 180 30 1 182 30 1 186 30 2 187 30 1 198 30 2 203 30 1 206 30 1 210 30 1 211 30 1 232 30 1 237 30 2 2 31 1 9 31 1 22 31 1 32 31 1 33 31 1 36 31 2 39 31 1 47 31 1 48 31 2 54 31 1 57 31 3 61 31 1 62 31 1 64 31 35 68 31 4 80 31 1 84 31 1 91 31 1 96 31 3 101 31 1 104 31 3 129 31 2 135 31 1 140 31 1 141 31 3 146 31 1 149 31 1 152 31 1 154 31 2 155 31 1 156 31 3 161 31 6 163 31 6 164 31 8 166 31 8 176 31 2 183 31 2 193 31 1 201 31 1 211 31 35 212 31 27 213 31 2 214 31 35 215 31 5 216 31 7 217 31 4 218 31 5 219 31 14 221 31 1 222 31 1 225 31 4 226 31 2 227 31 1 228 31 1 229 31 1 230 31 3 231 31 2 232 31 3 233 31 3 235 31 4 236 31 7 237 31 1 239 31 3 4 32 1 24 32 1 31 32 7 35 32 2 48 32 2 49 32 1 62 32 3 64 32 14 65 32 2 68 32 4 69 32 1 70 32 2 72 32 2 78 32 7 80 32 1 84 32 1 87 32 1 98 32 1 104 32 4 121 32 1 128 32 1 129 32 4 132 32 1 134 32 2 140 32 2 145 32 1 149 32 4 153 32 1 157 32 1 161 32 2 166 32 2 172 32 1 174 32 1 177 32 2 182 32 1 183 32 1 211 32 14 212 32 2 213 32 5 215 32 3 217 32 4 218 32 3 219 32 1 220 32 3 221 32 3 222 32 1 223 32 4 224 32 1 226 32 1 228 32 1 231 32 2 232 32 1 234 32 1 235 32 1 236 32 3 239 32 5 2 33 2 26 33 1 32 33 1 54 33 1 57 33 1 62 33 2 64 33 12 68 33 2 70 33 5 74 33 1 77 33 1 82 33 1 84 33 1 87 33 1 93 33 1 96 33 4 100 33 1 104 33 9 126 33 1 141 33 1 142 33 1 149 33 5 153 33 2 154 33 1 155 33 1 159 33 1 161 33 2 164 33 3 166 33 3 172 33 1 177 33 1 179 33 1 211 33 12 212 33 1 213 33 3 214 33 15 215 33 9 216 33 1 217 33 2 218 33 1 219 33 4 220 33 5 221 33 2 222 33 2 223 33 2 224 33 1 226 33 1 227 33 1 228 33 1 230 33 4 231 33 1 232 33 1 233 33 2 234 33 3 236 33 2 237 33 58 238 33 1 239 33 1 9 34 1 24 34 1 32 34 1 33 34 1 44 34 2 48 34 2 49 34 1 54 34 15 55 34 1 56 34 1 57 34 1 59 34 1 62 34 3 64 34 30 65 34 5 68 34 7 69 34 2 70 34 14 71 34 1 74 34 1 80 34 1 87 34 1 90 34 2 92 34 1 96 34 3 98 34 1 99 34 2 104 34 6 107 34 1 121 34 2 134 34 1 140 34 1 142 34 1 146 34 4 149 34 1 154 34 1 161 34 1 164 34 1 166 34 5 173 34 1 176 34 1 211 34 30 212 34 10 213 34 4 214 34 3 215 34 2 216 34 1 217 34 7 219 34 9 220 34 7 221 34 3 222 34 2 223 34 1 224 34 1 225 34 3 226 34 1 227 34 1 228 34 1 231 34 1 232 34 3 233 34 1 234 34 2 235 34 1 238 34 2 239 34 3 4 35 1 12 35 1 26 35 1 39 35 1 44 35 2 57 35 1 62 35 2 64 35 20 65 35 4 68 35 2 69 35 1 72 35 1 80 35 1 83 35 2 90 35 2 92 35 1 93 35 2 96 35 6 104 35 1 120 35 1 124 35 1 126 35 2 128 35 1 140 35 2 141 35 3 142 35 1 146 35 1 148 35 1 149 35 1 153 35 1 164 35 2 166 35 2 173 35 1 211 35 20 212 35 8 213 35 10 214 35 29 215 35 6 217 35 2 218 35 3 219 35 7 220 35 1 221 35 2 222 35 4 223 35 1 228 35 1 229 35 2 230 35 1 231 35 1 233 35 1 236 35 1 238 35 1 240 35 10 12 36 1 14 36 1 23 36 1 42 36 1 51 36 1 57 36 3 62 36 4 64 36 27 67 36 2 68 36 4 69 36 1 70 36 29 74 36 2 75 36 1 81 36 1 90 36 1 91 36 1 96 36 7 98 36 1 104 36 3 116 36 1 151 36 1 154 36 1 161 36 1 164 36 5 166 36 1 176 36 1 186 36 1 211 36 27 212 36 5 213 36 8 214 36 11 215 36 3 216 36 1 217 36 4 218 36 1 219 36 10 221 36 4 222 36 1 223 36 2 225 36 1 227 36 1 229 36 1 230 36 2 231 36 1 232 36 2 233 36 3 236 36 3 237 36 1 238 36 3 4 37 1 31 37 1 44 37 1 51 37 1 56 37 1 57 37 
4 59 37 2 61 37 1 62 37 8 64 37 28 68 37 3 69 37 1 70 37 1 72 37 1 78 37 1 81 37 1 88 37 2 90 37 1 92 37 1 96 37 3 100 37 1 101 37 1 103 37 1 104 37 14 120 37 1 121 37 1 128 37 1 135 37 1 140 37 1 142 37 1 146 37 4 149 37 1 153 37 1 156 37 1 158 37 1 159 37 1 161 37 2 163 37 1 164 37 6 165 37 1 166 37 5 172 37 1 176 37 3 179 37 1 182 37 2 183 37 1 210 37 2 211 37 28 212 37 10 213 37 12 214 37 22 215 37 6 216 37 5 217 37 3 218 37 1 219 37 10 220 37 3 221 37 8 223 37 3 224 37 1 225 37 7 226 37 1 227 37 2 228 37 1 229 37 1 230 37 3 231 37 3 232 37 2 233 37 1 235 37 4 236 37 3 238 37 2 239 37 1 4 38 1 25 38 1 32 38 2 38 38 1 48 38 1 55 38 1 57 38 2 61 38 1 62 38 6 64 38 10 68 38 3 69 38 2 70 38 7 74 38 1 80 38 1 81 38 1 83 38 3 90 38 1 93 38 1 95 38 1 96 38 4 101 38 1 104 38 2 109 38 1 120 38 1 126 38 1 128 38 1 135 38 1 140 38 2 141 38 1 142 38 1 146 38 1 153 38 1 154 38 1 161 38 1 163 38 1 164 38 6 166 38 1 176 38 1 183 38 1 193 38 1 211 38 10 212 38 7 213 38 10 214 38 15 215 38 8 216 38 4 217 38 3 218 38 2 219 38 2 220 38 1 221 38 6 222 38 2 223 38 2 225 38 4 227 38 2 229 38 1 233 38 2 235 38 2 236 38 5 237 38 2 238 38 1 239 38 2 240 38 1 14 39 1 23 39 4 31 39 1 48 39 3 49 39 1 57 39 1 61 39 1 62 39 1 64 39 25 67 39 1 68 39 2 69 39 1 70 39 5 72 39 1 74 39 1 78 39 1 81 39 2 90 39 1 96 39 5 101 39 1 104 39 1 120 39 2 134 39 1 142 39 1 155 39 1 156 39 1 161 39 2 164 39 1 165 39 1 166 39 3 176 39 1 182 39 1 183 39 1 211 39 25 212 39 4 213 39 3 214 39 18 215 39 2 216 39 1 217 39 2 218 39 1 219 39 4 221 39 1 222 39 1 223 39 2 224 39 1 226 39 1 227 39 1 228 39 1 230 39 1 231 39 3 232 39 1 233 39 1 234 39 1 235 39 1 237 39 1 238 39 2 26 40 1 35 40 2 47 40 1 48 40 1 54 40 1 55 40 1 57 40 1 62 40 11 64 40 27 65 40 7 67 40 1 68 40 5 70 40 25 72 40 2 74 40 1 75 40 1 80 40 1 81 40 1 83 40 2 84 40 2 89 40 1 96 40 15 99 40 1 100 40 4 104 40 4 137 40 1 140 40 1 141 40 3 142 40 1 146 40 1 154 40 2 155 40 1 156 40 1 159 40 4 161 40 6 164 40 6 166 40 2 176 40 2 179 40 3 196 40 1 198 40 2 210 40 1 211 40 27 212 40 11 213 40 13 214 40 18 215 40 5 216 40 1 217 40 5 218 40 1 219 40 7 220 40 2 221 40 11 222 40 3 223 40 6 224 40 51 225 40 3 226 40 1 228 40 25 229 40 3 231 40 1 232 40 1 233 40 2 234 40 2 235 40 2 236 40 1 238 40 1 239 40 2 240 40 1 23 41 7 32 41 1 44 41 2 45 41 2 48 41 1 49 41 1 50 41 1 54 41 2 57 41 2 62 41 1 63 41 1 64 41 31 65 41 8 68 41 2 69 41 1 71 41 1 72 41 1 73 41 1 74 41 1 75 41 1 76 41 1 81 41 1 84 41 1 85 41 1 87 41 1 90 41 2 96 41 2 98 41 5 100 41 1 101 41 1 104 41 1 122 41 1 140 41 2 141 41 1 145 41 1 153 41 3 155 41 1 157 41 1 159 41 1 161 41 2 163 41 1 164 41 3 166 41 7 172 41 1 198 41 1 210 41 1 211 41 31 212 41 3 213 41 1 214 41 10 215 41 4 217 41 2 218 41 3 219 41 6 220 41 6 221 41 1 222 41 1 223 41 2 225 41 1 226 41 1 227 41 1 228 41 1 232 41 2 239 41 3 9 42 1 23 42 1 31 42 2 33 42 1 36 42 1 44 42 1 45 42 2 54 42 1 57 42 1 59 42 1 61 42 1 62 42 4 64 42 22 65 42 5 66 42 1 67 42 1 68 42 3 69 42 1 70 42 14 72 42 2 75 42 2 78 42 2 79 42 1 80 42 1 81 42 2 82 42 1 83 42 1 84 42 1 85 42 1 88 42 1 90 42 1 93 42 1 98 42 1 100 42 1 101 42 1 104 42 3 111 42 1 126 42 1 129 42 4 130 42 1 138 42 1 141 42 1 144 42 1 153 42 1 154 42 1 155 42 1 156 42 1 159 42 1 166 42 4 172 42 2 179 42 1 193 42 1 206 42 1 211 42 22 213 42 8 215 42 1 217 42 3 219 42 13 221 42 4 223 42 1 231 42 1 232 42 1 233 42 2 234 42 1 235 42 1 236 42 2 237 42 1 238 42 1 9 43 1 23 43 3 31 43 3 33 43 1 36 43 3 44 43 4 49 43 2 54 43 3 55 43 1 56 43 1 57 43 4 61 43 1 62 43 1 63 43 2 64 43 7 65 43 5 67 43 1 68 43 1 69 43 2 70 43 27 
71 43 1 72 43 1 74 43 2 77 43 3 78 43 3 80 43 3 81 43 1 85 43 1 86 43 1 89 43 1 90 43 2 93 43 1 96 43 1 98 43 1 107 43 1 113 43 1 125 43 1 126 43 1 137 43 1 146 43 1 154 43 1 156 43 3 157 43 1 166 43 2 211 43 7 213 43 2 217 43 1 221 43 1 227 43 1 231 43 2 236 43 1 9 44 1 22 44 1 33 44 1 36 44 1 45 44 1 48 44 1 54 44 1 61 44 1 62 44 2 63 44 1 64 44 2 67 44 2 68 44 1 70 44 3 72 44 1 74 44 1 75 44 1 81 44 1 82 44 1 84 44 1 86 44 1 88 44 1 89 44 1 96 44 1 149 44 2 166 44 2 180 44 1 182 44 1 193 44 1 211 44 2 213 44 1 214 44 3 217 44 1 221 44 2 236 44 1 238 44 1 9 45 7 12 45 1 23 45 6 24 45 1 25 45 1 31 45 3 33 45 7 36 45 1 42 45 1 44 45 4 46 45 1 48 45 2 49 45 1 57 45 2 62 45 1 63 45 2 64 45 4 65 45 7 67 45 47 69 45 1 70 45 13 72 45 1 73 45 1 75 45 1 76 45 1 77 45 1 78 45 3 80 45 1 81 45 1 84 45 1 86 45 1 87 45 2 88 45 1 89 45 2 90 45 2 100 45 1 101 45 2 121 45 1 129 45 1 152 45 2 153 45 1 155 45 1 159 45 1 163 45 1 166 45 5 167 45 1 172 45 5 182 45 1 211 45 4 221 45 1 223 45 1 224 45 1 4 46 1 9 46 1 23 46 1 26 46 3 31 46 15 33 46 1 36 46 1 43 46 1 44 46 4 49 46 2 54 46 1 58 46 2 62 46 2 64 46 14 65 46 1 68 46 2 69 46 2 70 46 17 71 46 1 72 46 2 75 46 1 77 46 3 78 46 15 82 46 1 84 46 1 86 46 2 87 46 1 90 46 1 121 46 3 128 46 1 129 46 2 132 46 1 134 46 3 149 46 1 154 46 1 155 46 1 163 46 1 166 46 1 172 46 1 177 46 1 211 46 14 215 46 2 217 46 2 221 46 2 231 46 1 236 46 2 4 47 1 31 47 1 32 47 1 35 47 2 43 47 1 48 47 2 54 47 1 61 47 1 62 47 2 64 47 16 65 47 6 66 47 1 67 47 1 68 47 8 69 47 1 70 47 7 72 47 4 74 47 1 76 47 1 77 47 3 78 47 1 81 47 1 83 47 1 84 47 1 95 47 1 98 47 1 101 47 1 104 47 1 128 47 1 141 47 2 146 47 1 154 47 2 157 47 1 161 47 1 166 47 1 176 47 1 177 47 1 182 47 1 211 47 16 212 47 6 214 47 4 215 47 9 217 47 8 219 47 6 220 47 10 221 47 2 222 47 1 223 47 3 232 47 1 236 47 2 237 47 1 238 47 1 239 47 3 4 48 1 9 48 1 14 48 1 23 48 1 31 48 3 33 48 1 36 48 2 37 48 1 42 48 1 45 48 1 48 48 3 49 48 1 54 48 1 62 48 1 63 48 2 64 48 4 65 48 7 66 48 1 67 48 1 68 48 4 69 48 1 70 48 3 71 48 1 72 48 1 74 48 1 75 48 1 78 48 3 79 48 1 80 48 7 81 48 2 82 48 1 83 48 4 87 48 1 88 48 1 90 48 1 93 48 1 107 48 1 116 48 1 126 48 1 128 48 1 135 48 1 141 48 1 144 48 1 146 48 1 149 48 1 153 48 1 154 48 2 163 48 1 166 48 4 176 48 1 182 48 2 211 48 4 213 48 3 214 48 1 217 48 4 221 48 1 222 48 1 223 48 2 229 48 1 233 48 1 238 48 2 239 48 1 9 49 5 31 49 6 33 49 5 39 49 2 49 49 5 61 49 39 62 49 5 64 49 29 65 49 6 66 49 1 67 49 1 68 49 5 69 49 1 70 49 16 71 49 1 72 49 2 73 49 1 74 49 17 75 49 1 78 49 6 79 49 1 82 49 1 83 49 1 84 49 1 88 49 1 90 49 2 98 49 1 103 49 1 104 49 1 120 49 1 121 49 1 140 49 1 141 49 1 149 49 3 153 49 2 154 49 1 155 49 1 156 49 2 166 49 1 172 49 2 179 49 1 198 49 1 210 49 1 211 49 29 212 49 2 213 49 3 214 49 3 215 49 3 217 49 5 219 49 5 220 49 9 221 49 5 227 49 1 232 49 1 234 49 1 236 49 1 239 49 3 9 50 3 23 50 1 31 50 4 32 50 1 33 50 3 36 50 2 37 50 1 44 50 2 48 50 1 49 50 2 54 50 1 55 50 3 57 50 1 59 50 1 60 50 1 62 50 1 63 50 3 64 50 8 65 50 1 66 50 1 67 50 1 68 50 2 69 50 1 70 50 12 72 50 4 73 50 1 78 50 4 79 50 2 80 50 1 81 50 2 83 50 1 85 50 13 89 50 1 95 50 2 141 50 1 146 50 2 153 50 1 154 50 1 156 50 1 166 50 2 167 50 1 211 50 8 213 50 2 217 50 2 219 50 3 221 50 1 223 50 1 235 50 1 236 50 1 238 50 1 2 51 1 4 51 10 9 51 1 15 51 1 24 51 1 33 51 1 38 51 1 42 51 1 45 51 1 50 51 1 56 51 1 59 51 1 64 51 5 70 51 3 72 51 2 75 51 1 91 51 2 92 51 20 93 51 41 95 51 13 96 51 11 98 51 2 99 51 6 100 51 8 101 51 2 102 51 1 103 51 4 104 51 5 109 51 1 111 51 4 113 51 3 115 51 3 116 51 2 118 51 1 
119 51 1 121 51 12 124 51 1 125 51 3 126 51 41 128 51 10 129 51 8 132 51 4 134 51 8 135 51 16 140 51 4 141 51 3 142 51 1 146 51 9 147 51 2 148 51 2 149 51 3 151 51 15 152 51 7 153 51 8 154 51 2 156 51 21 157 51 2 158 51 2 159 51 8 160 51 5 162 51 1 163 51 17 164 51 12 166 51 5 168 51 3 170 51 2 171 51 1 172 51 9 173 51 2 174 51 6 176 51 1 177 51 2 178 51 1 182 51 5 183 51 2 193 51 1 194 51 1 198 51 1 211 51 5 225 51 1 228 51 2 4 52 10 6 52 1 9 52 2 23 52 1 32 52 2 33 52 2 40 52 1 45 52 1 48 52 2 51 52 1 54 52 1 58 52 1 59 52 1 64 52 3 67 52 2 70 52 1 72 52 1 81 52 1 87 52 1 90 52 1 91 52 2 92 52 6 93 52 4 95 52 7 96 52 21 98 52 2 99 52 5 100 52 8 101 52 4 104 52 12 109 52 1 110 52 1 111 52 5 115 52 2 116 52 2 120 52 2 121 52 4 126 52 4 128 52 10 129 52 3 130 52 2 132 52 5 133 52 1 134 52 4 135 52 32 140 52 1 141 52 3 142 52 1 146 52 3 148 52 1 149 52 1 151 52 17 152 52 12 153 52 8 154 52 2 155 52 1 156 52 25 157 52 3 158 52 1 159 52 8 160 52 3 161 52 5 162 52 1 163 52 13 164 52 12 165 52 2 166 52 10 167 52 2 168 52 3 169 52 5 170 52 4 171 52 2 172 52 20 173 52 3 174 52 4 175 52 5 176 52 6 177 52 2 179 52 26 182 52 3 193 52 1 196 52 1 198 52 2 210 52 1 211 52 3 212 52 1 230 52 1 237 52 1 238 52 1 2 53 1 4 53 4 6 53 1 38 53 1 44 53 1 59 53 1 74 53 1 92 53 1 93 53 3 95 53 5 96 53 2 98 53 2 99 53 1 100 53 7 103 53 1 104 53 4 109 53 1 110 53 1 111 53 2 114 53 1 121 53 2 126 53 3 128 53 4 129 53 5 133 53 1 135 53 7 140 53 2 146 53 1 147 53 1 148 53 3 151 53 8 152 53 7 153 53 6 154 53 1 156 53 6 157 53 16 158 53 1 159 53 7 160 53 4 161 53 1 162 53 3 163 53 1 164 53 2 165 53 1 166 53 1 167 53 2 169 53 1 170 53 2 172 53 9 174 53 1 178 53 1 180 53 1 191 53 1 228 53 1 2 54 1 4 54 1 8 54 1 23 54 1 45 54 1 91 54 4 93 54 3 95 54 1 96 54 5 99 54 1 100 54 3 101 54 1 104 54 2 115 54 1 120 54 1 121 54 1 126 54 3 128 54 1 129 54 2 131 54 1 135 54 9 141 54 1 142 54 1 147 54 1 151 54 11 152 54 6 153 54 2 156 54 10 158 54 1 159 54 3 160 54 1 161 54 2 163 54 2 164 54 4 166 54 4 167 54 1 168 54 1 170 54 1 171 54 1 172 54 3 176 54 1 177 54 2 180 54 1 215 54 1 225 54 1 2 55 2 4 55 6 6 55 2 8 55 1 9 55 1 12 55 1 15 55 1 22 55 1 33 55 1 37 55 1 45 55 1 48 55 2 72 55 2 74 55 1 91 55 3 92 55 10 93 55 14 95 55 4 96 55 21 98 55 2 99 55 6 100 55 10 101 55 2 103 55 1 104 55 16 105 55 2 106 55 1 107 55 2 109 55 3 110 55 1 112 55 1 115 55 3 116 55 4 117 55 2 118 55 1 120 55 1 121 55 5 126 55 14 128 55 6 129 55 3 132 55 3 133 55 2 134 55 7 135 55 11 140 55 1 141 55 2 146 55 4 147 55 2 151 55 18 152 55 32 153 55 9 154 55 50 155 55 3 156 55 26 157 55 1 158 55 3 159 55 10 160 55 11 161 55 14 163 55 9 164 55 35 165 55 3 166 55 17 167 55 1 168 55 2 169 55 2 170 55 1 172 55 6 173 55 6 174 55 4 175 55 4 176 55 1 177 55 2 178 55 1 179 55 2 180 55 2 182 55 1 193 55 1 226 55 1 233 55 1 236 55 2 237 55 2 2 56 2 4 56 28 7 56 1 9 56 1 22 56 1 32 56 1 33 56 1 43 56 2 44 56 2 47 56 2 54 56 1 56 56 1 87 56 1 90 56 2 92 56 4 93 56 17 94 56 1 95 56 3 96 56 13 98 56 1 99 56 4 100 56 15 101 56 3 104 56 10 107 56 2 109 56 6 110 56 2 111 56 5 115 56 1 116 56 2 118 56 1 120 56 3 121 56 5 124 56 4 126 56 17 128 56 28 129 56 7 130 56 1 132 56 3 134 56 7 135 56 17 140 56 5 141 56 2 144 56 1 146 56 2 147 56 3 148 56 1 149 56 1 151 56 13 152 56 33 153 56 9 154 56 1 155 56 3 156 56 26 157 56 11 158 56 4 159 56 15 160 56 9 161 56 4 162 56 1 163 56 12 164 56 16 165 56 4 166 56 8 168 56 1 169 56 4 170 56 3 171 56 3 172 56 9 174 56 3 176 56 2 177 56 1 178 56 6 179 56 2 180 56 25 182 56 1 198 56 2 206 56 1 228 56 3 4 57 10 9 57 1 12 57 1 22 57 1 32 57 1 33 57 
1 39 57 1 43 57 3 48 57 1 51 57 1 59 57 1 61 57 1 64 57 5 65 57 1 66 57 1 67 57 2 69 57 1 72 57 3 74 57 1 81 57 1 90 57 1 91 57 1 92 57 8 93 57 7 95 57 1 96 57 16 98 57 1 99 57 3 100 57 18 101 57 6 103 57 3 104 57 6 105 57 1 107 57 5 108 57 1 109 57 1 110 57 1 111 57 2 115 57 1 117 57 1 120 57 1 121 57 7 126 57 7 128 57 10 129 57 6 130 57 1 131 57 1 132 57 6 134 57 13 135 57 33 137 57 2 138 57 1 146 57 3 148 57 1 151 57 36 152 57 12 153 57 10 154 57 1 155 57 1 156 57 16 157 57 3 158 57 5 159 57 18 160 57 2 161 57 18 162 57 1 163 57 14 164 57 24 165 57 1 166 57 33 167 57 3 168 57 6 169 57 3 171 57 1 172 57 91 175 57 5 176 57 7 178 57 4 179 57 1 191 57 1 194 57 1 198 57 2 211 57 5 214 57 1 228 57 2 237 57 1 4 58 13 6 58 1 8 58 1 9 58 1 13 58 1 22 58 1 24 58 1 33 58 1 44 58 1 50 58 1 54 58 3 65 58 1 72 58 1 81 58 1 90 58 1 91 58 1 92 58 6 93 58 6 95 58 1 96 58 9 98 58 1 99 58 2 100 58 19 101 58 4 104 58 2 107 58 1 109 58 2 110 58 1 111 58 3 112 58 1 116 58 2 117 58 2 120 58 3 121 58 14 126 58 6 128 58 13 129 58 5 130 58 2 131 58 1 132 58 3 133 58 1 134 58 6 135 58 10 137 58 1 138 58 2 141 58 1 142 58 2 143 58 2 146 58 2 147 58 1 151 58 17 152 58 19 153 58 8 154 58 1 155 58 27 156 58 15 157 58 5 158 58 5 159 58 19 160 58 5 161 58 9 163 58 8 164 58 9 165 58 2 166 58 8 167 58 1 168 58 1 169 58 1 170 58 1 171 58 2 172 58 11 173 58 3 174 58 4 175 58 2 176 58 2 177 58 6 178 58 1 180 58 1 182 58 1 198 58 2 228 58 1 2 59 3 4 59 5 6 59 1 7 59 1 9 59 2 23 59 1 33 59 2 39 59 1 48 59 2 54 59 1 67 59 1 72 59 3 74 59 2 84 59 2 91 59 2 93 59 9 95 59 2 96 59 16 98 59 2 99 59 4 100 59 4 101 59 2 104 59 12 105 59 2 107 59 2 109 59 2 111 59 2 112 59 3 115 59 1 116 59 1 118 59 1 121 59 5 126 59 9 128 59 5 129 59 9 131 59 2 132 59 6 133 59 1 134 59 6 135 59 15 136 59 1 140 59 1 141 59 2 142 59 3 147 59 1 148 59 3 149 59 1 151 59 12 152 59 18 153 59 5 154 59 3 155 59 1 156 59 11 157 59 4 158 59 3 159 59 4 160 59 7 161 59 5 162 59 1 163 59 7 164 59 9 165 59 1 166 59 14 167 59 1 168 59 5 171 59 3 172 59 18 174 59 2 175 59 1 176 59 6 177 59 3 182 59 1 198 59 1 213 59 1 219 59 1 225 59 1 4 60 8 8 60 1 9 60 1 23 60 2 25 60 1 33 60 1 43 60 1 44 60 1 45 60 1 47 60 1 48 60 1 56 60 1 59 60 1 61 60 2 66 60 1 72 60 1 81 60 1 89 60 1 90 60 1 93 60 6 96 60 17 98 60 9 99 60 5 100 60 17 101 60 5 103 60 2 104 60 16 105 60 1 106 60 1 109 60 4 111 60 3 112 60 1 115 60 1 118 60 2 120 60 2 121 60 11 126 60 6 128 60 8 129 60 4 131 60 2 132 60 2 134 60 4 135 60 25 140 60 1 146 60 6 147 60 3 148 60 2 149 60 1 151 60 27 152 60 29 153 60 10 154 60 1 155 60 1 156 60 22 157 60 6 158 60 6 159 60 17 160 60 10 161 60 11 162 60 1 163 60 13 164 60 30 165 60 1 166 60 19 167 60 2 168 60 6 169 60 2 171 60 5 172 60 18 173 60 3 174 60 8 175 60 4 176 60 3 177 60 6 178 60 2 179 60 1 182 60 2 183 60 1 191 60 2 193 60 2 198 60 2 210 60 2 214 60 1 225 60 1 238 60 3 4 61 108 6 61 21 8 61 3 12 61 1 15 61 1 22 61 2 23 61 12 26 61 1 32 61 1 36 61 1 39 61 1 42 61 1 44 61 2 46 61 1 48 61 4 49 61 1 61 61 3 71 61 2 74 61 1 75 61 1 82 61 1 84 61 1 85 61 1 88 61 1 89 61 1 90 61 1 93 61 76 96 61 2 98 61 2 99 61 1 100 61 5 101 61 1 103 61 1 104 61 8 107 61 1 109 61 2 112 61 2 113 61 3 114 61 4 115 61 3 121 61 75 122 61 16 124 61 6 125 61 3 126 61 76 127 61 3 128 61 108 129 61 102 130 61 25 131 61 2 132 61 11 133 61 21 134 61 50 135 61 61 136 61 1 138 61 7 139 61 2 140 61 9 141 61 1 142 61 4 143 61 2 145 61 2 146 61 5 147 61 6 148 61 4 149 61 3 151 61 12 152 61 6 153 61 1 154 61 5 156 61 10 157 61 8 158 61 1 159 61 5 163 61 5 164 61 8 166 61 4 168 61 6 172 61 18 176 
61 2 177 61 1 182 61 6 183 61 1 191 61 1 196 61 1 198 61 1 210 61 1 222 61 1 223 61 1 228 61 1 235 61 1 237 61 2 239 61 1 4 62 93 6 62 21 8 62 2 9 62 1 23 62 3 25 62 2 32 62 1 33 62 1 36 62 1 50 62 1 54 62 2 56 62 1 58 62 1 59 62 2 64 62 1 70 62 1 72 62 2 91 62 2 93 62 20 96 62 8 98 62 2 99 62 3 100 62 3 103 62 1 104 62 13 109 62 2 112 62 1 113 62 10 114 62 1 115 62 1 116 62 5 120 62 4 121 62 52 122 62 1 123 62 5 124 62 6 125 62 10 126 62 20 127 62 3 128 62 93 129 62 78 130 62 39 131 62 2 132 62 26 133 62 21 134 62 53 135 62 31 136 62 8 137 62 1 138 62 9 139 62 5 140 62 4 142 62 3 146 62 1 147 62 1 149 62 1 150 62 4 151 62 7 152 62 7 153 62 2 156 62 5 157 62 1 158 62 1 159 62 3 160 62 12 163 62 4 164 62 8 166 62 7 171 62 1 172 62 2 178 62 1 182 62 7 191 62 1 211 62 1 236 62 1 239 62 1 4 63 41 6 63 3 8 63 1 23 63 1 25 63 1 26 63 1 36 63 1 43 63 1 54 63 1 56 63 1 62 63 1 72 63 3 80 63 1 81 63 1 84 63 1 92 63 1 93 63 24 95 63 3 96 63 6 99 63 2 100 63 1 104 63 21 107 63 1 108 63 1 109 63 2 112 63 1 113 63 1 116 63 1 117 63 1 118 63 2 121 63 11 122 63 2 123 63 2 124 63 5 125 63 1 126 63 24 127 63 1 128 63 41 129 63 23 130 63 5 132 63 5 133 63 3 134 63 10 135 63 25 137 63 1 138 63 1 140 63 5 141 63 1 143 63 3 146 63 5 148 63 2 150 63 1 151 63 7 152 63 1 153 63 1 156 63 5 157 63 1 159 63 1 160 63 2 163 63 5 164 63 3 166 63 4 168 63 3 169 63 1 172 63 9 173 63 1 176 63 1 178 63 1 182 63 2 184 63 1 214 63 1 219 63 1 221 63 1 228 63 1 2 64 4 3 64 8 4 64 42 5 64 4 6 64 5 8 64 4 9 64 5 12 64 3 16 64 1 21 64 1 23 64 3 25 64 1 26 64 1 30 64 2 33 64 5 43 64 1 44 64 5 45 64 2 48 64 4 54 64 1 59 64 2 61 64 2 66 64 1 74 64 1 75 64 1 81 64 1 91 64 2 92 64 10 93 64 79 94 64 2 95 64 1 96 64 9 97 64 1 98 64 1 99 64 5 100 64 5 101 64 4 103 64 2 104 64 9 109 64 2 113 64 2 114 64 7 115 64 2 117 64 1 120 64 2 121 64 19 122 64 4 123 64 4 124 64 4 125 64 2 126 64 79 127 64 3 128 64 42 129 64 25 130 64 2 131 64 1 132 64 2 133 64 5 134 64 9 135 64 14 136 64 3 137 64 33 140 64 7 141 64 2 142 64 1 143 64 2 144 64 1 146 64 1 147 64 2 149 64 2 151 64 4 152 64 3 153 64 6 156 64 16 158 64 3 159 64 5 160 64 1 163 64 3 164 64 3 166 64 3 168 64 1 170 64 1 172 64 11 173 64 1 177 64 1 179 64 1 182 64 6 193 64 2 198 64 1 222 64 1 225 64 1 228 64 1 2 65 1 4 65 138 6 65 11 8 65 5 12 65 1 15 65 1 26 65 1 35 65 2 36 65 1 39 65 1 43 65 1 44 65 2 45 65 3 48 65 2 54 65 3 55 65 1 56 65 4 62 65 1 64 65 1 65 65 1 68 65 1 70 65 1 71 65 1 72 65 1 74 65 1 75 65 2 84 65 1 87 65 1 90 65 1 91 65 1 93 65 53 94 65 2 95 65 2 96 65 11 98 65 1 99 65 14 101 65 2 104 65 20 108 65 2 111 65 1 112 65 3 113 65 3 114 65 7 115 65 2 116 65 6 118 65 1 120 65 6 121 65 54 122 65 8 123 65 2 124 65 6 125 65 3 126 65 53 128 65 138 129 65 69 130 65 16 131 65 1 132 65 31 133 65 11 134 65 68 135 65 58 136 65 1 138 65 6 139 65 3 140 65 2 141 65 1 143 65 3 144 65 1 146 65 22 147 65 5 148 65 1 149 65 64 150 65 1 151 65 8 152 65 11 153 65 6 154 65 2 156 65 2 160 65 3 163 65 11 164 65 13 166 65 2 168 65 1 171 65 1 172 65 12 174 65 1 176 65 2 177 65 2 179 65 1 182 65 24 183 65 1 191 65 1 194 65 2 198 65 3 211 65 1 217 65 1 221 65 1 225 65 3 228 65 1 231 65 1 233 65 1 236 65 2 237 65 4 4 66 77 6 66 11 8 66 2 22 66 1 23 66 1 32 66 1 35 66 1 43 66 1 44 66 2 45 66 1 48 66 4 51 66 1 59 66 1 64 66 3 66 66 1 67 66 1 68 66 1 70 66 1 72 66 3 75 66 2 82 66 1 90 66 4 91 66 9 92 66 41 93 66 53 94 66 1 95 66 4 96 66 14 97 66 1 98 66 6 99 66 11 100 66 3 101 66 5 103 66 2 104 66 10 105 66 2 108 66 1 111 66 2 112 66 1 113 66 4 115 66 3 116 66 2 117 66 1 120 66 2 121 66 23 122 66 5 123 66 3 
124 66 5 125 66 4 126 66 53 127 66 1 128 66 77 129 66 24 130 66 6 131 66 1 132 66 21 133 66 11 134 66 36 135 66 112 136 66 2 138 66 1 139 66 3 140 66 5 142 66 1 143 66 1 146 66 10 147 66 4 148 66 39 149 66 2 151 66 10 152 66 7 153 66 4 154 66 5 156 66 3 158 66 1 159 66 3 160 66 4 163 66 9 164 66 8 165 66 1 166 66 2 167 66 2 168 66 3 171 66 1 172 66 11 173 66 1 174 66 1 177 66 1 179 66 1 182 66 16 183 66 2 186 66 2 198 66 1 211 66 3 217 66 1 225 66 2 228 66 2 231 66 1 233 66 1 236 66 1 237 66 2 4 67 76 6 67 10 8 67 1 15 67 1 23 67 2 39 67 1 48 67 1 54 67 1 56 67 2 61 67 1 72 67 2 81 67 2 82 67 1 89 67 1 91 67 1 92 67 11 93 67 87 94 67 1 95 67 6 96 67 10 98 67 1 99 67 3 100 67 6 101 67 10 103 67 2 104 67 23 105 67 1 107 67 1 109 67 3 112 67 1 113 67 4 114 67 2 115 67 3 116 67 7 120 67 5 121 67 45 122 67 8 123 67 6 124 67 6 125 67 4 126 67 87 127 67 3 128 67 76 129 67 43 130 67 11 131 67 6 132 67 21 133 67 10 134 67 49 135 67 37 136 67 3 138 67 4 140 67 12 141 67 1 142 67 38 143 67 2 144 67 21 146 67 9 147 67 8 148 67 1 149 67 3 151 67 4 152 67 9 153 67 4 154 67 4 155 67 2 156 67 16 157 67 1 159 67 6 160 67 4 161 67 1 163 67 9 164 67 7 167 67 1 168 67 3 172 67 7 178 67 1 179 67 1 182 67 28 183 67 1 193 67 1 194 67 1 198 67 3 222 67 2 228 67 4 235 67 1 236 67 1 237 67 1 238 67 1 1 68 1 4 68 15 6 68 1 8 68 1 32 68 1 36 68 1 43 68 1 44 68 1 54 68 5 64 68 1 74 68 1 90 68 1 91 68 23 92 68 32 93 68 76 94 68 1 95 68 1 96 68 10 98 68 3 99 68 4 100 68 2 101 68 6 103 68 1 104 68 5 108 68 1 109 68 1 110 68 1 112 68 1 113 68 1 114 68 1 115 68 1 116 68 2 117 68 1 120 68 1 121 68 10 122 68 4 123 68 4 124 68 5 125 68 1 126 68 76 128 68 15 129 68 8 130 68 3 131 68 3 132 68 2 133 68 1 134 68 3 135 68 18 136 68 6 138 68 1 140 68 7 141 68 1 145 68 1 146 68 1 147 68 2 148 68 3 149 68 1 150 68 1 151 68 2 152 68 4 153 68 2 155 68 2 156 68 3 157 68 1 159 68 2 163 68 4 164 68 5 166 68 1 172 68 5 173 68 1 177 68 1 178 68 1 182 68 3 198 68 1 202 68 1 211 68 1 228 68 1 239 68 1 3 69 1 4 69 19 6 69 2 8 69 1 9 69 1 26 69 1 27 69 1 33 69 1 44 69 2 54 69 13 59 69 1 62 69 1 72 69 4 91 69 4 92 69 17 93 69 42 96 69 6 97 69 1 98 69 2 99 69 8 101 69 4 104 69 28 112 69 1 113 69 3 114 69 2 115 69 4 116 69 2 121 69 23 122 69 7 123 69 2 124 69 3 125 69 3 126 69 42 127 69 1 128 69 19 129 69 10 130 69 4 131 69 5 132 69 3 133 69 2 134 69 9 135 69 29 136 69 1 137 69 1 140 69 10 141 69 3 143 69 1 144 69 1 145 69 32 146 69 3 147 69 4 148 69 5 149 69 1 151 69 6 152 69 1 153 69 2 154 69 1 155 69 1 156 69 4 157 69 1 161 69 1 163 69 5 164 69 8 166 69 6 167 69 1 168 69 1 169 69 1 171 69 1 172 69 4 173 69 1 179 69 1 180 69 1 182 69 6 183 69 1 198 69 1 221 69 1 225 69 1 233 69 1 237 69 3 4 70 104 6 70 11 8 70 5 9 70 4 12 70 1 23 70 2 31 70 1 32 70 1 33 70 4 39 70 2 48 70 1 49 70 1 54 70 2 57 70 1 63 70 1 64 70 1 68 70 1 72 70 2 77 70 1 78 70 1 80 70 2 90 70 2 93 70 114 96 70 7 97 70 1 98 70 4 99 70 4 100 70 3 101 70 2 104 70 13 109 70 2 112 70 1 113 70 6 115 70 3 121 70 37 123 70 1 124 70 5 125 70 6 126 70 114 127 70 2 128 70 104 129 70 50 130 70 9 131 70 1 132 70 10 133 70 11 134 70 26 135 70 125 137 70 3 138 70 5 139 70 1 140 70 18 141 70 43 142 70 1 143 70 8 146 70 3 147 70 5 148 70 1 149 70 1 151 70 6 152 70 4 153 70 7 154 70 3 155 70 2 156 70 5 157 70 2 158 70 1 159 70 3 160 70 1 163 70 2 164 70 3 165 70 2 166 70 4 168 70 4 169 70 1 171 70 4 172 70 25 173 70 1 174 70 1 177 70 1 178 70 2 182 70 3 183 70 1 185 70 1 193 70 1 198 70 2 211 70 1 214 70 1 215 70 1 217 70 1 231 70 1 237 70 1 4 71 1 44 71 1 70 71 8 92 71 3 93 71 3 100 71 1 126 71 3 
128 71 1 129 71 1 135 71 5 143 71 1 156 71 3 159 71 1 172 71 1 181 71 43 182 71 18 183 71 4 184 71 14 185 71 11 186 71 1 187 71 8 188 71 6 189 71 14 190 71 5 191 71 8 192 71 1 193 71 2 194 71 3 195 71 6 196 71 3 197 71 4 198 71 2 199 71 3 200 71 4 201 71 4 202 71 3 203 71 5 204 71 4 206 71 5 207 71 2 208 71 1 70 72 5 91 72 1 93 72 1 126 72 1 135 72 1 147 72 1 156 72 4 172 72 2 181 72 41 182 72 8 183 72 4 184 72 11 185 72 3 186 72 5 187 72 3 188 72 5 189 72 5 190 72 3 191 72 2 192 72 3 193 72 5 195 72 4 196 72 3 197 72 3 200 72 1 201 72 1 202 72 1 203 72 1 206 72 4 210 72 41 9 73 1 33 73 1 67 73 1 70 73 4 84 73 1 93 73 1 126 73 1 136 73 1 142 73 1 153 73 1 156 73 2 181 73 36 182 73 12 183 73 2 184 73 14 185 73 13 186 73 3 187 73 2 188 73 9 189 73 8 190 73 5 192 73 3 193 73 4 194 73 1 195 73 4 196 73 2 197 73 5 198 73 1 199 73 2 200 73 2 202 73 4 203 73 4 204 73 3 205 73 3 206 73 1 207 73 4 208 73 1 209 73 6 236 73 1 70 74 10 88 74 1 153 74 1 156 74 6 164 74 1 172 74 3 181 74 55 182 74 18 183 74 2 184 74 18 185 74 8 186 74 3 187 74 2 188 74 10 189 74 11 190 74 5 191 74 12 192 74 2 193 74 4 194 74 2 195 74 3 196 74 3 197 74 2 198 74 2 199 74 7 201 74 4 202 74 1 203 74 2 204 74 4 205 74 1 206 74 4 209 74 1 235 74 2 4 75 2 70 75 11 93 75 1 103 75 1 104 75 1 126 75 1 128 75 2 135 75 5 140 75 1 152 75 1 153 75 1 156 75 3 166 75 1 174 75 1 181 75 58 182 75 18 183 75 2 184 75 23 185 75 8 186 75 2 187 75 3 188 75 7 189 75 15 190 75 2 191 75 8 192 75 3 193 75 1 194 75 1 195 75 4 196 75 2 197 75 14 198 75 1 199 75 4 200 75 2 201 75 2 202 75 3 203 75 4 204 75 8 205 75 1 210 75 2 219 75 1 4 76 1 25 76 1 27 76 1 36 76 1 44 76 1 70 76 30 91 76 1 101 76 2 108 76 1 116 76 1 128 76 1 135 76 1 142 76 2 156 76 17 166 76 1 172 76 2 181 76 54 182 76 28 183 76 15 184 76 62 185 76 29 186 76 7 187 76 9 188 76 23 189 76 6 190 76 42 191 76 7 192 76 11 193 76 6 194 76 14 195 76 3 196 76 9 197 76 32 198 76 10 200 76 3 201 76 6 202 76 8 203 76 1 204 76 1 205 76 2 207 76 20 208 76 25 209 76 26 210 76 1 224 76 1 228 76 1 236 76 1 238 76 1 4 77 1 70 77 8 128 77 1 135 77 3 156 77 3 172 77 3 181 77 66 182 77 11 183 77 2 184 77 9 185 77 3 186 77 3 187 77 3 188 77 12 189 77 4 190 77 2 191 77 3 192 77 6 194 77 2 195 77 4 197 77 2 198 77 37 199 77 1 200 77 1 201 77 2 203 77 4 204 77 2 205 77 1 206 77 1 207 77 2 209 77 1 45 78 1 70 78 5 156 78 6 164 78 1 174 78 1 176 78 1 181 78 34 182 78 13 183 78 1 184 78 14 185 78 6 186 78 1 187 78 3 188 78 6 189 78 3 190 78 1 191 78 2 192 78 5 193 78 4 195 78 20 199 78 3 200 78 2 201 78 2 202 78 13 203 78 1 205 78 2 207 78 2 208 78 3 210 78 1 4 79 2 49 79 1 70 79 9 128 79 2 156 79 4 172 79 4 174 79 1 181 79 30 182 79 16 183 79 3 184 79 6 185 79 5 186 79 1 187 79 4 188 79 11 189 79 5 190 79 2 191 79 6 192 79 3 194 79 4 195 79 5 196 79 1 197 79 8 198 79 2 199 79 5 200 79 4 202 79 2 205 79 3 207 79 1 208 79 1 210 79 1 4 80 7 6 80 1 13 80 1 26 80 1 43 80 1 44 80 1 45 80 1 60 80 1 65 80 1 70 80 2 91 80 2 92 80 7 93 80 22 96 80 14 98 80 6 99 80 2 100 80 3 101 80 3 103 80 1 104 80 10 111 80 3 112 80 1 113 80 3 114 80 1 115 80 2 116 80 1 117 80 1 120 80 4 121 80 5 123 80 1 124 80 1 125 80 3 126 80 22 128 80 7 129 80 5 132 80 1 133 80 1 134 80 4 135 80 16 136 80 5 140 80 4 141 80 3 142 80 2 146 80 4 148 80 1 151 80 7 152 80 5 153 80 1 154 80 1 155 80 1 156 80 3 158 80 2 159 80 3 160 80 1 163 80 1 164 80 4 166 80 1 168 80 1 171 80 1 172 80 7 174 80 1 178 80 1 179 80 1 181 80 6 182 80 9 183 80 2 185 80 2 186 80 2 187 80 2 188 80 1 189 80 2 190 80 1 193 80 1 194 80 1 198 80 3 199 80 2 204 80 1 208 80 1 
209 80 1 210 80 1 231 80 1
Seurat/tests/testdata/barcodes.tsv0000644000176200001440000000252014525500037017010 0ustar liggesusers
ATGCCAGAACGACT-1 CATGGCCTGTGCAT-1 GAACCTGATGAACC-1 TGACTGGATTCTCA-1 AGTCAGACTGCACA-1 TCTGATACACGTGT-1 TGGTATCTAAACAG-1 GCAGCTCTGTTTCT-1 GATATAACACGCAT-1 AATGTTGACAGTCA-1 AGGTCATGAGTGTC-1 AGAGATGATCTCGC-1 GGGTAACTCTAGTG-1 CATGAGACACGGGA-1 TACGCCACTCCGAA-1 CTAAACCTGTGCAT-1 GTAAGCACTCATTC-1 TTGGTACTGAATCC-1 CATCATACGGAGCA-1 TACATCACGCTAAC-1 TTACCATGAATCGC-1 ATAGGAGAAACAGA-1 GCGCACGACTTTAC-1 ACTCGCACGAAAGT-1 ATTACCTGCCTTAT-1 CCCAACTGCAATCG-1 AAATTCGAATCACG-1 CCATCCGATTCGCC-1 TCCACTCTGAGCTT-1 CATCAGGATGCACA-1 CTAAACCTCTGACA-1 GATAGAGAAGGGTG-1 CTAACGGAACCGAT-1 AGATATACCCGTAA-1 TACTCTGAATCGAC-1 GCGCATCTTGCTCC-1 GTTGACGATATCGG-1 ACAGGTACTGGTGT-1 GGCATATGCTTATC-1 CATTACACCAACTG-1 TAGGGACTGAACTC-1 GCTCCATGAGAAGT-1 TACAATGATGCTAG-1 CTTCATGACCGAAT-1 CTGCCAACAGGAGC-1 TTGCATTGAGCTAC-1 AAGCAAGAGCTTAG-1 CGGCACGAACTCAG-1 GGTGGAGATTACTC-1 GGCCGATGTACTCT-1 CGTAGCCTGTATGC-1 TGAGCTGAATGCTG-1 CCTATAACGAGACG-1 ATAAGTTGGTACGT-1 AAGCGACTTTGACG-1 ACCAGTGAATACCG-1 ATTGCACTTGCTTT-1 CTAGGTGATGGTTG-1 GCACTAGACCTTTA-1 CATGCGCTAGTCAC-1 TTGAGGACTACGCA-1 ATACCACTCTAAGC-1 CATATAGACTAAGC-1 TTTAGCTGTACTCT-1 GACATTCTCCACCT-1 ACGTGATGCCATGA-1 ATTGTAGATTCCCG-1 GATAGAGATCACGA-1 AATGCGTGGACGGA-1 GCGTAAACACGGTT-1 ATTCAGCTCATTGG-1 GGCATATGGGGAGT-1 ATCATCTGACACCA-1 GTCATACTTCGCCT-1 TTACGTACGTTCAG-1 GAGTTGTGGTAGCT-1 GACGCTCTCTCTCG-1 AGTCTTACTTCGGA-1 GGAACACTTCAGAC-1 CTTGATTGATCTTC-1
Seurat/tests/testdata/nbt_small.Rdata0000644000176200001440000133013714525500037017431 0ustar liggesusers
[binary serialized R data (nbt_small.Rdata): payload is not representable as text and is omitted here]
,=CcYwyza5D¹Jσj~vM77|/(E;1Oҷѻy"W: Vg|֏I^xbӚ{3)pa3Cy+zG{S֭vBGzļ σƬ0oι>RUE>=խ!UE¡ e]*IÇf|ħe(N;RҼˇc0׳,ċI{ ϵ|$Փ&Oq)ܧ|8P_j}[x\sX[6G?A|`90VxY7 y>*wZ4ʴ?M~uxnEtYucޗ2%|>r<u꣊݌"!'>"QMoTfVe W;sSZnz6ܭ.ᣢ{[': |=Y{ŢcW\S09`[C[V+̗&ZϙW1R4IC壃1F{׻JZa~#F3~ܤx6nw-u n~>3.ƒqU|(O|ոy7yG)ԯ[-a/YPC6/ W'mԊ7a=Gq3߂%q1V8~kUWI?z_+K~ gnTy_񂖾By1} E}1E+lgXG,]Mĵ={#;+e;.弅OӾ熞-߇ zсl1<"kQXTy\̓MDؾ48]}IǛ"u%[A" cБh:|_]t&gr>D!>8jYx|Q»@ *};y]碾7=K,WL[0C7azh~\o˦x)9"n837}/-)n5a/(u!;":u>SΫMDЍzwUh7wUsyϫ/R /('|7gzA~?}qYH)GJ;N_b=껨 u7̸>51eG0L*z ouy}' fR.򗧂;)~B8cC|qDǎ\xWǼpTFȫ:ɟA<8` PxRSn7?=aq"5*xROFvy3¢/qNk'~uHN3~7).Xޏvh>qG~ax98WChymoI~}EԺCaI]oy(Tw~bdXcc;0EU{DcM?\6CU}Y?ǩ^$eQw珿w6={{PRYw-83 _r֢WqsX/cҦNGӇo}w/1at"1t\/؞kqx)޾?'}tŭGXq޴0-D|ã{%eX;ގGxeI} ̇)Y{c^bcȂ_b\tǪ,xHAE)Y;_>S=۸8Σ4.xz(#Gx޳,kC+zqp=n?~{{!ީx%D~_sjA~ɢ h^Vw=OxZ=b O*2^VOM~?AcUU5axF e澿s){?}`ǥKK~KY}:񮱽]kqxI`>>׭sO[, {^=όˍtߦOҸFU6׹{P\)n^ފ'Q %^.Z}м8ep"ҾQ-_|194NI#*Kz]tafg*#b^E~ SnHz>}xd$u%n:%ޔOmX޿:%XDq99aЏłKaC5Y;K}}1tFǿ{a~h=-m{M,oGyOA/GU5g ;U>퓹g|[:Ou0_K-W 8qOx6}uF'c?Wo~_5`ο|Ҋ:âw\qxºISQz23oK.~f;x;Q6>Ǩ)pQK fq ~\ X7#q}҆u3M?on:؆}`ޞ!k+ˆƈ, :XnE$> Ŝ,ڏw eOϥ<=1?i!OuC:~/ם'OtZ<ۮf3: mvG>hg|ot r\1O->n^f|=<^.|-EXڕރ^<~֏dI-N|(˶=  ~aԱߍnx C3Ǟ)n\Eh~?To9#UuټS>ad+4ӯ7^?W/#ڞv%mx C G|ؓEgU9ҌZO?S~RgX?+$/n ~PeEՂoLGR>(x6&kUJ}t:3en(Cݞ/f)n1t-_u;]?fvf>('_ /Gl(Y"|z~ǛK٣%nXаSZ\_;pxDP!^ޘE~V-d0k6B3CxB/A}ZC+r՗gI׼h@VgEaWsi_WОpXPE[?ދ+P:ߑ׿*y]"pǂKPoje;Gv8uѩ$0\ ڿLy~4/j4y'z;8a3>um ΃i]7Vb}7Ĥ~FS_6Z0>uVGaz$@(PnB9QOڋIv5Q 1xV:鸎/mˑҘ굪/0`o;o[MD?G8@N1觛3 }iYUz gFg)Nby \Z+0..0W: ð<)yz$Fs~Y8jΟK]BL8F#>5*ĸǻy.7x1Ϻ=;M.x> ^EQ _+\qqm~(_"a:N>7Rx'K;q>X/>AuŹ):W4M-h+kؒN~CoBt>44S] E^("錻Xtsd,/Mϕq (~5 /z#J |Wt|x5:&PaYkhƢЧ{?g_\wz6c]ys6k8֛h![W/<] 4 sSQlBxn{?}&/rWuKc?kV}Өn6|ywwNNZ?jyђpRC:hz~+r)nXK'_K(t^Vf\g:kI>gFz,TW C^۝焗d']_zϡK#}JgC9̯*OD=M.D=[G >ύ uD?^fl} cۇU?| =CדERQ:dgHLJMKo׹K^=9ӵQ6}A7Wklqz~ݱSǫ>㵪[0Mg{n,,J/КB'ꟷy^䞥'K9Wa~WK2nzRg{>y_8ѣz{9yJ3nP!uÔ^z^>Ĭ}&WZ*1>_AN7^:8nz(7e+__ۍj>B;[~ۅ51alwM |1mMW >y"azR?#4MK+Ղalk{R ;HOsT}oӆcsۑrI(~ fڧW4;a[z1O缓w6=}b[2,}ZьXs[n ߗ̾pؓPQzg8_90NUU#~y|6*Bs/}jVLª_SGfߊ ՂquΣpwҏWgNDpdRGnxXp'{(҃Yĸtjlf{)b0eylC J~s)}t{Μ:\|[QWW4WEGf}0l"Ҝ~yokz?Kh`~)d à;^֏a:p(RK]1 [ʺ*|k+HN:>_Z+⨳w<ԁ 2v׿vԇޟb _bw_H`6,.EcQVGu0Xԏd G]tUz)26gjw}*Ss_X: Wu// 'TX|,#~eoO~E Մb>v#6IS4;Zq~^GWzB>?/ih1QRwFp~)eß?? Y?xXMGq9p'fT_2}9T,K|Wl#˾U=4/sB6=Lg7kt7lЀߟ,.Vʸ.!gf=#ْAe{7׽-aHG׳7zcsZ>c/j(}0>'I֑5TaHFFP~*O x1e7c7$Ƣ+5/^4|)!UǦ^y]yD,p'Cˎ?+!i߇үYS~!x/J'W3~ "yZՓ~8&p 8>G_  -)54~]`? b2W2@qe_wE~Ky;Cs(8oSłȟ8n(d?~MTu~ܒq|䃄.?ExKQ8qlDk})npb><,y=AmʔajuS_qU-:cBE`LeEOqD)iDu<%|Ü'a귡WO7lH}f;(n.N/$?т|) .DH'i}nV13-2^gR[GI[Tb\GEsI$:XY+F=Hx?'~⇪(ܣ׋AU{Ƽc:^N>T׀ 4̟w:Oڏ MRy٨xl/䐛N=Ot8V\y=Ro7NjR]Oi^wu28;mZ2yݛ$>䱞C+>}a ´U}80{-þh.z>rsG̤F|Wr]9S<6r)ї[H<XY3 Ʋ^ :o5շ/omRAx)Bjq|,Dc_NV(*M6b޶x2¿`q;FH1?7T=ʁ H,F  d kclc 1LRN=[nWuUw$v3ϼowun8\WeE<Nj 0Z"sOOfq\z9ō78s)O:O@ۨ4ˍ tkK4dGJW.. =C{êDvT&cp n~?Ӛyꐸ8oQ^Dݹ߬p^{4$QC=p ͝"Y#np|E[Wi_y7z!!<خODx7Ե{}2 "6jEYdxf#  j!5Y1ayPp :0)T`ST4;}uu[&|"⧙*~b \N s 1vuhCSPo)f~U'ڗ_ 14ÄT|I8eoF7AS YNr=nD^i-^s6ճV4W|GZwۿ^Kf1 -m0PGz)坆ꌡKd:3s= S#d5Bp~5Ogf~Aes]g8 7ipgq~(5i]M7p)qq+ۅ/-FywM:02njWfL CQP(|Xy: f~?=bsp%dz߈_v h^jAW{J+{{kQ\0/|3qڷy +S3O1v2=]ɠkX@|qLx"6 `.uBzmD#LjCS=xZr+g0+TdC/)<~+{M2=<Жg 6EtݫP^8/#:R|"~)_'2=w ,U7^N ~DΙs WIk&'$?-~G׻SE5Β6]hhX/_5:Z5)"=dž?gYǛG~;z?kGU|l}pmkL8+BM~/>oXiq8~qKԧ;a'BwQWD+g[p;U@o/^|^5ԽV/p[ `29%GJ4vIp|>:Gxyz]u5|lAN ƻ?cV齠]xJLBY.r@_~l:eiԉK?n|&ׄ?ݤ>#.&0޲uq&_iAyuC #om~@qE,e1T/EDIVa |nѓeL89?C+]x\Q!^8]Wȥhļ{c HGӅ2GwUWqQuɔǷum{Ť,Tݰy۱sO׍=.M|{y':e3d]>ؿB42ϣMfy=Q (8(z]i7@x~1܅? $ڪI߾SG[||náSᣊ |W%HD..͇wU9'r p+#N[jp}|sqꤿ|&GFQ?T4Xcͺu^|ԇճԢSqSA[4b~CgWw?*nxĞ\WC/^KD] 묡_u5}f*n?~0쬽: Y|9ţNZE F<Ɛ_Al? 
m>A+ <^hu?S5l]̺޵|BUeaCqu2N/u<걁GCc?)VWs2}^-Q\հN=<o=9>[kL']"8_-*O=Ø q1Yv3#;my+=sR=\t= *Rl,*.,U9} N&ǀw$^>qOdqV%|n3nun,<ޑtqw@~6H qqT\U:dO93ˣ5wq~rʱ֢T|ߍu6O[>P?`<<)@`_x'QP'׏a??Q1ܚtM|<Kh )]tS<]㖋1TpaJ(qdj^ǿո;ϔy67i?x,[EoG뇷տoO q}yTҼ3y\,nWquZhy]])}CZxsiJWxUQ#4cҋ^~g: Gӷ0jC-x?qݏ}iKï%]TFup|<:ןr$ܬNftRP2?՛')ɟk^i}B?/XD}EЭSAߐV6uPQl?^u串llmyߑo3ݫi "+?Ե$aOGz>+JB.R|/EtQ}dCw0FvVq)Jk=4L;YPV.xƬD>N8y:2e<.af^jܘRy3e(M8WM*_r< k -XҷQ EWd<~h1.S˸:9m@ܫ/A!}_pf:QqL<$t_b9pQMalut_,(%| oj'k:Dwm2miD}1|i; w<鹖T2eF̦/_hH{yRߣoTk7CIInb":+RDGPFW3šE?" =/ϐyD~ReRv<cJL_ĵܿ+S?FCU{!H:ݨO{w᧜fzƳq?Lu}8[/}`ܼڄm5D8 ;q #ߟ# B/U`ze77$拺dEoш̠ ~\O}BuS|^7'Nha J"n*`w1k#Q>q|R̒%U}mKmH&>/]wy3Nȸ9?qqn?(J=cus8fsƯL3qRfJ;kXW s{<Ϊ:zp1cOQ auRϘ5\O=rߠgo0>2KSbS< }hq48D>2\gc_|m+6U"q@~=pMXs:9n\qľzԃ_{sX>cÅ~~;m3<>/-B#u/ol[plJ {OmvsuaՏd\'p/w[e\:YqE8sYHg?EK#?@}M}ǫen.DŽxӉ_xf{.ݣ+竺g*7#߲B>6Ǵ՗"^dSn26^,?Rɰ]C^t>z42e\7}:}Ǘ} Չ6u _ Y O!Hy)uDC7 gÓ2O%l,ɿ2z>b: /f_Ϗ~ZO'%?'WgGe[YoVGKmU(1u bs۸8 /4Tƒb:_ey,i |.r]X"P}=8DQ?:'F^n'7D1Y_WW(٧5HW3IN38T*xN٠'?Xica2 .ꋇ&Pzhgz^M*%)86KAU} xG.|mQv?{D^pSVI5<-wtyH^n^kZ -<|K|#]{^(3y$!Ch/R'KމxJRsՒ?xf36~i']zWq](\E悇tрܴD/Mːgւxmֿs VՋF#Y4vAJIiR.d vu|V0Nx~lB:xT1eA^!{3}:zIRvsکu:/z,eӧ͇mp3xu7qäci d!a8zWiI}ۿ3u{N$Dp4KX|a,aLB%ZxGf>+ֿ X{wKx o&]TSߖ }ě).R *ﻼz_1E6U,lj+ܹH w^?D^L!Z>?Jy+{_sx'S;`e%ңl)[  HWZR.EZ0?tNoN%Bڏ}=H\B}3vs>)NN O6VA#}TyʔGSOdTl/t|{8hutSu4^a]+/ '??A|eovL*|چ%;?:Qk^{$>p~3Ծ"=ϖqQ+2ϛt x rx_AX(,b[qC8D6`>#QӒ|Hա>*%G>X,y1 _SLy2TF^2\]K7q7nʇߨ}M68>_o2.Vu%}U'c|ӗb]~/I濬eEVD?o>,EeGz j<.D߆-rzch꺚Q.ݺ};\w [k ΐ埰Mw6oZ$UcTL٤wy>42ϝT#bA5)!+%j2|]O$U3G,KG8ʄ7i{6|/ wOĵ-O6=`\ϏUćkQWyUҙ/pR&0~UfM 7-uxmk }Q7xWyx'~TSC{ `NyXp;r/>_yʃGk0?ů&{wvs ~lN*_@mg}JnyBt'Kj2~2xWX0IQk|G[Y0%Yf13Fch&疕ew}H6 ?jP016}?t86ly1%2ŧIVXxHP'a\as>w~}#Uc:@SzQDPLׯL{ Ga#JC>%\O*p1ŜgL2Lw&~|tY|^ue>,:~ǏuIƷs|xޯKl0| JcLC|ZΑ^ÌbY 0j{+ȥ=Նߋ[7.jwHyUQg9bD 3cDsLSj^%K~~%uY0X@dBw}Oǵ/\7)j!Rq<}}yBg~LWDk6J6>c!HF\C[׼)sH |V|oC:?c=!++n?q^~A7Q?~;>Ϝ0:3}l~X#Oqcxx0lҎS+]+Lj539 up?n ƿFhtl9Ioӡ}?МX^ԇ,:i|[o,x/Z;g>'2OO-p{ˉz,UWx"oc{n׀a"OX`B[Twmb/ڔN wg_iä"ޗ|Wìʡ,=7s9WXM_v(z63zqͤh2 BOg ?#%uEfp9xWnWzo{dVƯ֑iwcy ~\n!W8#{-[|WtsTqyLźWІɰ5s"vgYhT%75<%tWROt:?jޟ 7|B"K=-A!\W1ms"yzVV6dѩ:6^8_s ~f'2ժrKD>@; !JٖS7#*~_@Nf2+Ü|A?8M}"׼鋅ɻ>n._]N'M4}<*%ebfzI<)꺌oc^_ٱh}v}/G:;/ʃyMImד~2ngC`[Ș6 8}$?=Pp9boƏ ^JM~`3G48mԅJ m]uق?W38'CyR]yt ZJH1Dp< xհq^4<| fgEf5ᢈ^)3xN2NE&P/KU?VHLf෦:}Q΀[b(cQQGdJ;t-HFt zK"P+b7~Kt ѐwTy}n^*uO)O/ GV͏r|(u{.$ ̕Ga9ǜ:~~}zzP{<`KQ%a~Ԥ8~#/iwԢ `5ЄE 7gC3f86jVr9dK[u}%׍bN8qI%戅w%_Ӎ2E+2Z|2WNhЍ+:2Ugaw/5ٔJ ~OU?dqueպ029{nQ}{j_  E<`2}Y} ŽЇLƽ^w\:ƾ政edk3W6C[RoH[w m7[.q"cu}j3=ϔxm؟g>o͉]0Јˆ<ĥz_c}I!mUXU\$˷km|ΓsfD-iQwꎦW ;uh'& nڦٙխɢxÄ&R%#qQ>y#٧#SS *7u7RgEU?2E546_I<'LLg&=mvZPMӹϼ{N;DNN7_(<<4/"Ygj3)zgttǹdzoZ"~|7=S4߿w{umx2'_d6}Iy)Q-s&^p1-|cöKDJ*{,:mrm/S1σE4p/93N#t;ƛR;nm؟Kn<ϣ Qxs6:ohN~,^pFV&ܼ%<>tƫ;vF߰#Nz;#x7?Z [!ݼ ʱV::-hM:C"4d׊a7 }SЗ- q&}dhk^ d/yI\ =T}_,Q9n{<_f?O>9mwKQ7/I kE U 8; ߣýGwf39[ <-x->yȨ)׾GěZMOw;Muuˋ7OtTҲ@jG=TuNإ2چuDH@ަ7sTY j>7Y!l-dF"prٱ[r~ge&t dr,R{uo'5 7'[%Y -U=LjG|zXbSu)3\$~Ew"[IZC}+Ҽϒ/FwCUuW[B:qMϣw$_=?3z;LyfdtFVB|CQwDp< 78A0^$ l(~2p ߫J ~z<{ =Uʭ-^#_h?❴w#7j?uLǼXQ57c֣X|F,Hl;Jzx/ OrF |ȿB'Ͼn/ Sǫ<ƪSB_ۍǺs)7? k~nc>P!tqz &_秪#!uD| NnoW0?,~f%~ b^4؛ﲠ^U4<'ٔGb:zӉ/.<j/A3xu["ԗidfJ5>3Y8xKӇ-:>^] 9e<9p\ˌI~V~,4(QuT,_KF^zPeVfeysL_NpT˴>]}YYYYYYYYYYYYYYYYYfL36p_iR*<7xDѺh$^wa'Py~G8ci$束~rP t`Дtuq1< 3]t! 
AKO_8MQS[;UOD6x|'.?jm?# w3 B ǀt~ =U=Ml 91τ^&WuV{ =!?>^?ɤGީx+}TޛẚtPkfz|]|d+bQEMgzgJ2Zͷޑhωc?#K#;b\Dz"SU_L\ ?tZ}>'?0ocf:=: [GPձA=Qphn:~~ V~*P߮ˁp}뚆yqDb)Ηż{ncbj_kGasGVC''w+RS6}yՊ}xgg0};m_5ni_ cvi˔I|w/\ǏvN7ꗓw2^co;L/QoimeN8Js1j0p\<conCD=MWC[O*QI2V,\_ӇuM~#lFד[mq*z_~L,lo48nMjgi2㐮4VuKkhG=iyQ)K^-oLuPp#sbZ dtҡ|U$z=rm&}yQTۿqbaeZS߳:xqw_׊k\|^?jߊJ%:>s>Z#vRM ņ|8J>W\C0ho7tcP%GߦWIU-G 켽-}G9 /GY}6)\ʸAG\HC;($}׷|S]3~ωDg:SNx5u{^iC76"򷬿fsuH~=Oc^[uw.PAgNn*WL+T~_Fw#E"Ajp$$o{^7.. 5r?"42 |5 #xgvEg|E9/`po>ߑR?k*^&0ߥV:2+2+2+/y:28Qfee44,~"5^2X%j}<'˯4_Kkǫclo!򑫯=|T7; }Muֆ~;iCo~ҟ 2 #Rף6ǫJ|wp&xtPKjK⋼ߚ%17o=Y}%z)Z>dCdn *1]}jhӄ{lP]z]}, q<yK{IasuhǥqX޵5ǛG0?Sѝ Ϗ?#|g~2xnVM%CNV? _I<%u__߿yu&K^u .5tryu\ 7oUxzJpU?.p~Us>~UA㈺!vm[14š;K|O  ϑS>bfLw GIMbȳ،OhH;c~nxw 8Q̟=>O_ Λ.9.6w؍e)l2f3G*#֤baCX:S7}ƈq?WSAa9?"_OG%:5 _΃j:޷U@ץՉ[cK[l넪`wbI<|@ٔX~A* %Ǫ }n :|5^kssFC;5&0i X~w`?+ w < XncNG|.-_M*9w0"dž|wFgWAm8k7RZ_7ǟzq͆K5#VGA8o ~y֟¿hXdmLu}ַ穕yKhIG *K.KRIꏭUċ߳>oy7+\\=j;[–bpܸT2Y8OrՋ*6!Æc@O+'_7=-@<' fϷ'g'*a7Xݔ!E}ٯ1NԏRf1;|#? _Ϣ,t_y@?SX>UΆ2s7pL87ECzə3[W ku?b^3ri ;904qX].q}إƳXt6/ 3Ks*@E?5e7ګĺD|B4,4-~-%G/FhS8g] w)YU'zΏ^pρ,88󎼆DgXt_qf}jx/#'3D^׀4钠<G8DlqӏFz~W {xx fIߟy }FηUxt?.t:\ Me?ɐ<+99ReaTw&=vWOq&a~r7aiqxDORu>٭ppr^w"-O }^gX{Fp hdnb76|=EȶƯ̻v EjG`D\]+?{9C~x۪x/ō5R6=#Snx,Aq5O{^_7]=żמ/<+co7R>SٰI=5! _?%?Uuq.pwu;ծQ5o =w/ƒy"tȣ{>jLǝmY߭GP 5r];P;E:5&ľ-y{⾎ =sz])ķ~A}ЀxU .%A_aj#pKD:㋻+%K9ؼ|pP"q׋BJxh4zK*4Ud}#QC(뷜8ԡ;δasd{Ȃ .I} 3A'':-pn ׀e2W5ߒN9v+Z;f!uJ- }e6dD{2贍k0z{%nahs_;MgI]L >Lzh#t1oy;I,vƄC]y_j&h88?z kSU5>p*pIi:J Sk .<%ni";nS19|7Мʢxx69LzquTs4oa}ƂO)mU7pc-:p~`D{;~X?tp }E}GoC;\O\ϦWogǟ=͵|,7DLl?g/8=N؄DUb?QdAؑB Au\W*?QܜDq^t7ݯM=Gp8/VF}EV d㦼qŢWRuOq8¾UcFğ,g>| Y-XoA7K/?+(WC nɋe<bH3ƨxۗfEK?̘:\<_.Cao[=Tlp~bǿ%-G7·ce}E+l:Ts?-:G"o+uXw%,W?b]f)G1T\0e;y⮿>¯,GU}q@벬 ~߉T|!5WY}!-#1@S_Mue[$zl[7λеĮy>bgxwuo:g#>׻^ztLg:*5|7#-}b=n<߆}W0`bk?bX?x x2zil78@-zpq1b^FEޯ__OƢ~Y+I6]uֽ+C- }jY0$4gȋ߿}AϫGAg:_}Y}J:h>Xwׄ>TL\j6IYh.{]rT>W&hM}?dlߌ4~pI?t<}RT`z]wL/LK$sJtDaᥚK'>ofy'ihnklٲ QO]M~ ojCTѫ#˾4hhnV~/<ک/w'ŪwTZZߧyfRcMz' ׏7{~yReF[akFMHexvjOSoN56&E7ˇT'|3Z>uïճU{K$Iqb'sϊ4A>} awiغMMb)O#[/33ԃQ@<.=l:Ow$o33fR+Xϛe_*pk΅C4to>X=`<2?nU+֗M#?f8\1i*< SX_6\\= Ok~^8Ғ:#'ywRGiY@gFoE/$YÎF!{l_S_*S7Lr^qT:n痷Dy~jdPՒ['ǘ}vucPlwxJއ;0'4?#xOVy)u[*7\q¤C)Fw7HY\̎v!"-Njhgz3m-,<|5y1{\x$\=Go^B5uwYl7LJZG{)4o*癸såcu;xhWm )K0?}qr?-$ޚ$q̲>L Y{>1^_ؘ_*cOT? IG1M>[:zǀcjW: } eYe:yBTHG\Z n(z}Duz(ҵ7ċvǬmH~[1\ޭ^k[+WdMm>y* .Ħ(0\&h_t"zȸ oEzvU{9+/DU RfgQ;a>o5?Wty U՛8S4#;^Sߖ bx݄sG|xzp7T _ıl3<*>bGn9]}GE=]Lj7?ތ/k+XvsFX`5ϵHi|w:/F?=/2̍7s1|Zh&|QZ k= =gΜ/XCc~&Eu~&2άwhhWd~| u{Az/L˘!#4+ ~|m?^3bQt7)ɟCηnz]nQOn Mi.3|}mñ/dpk cG}/ʅw7)nOW_$ sKſgy{w"'W H։P0/VR/Q=#2/_=Nw/U|l\-L"=:6XZ֫lCu8?r)/ςŒzR7^WaژG _u^XHLњp 4p3Q9T-L,UeuӒ¶3ev~E7kC^|M@ߡN:}>!x1㻏7:<#bjbwdk4-o.$7bWL#1K y=nڈ<0D紦'-߄>gzjg${cQ7?ϒqShg,"w,ݿj9#yQHxj>[gF߃WĮz?J3>X'Oo,wgՄGgyQ;n\{;'-4h}<} oFd:iK<,$֙|7h42)hns'jx-?,!Td)' l+cx9H<&>Ɏuu|3,Qz?TB_w& 8AӼ\^x@~D\w6S_ɟED'}D䧈&!ZqQ}"ҿ0v K1E8\ ztҿφ]KiDusjOzl_g%نO,yr#=lzU'm=unKz )އkE=UnyWTާ|.j=q&b;4L<^'SHa~߄j%0:yhDх;YG22z~)hbyDRrk#ϩ^E:aėBy.6.ސKД9A-VWO(NdK?'y:M~fX҂i. 
z_qB|~Y鹳KΜ!].u[W+pZЕh=׃T"Vׁ#~{M}Lpn]_wxمnpCE7}CKYl:h_wǫ_y>n6"v^Qzi>?㸺`tYXc^ z~G|3x~-gfA=7 xO2$nD=e/GZ\]x~--u%L7]K8 T>x y>_x3EqDx-TS/"NQ"oߐs+q~coqch:Xy?֯8.p–Tn o otv$>[n!钯}ԷR깠V:B)*O|)_gx]zk{+gWq.q%~΄{RՏ@jg=>/g㍠z1ϣ!f~_z~G}տWG_/aW uϣ='Ky1~B``ZoEcONE2%{0w+X'(Q>Q@󑬗+ߛV] ݣcVc 5$tdxMoۈy y >c\2eXKR #(_"1W)o|_zHjԾE t(f}=NFS,[:Qq~Fӈ߈u?wi]k q,oq#P5|䥻G['5xVXOwد6Nzy_U'ָ͇/?{/z2`.b-: y#W,.{WUI)RpSobE*sjůƋxB_&,E57Wx8:Im;ރ}'+*pD2)Xog MNhYG^0Zu3Bx#q oҚp- }F*Ż^|~=VZ/ y2e(L݆Z]U^zץƙ6xySpZOl#yt˒X{8:^uWuU'ÿC}l0yޗC˅J3r、' BUSO`{wq.n̨ivQPݠh|uk1yoX1LHo8w8K\{Y[*Gn0Za#SQ7;ӆ+U x:,DR?`]OXslg~u:!}rcІ&OT <,X/75ކݛJѢ'>Naz]C6>>qٺ{ļJ:}Omf; Db[GbȗtQh(u7P>SOjYMOīz[M;qǾsbzP]30I`byq~P4u[yE RYKE"+zyu5tsԒ/S,VGN(u)Ȣ+Q0=jny{zghFĶjQo:N { Rq"Mc-rE_}_RkLJ m' t0u>bG8oYRm:[ǯ,F>zN`\donKSPDz]Wz~^}Wz-މast-aG~ߒ2E5zWèّ[a^.iɔwtF0>d[N GOsd%__==\;?z9yɎ#hLO[|>+>UDgu}X,& y#̧cΏyy7bʋ0Ncz0MSC#GT$~؇ N1uޫ}P}3#UR,nW#^Wb}m?wP+nj,p>@51dkHW1ԇVG#g=r4Ǽ6M.O2 <y5ޓqߏb~;4fyWD/ry'ȟ9bk~[>'Z$3My6J̫į@ W l΋:T_a%6zXGO1dS~yb5P,aJ׃}[ kxq=/ʧ<}4^x< V]&ut睡|pA/qC~6o9b.=Wz._,/{}e?-.@Ztf66?ޡgS7F FA4Ӆ]r+U6cE pӟY{yYLoQӉ/YQGpWb~Np 5ڥP υj:wWr2cQ 'Ч4~B}#>K3:郴 ;ԑhwGe=+5> _D{>1 ߑaI4އB7G$>s{mx~;*NU7뷈<:{ | 'n??)]g1 atzeӷ<0ju+D_hf}}]'c\'2đH45/%ZRe x IVŐW-~"o:,_.9~³-x2nC.uN5o2Vj~H'ȡCk$o8#zl{ὐqnmz~ ǃă<+ wn>g'>dgF=qmK }qSfVbM%Svzٸ&̲1Zu/O(W*U]~@qA7,pckѯUx]_Mf"fDGIYG>ޛ3TO,͟j~`?BS,x?-}tM} Fpz]3:bzGzz|OF,_ ph`3_1[3oe?~tOyXtsp𸳁%h8/B;E]bNUU}IK%0nhy@?!2A1TC?qԙ2tP#Gg&\O8~>`e6KI_Р'aڌG S֗jPKЗtp7N>l63IߊTHK<Oi?N~yEL5?oAm玩yս8/M'Qu -2=2jd1z:(esи.xA7;߶kWQ}P^7z\B/6zSxtsKXpFAm];Vlgk;]ؤbO߯xxYc0ט_oZg[w7$5_?=n'贠a4ܮi'du-;Butn+[tj QZXO}2zg6ߌcmiDhT>' Mơz ?׭hm>wgc dw4ejc;U7 z=7{5~+rXxƘM|u{ l7%GB󀍓g7~V /0 +D9HZkvբNȖDP}vC]}dzVOG[9Һ/q -kyʳ:%.Bj ؅h~w<>Ʌj&_~cC(`q:NP_~sSsoS`Uxul;B%>v ,q?eWM}[O-9@k p(ւc+V6~LqoQc-nuS?L;oa+c8p6sy X#uE"ć*[?.}H??0..csyMt/+O;Y[!}>s~,yq\3ˮo^:?6S|ϏNsZ&H1K~<7„2ߒ`JǪ[~}"ևt_ub+84.5(:>LPf_.*Я%cw_(gcOo] Լcr|~m'_&ލw?x25;j(ͯX=UܴΤt̤u'u[N~czy(T G-ZϦܟѐDwΣy2&[ǡ<8& xqWCɔ1+xi|Z4>G4p\jCOfOWwZQOϯat GGDסۧ:Q*ق9&^ɚ6 ;xdYx3n\GgFV8$_bp(V:umunU4)#y EOm8xt׃D5ndQZ^_)EOQ-ϣ)>S}Nծ /fK\yC:GLTŅ7%2Õok;2\1N^ Gu-mWS)4r}uϙި<%dQ݃_ƎFa} NU-xP\1y?V,_`|C-U?m˵t^;D[o@?͒疼kB'(p#߰`L`_=~_%6I:qj\}.I8  bp=WQacuЀwSsmP_Y@TvEOH~>ϯل[SQ !y1Ikf=yUEcf5I'񩎌AˬʬJ0gQVU ,vEڞ/]22!??CZ-Vkgq&CcdzqܾA#ݪA}0:nBwG r.WʎX5<^[ _5nG{Vzׅ;cqwʐCKxQCt~MmՂc1[;Y~)/ ޢ|x]_Ep]2l(5'qƇ/m YݓQ<`T`uB7ACL}g]w3CSߐΔusR[irf&ǥ0%%d;u*1IM\GQL˱Msk>PkXʆ:*EcҝF,I 3#+N&{O3C}5~_>>UMz֭}B#\9I_ߗ7ԙ uzǺѢk/t"ԣoA[7zEQgEZG{EpRZϫod+ :q↻t}Y3QUS?GZ;zSԱ)DŽOG <7&߫]mğEU85=顣Sq|/hޭ{Rҧ D WGӼ"aBx#.8{EuW`G ;j<^6ߞ?W63K}x.ڄ;c᥄Ģ.aAy {Wkm;tMz$6^U73_u/)Cc=?Mb[߿k4_'5?/8_bwN;jḐ!T El+ S|M׻yhcau&.i"Y^78D~E [|*$x24 ?SߍGū$cUqP}_+>6ɔ9oq=%$o647|N<èx=BV|܈p iGް>FXr Z=ĺ>>Z,^vh;ok_j1W粐gOR?xm 0_3m<4ٿ ]D)u#+hۓ>&|+\zt㿐Z\N?&C_/8[炩?!`k*Nk3i y@\x=Z̳VJw^_KhV:}0\`Wy y]uq3Wt6. P3Ft w(Z-y>z-Ubjׯ|0nk-׉utσ|b,Jķ'\..}.ZEgy+#Pt5Ru2?=HuyTI5e@(z Aql\tHw6_H3L)_zD}>/냦!q6_`#CT!36GQr"xVt{)aLQ+ ǫI^X>:SsGu [^_<[FI&K1Mv$C{}y,Xϣõ"\)-CtmRo=h. 
>Or;c~{:cq3^uL*E{>ѴN1891j\KqRJ2>=H\G'Sb|%;[vANVq4@_R'ݦ_hy7ׯjdPzy!@)MH׆A5')g}̬]>?{GN+[tyvJ^4Ǐ~Icolާhqb$;_#֏xszN|a-jaQJ$/麿 yI}<_knQR`u(5@5L(>qxGJ_a!7FxmAGKǬN*Znp_.L7+ױԢhCbн}j#th8\#ҕxǛ*vNpM/1*7 .h&u)/::H'7ʓNN?__V)vSSC» ,,0w䑾pM5~auoyiYao,[[oo3YYYYYYYYYYYYYYYYYY_NiaEEi<#p6~)7Wt\XPHw8C=S[It<ۿ_;/VUg +" "A<],yFUxD>eq}# Nwyu|uwz}ThEN^LsJӂwon&DQm w}6/%G/d{EtuC/AڻL,:V?5O^Z~>cX\^+e:ن>ha}eŒR_53@}yV>&%b p{ᠺ>?_Kï\+ }G&(iǾ+;)zvU&w)3W8~J!s͌Qh[_:ܤÎnzw~YLWzAwh21W=6}bSvwjбPk8[oCy?d޻"Z;p޹S{tyo:|Q=C2H'1Lߣ~qR7M<ʋy]!\D5lF)g /B}Q y1v8(_3=}7fY,o֔+'M?_&%zO 7Wd%t_^WFj?nXK}7awUpVտG>?,:n5;|?jwJ}]/ Ͽq۩cRw u^?1d\LNm@맢)ICCEGl׉|[!휐Y2}'4_GD]/`n2M}ug:WϲRw$ ;>}D%j4_` @ΎUXtj,U]@]Ww{^";b~c:/أ餳ẞ蕬 X?l޻%54q2~C"b`ϠKg<{3wW4tz8#)Uw_,*7:IO_}?n^x~>/= ]66u;.C!LeR'mx9KGCo|X4gu?iғkk>p q;K;@^x~,NH]Y­B w62m#4lVz7+Y"&o'e|Mu/#ygX]kV7>gxn;no?=}} 󗺿M#3y~ݪV8nzE?=GUIW uc}XϏ^{Z;zn<RZǵ;|l~N|]$&^A43~{Z|S*&;W{y/û ?o/:7yG~W^%~> t{чu>PŠ*oiy3oF;o 0am|`^y'-Y8X1SS:O^W^K+__bϞs‰^>~MMex[xФmCp %{,pRu=5{8[W/ö}':U7_n=digw>ަrSLy(Rull$>7-=Blw͑h{S| m\0N*uyǮ&\'L.rc~dM_Y-A uפK`c^\yׅ8^K!B|#ϑP8˧_ C>Ң}k (.| cCԡ!"_fu $);]obUT_% ϏqWEIdtwȠ#"L婑Eyҽ1=iď豱2mr0^Ww۱x8n|z=(yu5- ,]g|'AWN|VO7\[|!\6!W#{? 3 b;y;ujޫ:z9qw锌;~m1F{AxtǕݮə=%EQ{PzB[:Qc=nB{ ͯ7;>K^2S3,4t]@9_> ?z&Gdsu~3ӣc8PĦ7ߪ\N҃IJplJ~|Gw"Ⓖ<=Klxi8X?tD>P_zagb_i2̟݇|}zM9sx3:ߖaZG+/\?rKEM Z}jMu֑$Az>#LS-;Pvq%nx: <_)~ǹjH,D':Z'ӏq@) z끳8o 8w8_u;pX.3Y"T U6FoQІuy`L_:_=! z5w~W1qfz _P6ץܦb}IqMgo~Rkx*cK%e[p$.]7~A=FtD{GT(X\53K}lzϮ&xAf1Eudu{%.:ӆ3xܪ<7meq`xRLq:&.З4r+u,>)ܯOm|?ռ1QY#ς?-RWN\:]_\h|$|qE:)M="/к*[u_ZY7ѯϹqvyZJZw}_͟>?w`tWj[7.>dJ,0JC1S':ʿw;qt޻;yd;׸Oǡ:`]"nGqoOߊvUiiArTϙ() >JGEjQ՗ϽIN8'z5:w7bKE;9\ 4#W^c&qF%ϿG}}>ȿOL\o|WI:]5Lܟuq82ZJ“_TP[qP$!s% yt?9tG]`{穿oW3‼?a+ 74ċoEmh7V  |ud[ EcNrJ`8(^Ÿ PߛRLX^0P>t:5rj`$ B \ {۶xIxǍ}/>NԦӐl/y?c}C!'Wqx&}|'"ODz*n!~nǟB\Cg h­|I{;<>aRڈ8YyGἠdxU^| W ؂L1 'wLF⳱H n 9 ]N1γz<ѐeLlj>_+uhůPU8xo$u ,xס*au^vKyB[|)z,󿸢Wy cJOa嗆=V%I,~ .mS9f O Ypb~4̿BA4O`DE5~\AI,1H=5JRE1ϡ!݇:V!^X4t^@C\M8#$%s gn񸟳zGw_ih ?nxnV!xF\p?bZɦ )4'D?*K_&ɧcz=شgz 튣눝 ~l}Op82,%zqY~ *u&bȺjZq8l{ukx~~|8/9Od6"qW8}K},>b|؟x2! 
/CO:,sY͢m?RAQ>燋(}㮟CVL]\P$qtf-3=u&Lxn4kc}ziC8V_I8U1y' j#ZG:FV#zM-\5,#=ʬʬʬd WfzC9^3Rw:|`yfXE8[k_$.~\G|"YKO:M`7%\3<[0qY;炱;wO.@*;Ƈ6]{1>4} +#0:~,<~K[ulǡgWC~4_ޚE&P1%=9;q]X޹_'ӆ obrI"'.Cz@τS1PS x&\&z"\%sޟ*?0lo-w#U'- d|/ܯ.ݞ#G2{oPK\o/h/TuGs;B7x.(^RPPQW̻)QL(UD{6`s!AQ'w6f:s\ 5iVs<=8Zǡt5"ݹ3ߥӯ73=z_ݱ˝rC_p0 b|Gxx+4uN,?bn W>ϡ3Sq\;EEgs?~ce^78h.ϊ\Єxq;Z0}sO[늖Mh ^wۨ^#kX4@<^T=yEC6 O"\A7:[ W>.]]~^ϟօOҟ֤ckhDy yKDEc::A+F߮KۢQ͐["АcBx }BBT7J2ϯA7fmx㶡N(n#۱vKjo^BT{4WำZ:Vg[֙2fVIx)p>~ _l'yo;uJʧɴQGuEM)Q/3|CEOSR3kIM}ln; ]θ= x37G~iXs~R/Ms7\,@|q S[灉 N<_}Le9^o=!.LGvͳq}Gc_TiF} :~_Z\u_EZW{1\A9Vg^H YGGz0JOyrQ]Tz},ߜ۵c9äq>28^?^5;3c}C[n'{]K}3;{)|]bsQ~>:kpFcwT\*tOe<W1-_?ٗt<ax7~CRׄ<\WQź$p> Y-lnxw|%HYCw|5zxnZ,kcz=%|~1?c~>X#˜خ>OjuBOEAtI _ǛIWE:@:z$=q_OϒZ\gU,<T;\tO͛clE1ߴOxnR_nTnX;7݀չ"TI/s]7rN1>}q7Om~1lȷQWt$T:Q-SfN<7Xg3(&kf_lC)ߋق0F!/{sᅵW.-'Cnq_VT"Xn?HOʫn-lWB.@ͨq#N@]8_,j =y?yzm<`D<_g~?aZw/"h|GzCOEc?iϗ*^ %+|WxbĊTxQ TwM CjSi_sͿ W/~B_8_ ˀ E[7CŧPܼcJܵ#_Ӈ'[aȃᛸ!~!R1gb~3%g[ҍȢ38@8o4p|B>^gjDo10^ ;cʏ??tXaV>[cEnQC٨d68/SyLG._CuoԺ;gޡMhgT]>4fk:6Wq[5c,iyB&\ vqޮck=^[U;3_%n.p(KЇbRP>oG{Rk WG!1\5KmuԢbo8.o.-KW(ձ8?xN~!:Ψt =  ߤ.q>F;뫫ˮwc/D3~Png ao!t)z}#~/Nj&fDcbX/<ώ"U;ǹGBf$R%wЅ]jVG%7YxR:?;?Ʀy:3g %|?)Q`WC{MxC#AMkFu _QHT!᧚>"n|/U/bQGŔgr-DgAĈ ?TtApO>xn_[Qwoz4/7ݬ bI/Q`$zkxʤiۿlW5#+SYWQŻh>{&9{1u/^,3PS-!~жD|€kI n W }_w:>WCtF5G K_(,< ]~ 3Zk&>Q׳?ƕpNQ:Kx|QqIҚ<ۛd3YJM~06\X;l,_~[@=ĸ=YP[^bZQeYk\,L9sxi?+j8??8?G?ҁ)yH[te<+QłV%Hrf\g6f6^ zLp0ot*gKy|_KZ :uY^АAUkN: B뇕hi#М⳪ C|DQ[Ko-#9ROWF?l}JwϓpX|hhSo,i aۺw:K` {LtZ ׫r|m|[$v< %~{}}urh~ = <)2]ͧϯ+K\7{NJu(_ٔslN/ܗCտ~fW/f y}qwzW5;]|&&}zѼ\TSœc~oe?V C}P֙_9)Wn+ lo ̄ۓyr~^4awq,rGG鬠#uc}=R߁ʃp($C?Ks\mwiOWBQG6e|rDL&' qOߦXh8?٠}?\Z c]n.Y6~+g;0pWP޺]t6O;?U9~EE59t%_Ȼ4ML&^;n7߯A7]E:Tdk` 8>XyFRtіrV5 -oy] 'Ohl0]xx)s~9/Ĉ'{D=)-u/tu7̬|԰qvAҖW<{ӑYn!.u:ݬV7 {RbZt2ywבW?/>cz=i+9y}M_K3Kcpj}9Ow{:1u>T_zGͨYE/1Э^ļ墟Tf M, :< `ү>gG%*M2i^IZ,O3p7UtWSY9?~#C笻Ϻ)ሗ"cqE5)Hzo2^{/:Sh@㣖?NA~Ï=O&xYHNc<^aƏko;k[.zPw_/Ӻ#byO\tS}oj<˳wJ:_TWw5l[7S-}~ڮ*+9.LϷd}rpT!km?O|t#K>av-:b]¯3ֽxsG^&:<\-{{Mϯ9n&*sa/d M- 921RulV& ]$ɍ-z ?͗q|߅/,dG&ܲ>xo5J_ҩ`!7|N'jlp&>2Lm?3G8*FG[ċσIT^5Eѿ(dyΜ+ RHϓC"9/1_ >)櫓øwh}[f=MN}x]#_r Fc"OL 94ducⵥY"z9>?>q%>5if[ X|q$?CHPڷH~OKQ7Kh)sp 5(;@o4~BQ묘yQ<0Vq>&1XG.~钡U<^-__13.4ӿYh w*"vt"OoåtYKb:m wsV\Ϗ 𷻌rTٺ"occFx>d?8_9x9|^Cu]4 {#)e]=\t9Fs/h^NچL?J봎g{`ןtNE0Ts[p.?8^(gGF>u.TC= =&^tT/ Wr[~t/x|`'ѵln>D yK>{1OɳW6ezI4u_ἬKQ;Bp<6U|Tp6hH8z'W8{_*y83`*mȣqͣhQay1\:OXt?,/GgxO{ҁ@{r4wU>`b᩼4o'ov/BC~OCoǚO DyЅ|Rˉ6bP5_԰;"W8ʟ@?Q $-^5~1 ehXJy<}~~?tSuD'K?fus? E\>tL7 zdDŽ|@nyTW罣Is}QSE{~< ]O1crL_p>U?le_P?gꗅ*}l'yN#Jx}G=7 $(QU?fZTܴpq F#xٖzOYd#ؖ~퓌Gbwߨ!Z61/юg*t,zkB֑"G|4c0Jy-̂/z!YܟLři2eVg\T9iqľ)/~i޹ CL/Ϻ;F[!)@Q:4cApGd1R$Z]'T9b37'֪yb}w}NE¿^e6jq hnѽUϻz/:yy먆zQL =uˬt +Z$Zo Hj6%ϳ IWC;sݪO%o+kI>`S4L?=`ڑgʑ΋u/L?lS{:+{quwS۽ϓ_(ݬS#pstv㴏Wd/^6S 0]xʆ%˸/ f(@~F6Vftwwy ݸ_WXhAqcAx~\]m`Qcs8yT/&/Y?]#EvԹGz}UJ[_'ĵ|ҴLh\mx{7H3´3}T^`"Ψcඵ7IfY2C3GHĵϩwAFC6+ > :KjR/-}:w5q:w6Y]յnwtv^/OJqj].0GR}O$E#y݆ίE=!=Y9!zd4}αNhq<zgt#=6/ h§cQxXp~5?DW55P]\gѯhϨCc(?ʗbÁ yS oѪDI3?驡4?T_S߷ xM>/e='"?6%gL4op?YɑGg_G#uچBnqz@(Rgxy4I9X hNFy >[ڥ3?dPƒ7ݾVed^A^- wuUnAn4P-_4mj<^[* 񥟟_.E>!`yd}8ok?ubtf'W9\ *xQokB0غSgQ'?aqZbj x Nc1&=+*&:y>n$HI:G7B磪޳zJh\asE>OyCM; KL,y>}qp3V}}+ O,}OK} l:'2O}d^CW!&{~L>mxyی-q/Kn%w]XȻOzR{ C{;A[uW1qT*n&KDB_tI3?8^UGzLRG~=K>- |2{ӍcG]Tn d{giQFcV"Р7G+mhL'd~MQlߞczX$+T>3dT^;J>IyL|t^D #ݣ^fyeNՓ<}Azş)q%>|H`Sz'8^?>/tR jh9݇%ЎA>Z2],Q_׸Ӥ|1[{~%ˀrW1z8,5? 
QuTRx0UΌiOXׅtf|<;փ0l<7̤ DN8 Ls;WM{eS2aU 'e =c&|4yEyP ;-DCVX9烶n.MusսESKiC~CCX>jEuI{yϵ-U~bzvtnqz??s{E*|B3tv}zN J<㔧D +YVt[nHߥ=n8꩒g) ΘQ$[D<0U䱇 8o/=Ü~H%}e:2_I#:jFrx,YkCb~Kߐ)3 aѡxxޔtLY9zS;spb8r//H[gBE }o;=̎6_a8ᰙ ,ו|㛸6Q1hƮOuTa=YnޭZoE9|^L6߱_fG(3y=4*#W _:xESP?]>n{E>; NOwvvǿY7~S]Gm|QEǕ"[: ^El2# j{slb߀O2n!B}:vY7ۅgXpa%Y2wg$֎ Û4/[=z~4vig:tQߤv۴t,U ?zsu[z KO'>KEfgBg{ZG+qH+ח*zGqgC)q5}D}xz'Osb~LťO-l'&~jb:ɔ_ƍf0hxN>i~&[,>>"}6_3y=˼rvt}ٟ pކ'x/. yįA3%5T뫱 ۙ(~ƥ* r)z|tc_Q}UCɾ}C%?-Iw4z\l!!Q׭=dP~o 'wL bdEԳ ,J>.y:MYBȓv^-bz nշS9)ǩ~/H 'a ):ik3S볩m XWU φ| a3ےūgj3r5{v]WC=g߰Icc::!ާTd}///*W4{= M8N;pMAo)xSi̫0^_Oy ??H:QOS?߆z{dpw&.뺭n-x7^0K߮:Lrx\Ìt$Sgo}.7VׇAb+O _o+$̭ix>{G ȡ%y>??'׊[!wE0T-Q|f>=GzX(&$Ë귡";z 7mODOƫDh[8E^.DsMoW[=ҍ0:[h/ )+Iu.L:Hmَxn Ni[vyޛ߿ x>ʬ̾<_'0#1 W}Ҽy`c~Ml6gFGř?#=Pysߩ'}~GnPD)Mk<̔aXq/.PhGzܜqᔏܑO0h[:UKj%e!Y{̀gP4ssT>}pzǣp ' Z&q/c'^7MW7EF8v>nwp?=]'<' ooWSWz"خV[F{٭~YޤI5&C 8:}~OcGA~T\u\KC%ԍnH\5F>.T[t G7|ȿo2֗7'Zyt9/6oddYm}#rW%p~e`.G,!JGp^d\tH{'Ƒ: ;>LrwxӧPՕ1z&<*ƋP> 7ޫcm޴,3=ċ;nυ\wCuIN-Z?kW31߿+$#֝iMtkexfϯͷh byP#/^]#q5hA<,}Veh̿YWs<˅f{Cu_~oY|L3wf7], )֎s~؞`*ljjdݾV< G΋JY{3Ŝhޠg8#߾c|ypp&CGTi*ڪ_}ǫz8Iidkqm9' | O$ܯA_oDd:r6?Xw $UߋJ]5u%!4fD;& Ȉ+#3"TS`FBbHbB #K c<?sggg0]c1`d1#"c׊sj{_w[{ޱ;޸`{qG>o}mֺ#!W%yS.>$;4ǯܙwyo>9')WOs/xkjŏvj)~l獿KO|O];bN]g'8+vNxg}OxW?i ?>#y~%#~I6*$_;\vo};;.־u=9sSosNS`g~OuJ ۺuszesYo~tWߘ<>wa+[nݟ5u/Ww{Ys3#gkޓ v +~sw+?6[NpuQ}Oܕ{n!JϗګK.wn\]$ҟ6'>y{lmN~臶WfvbP:}~)޿t'=S/o'O3PZwۼ qo󶷷Ƀ?'K뫿}| a<<^m֗~1{oO/7J~Jb;+ ~rgrn7 _G|fiGg{k]ywo:MO>w_?ujrqx_Qߟ2>&?/`g;{ks޳'|^s?g=_pO=OޓÙx|ʋ߁xxyo 4ɍ?^d{{hی[p=ގ[/o^~vysZ,e:5/=׾w'ͽmUKϚ:ſw>;O?3WC'_zΧ}iw—흖߱u{{[?ߴ7_zzQ@Oҙk?wCqێ}Dww_g? /oC>#OG~|u[>:h1{H_\uGѝ|,I%9y.{Qzߣ~}sَ3?f.;Lサuzw~ޝ ~o}{\Frzj=w+7N^yWuo9Yߚ95O_u|7|Q7N>o?{9_Kq-l:~^|w~:~i9}}Ojznwxe}W\jΥK%/`?wGm~߱O__L:׺Jq΃>ᷟk"yQKǽOgΞ<]?qsv3|~[9y|{;y{^kEu{<÷wO^ﹿ9?ѿNlH+sd~'m;I=r<}|OCo~s絟l_qyǜr7︧;޾NM}۾m7oIO&vNUݿn/yoNru䞓糓6ͼiKe=#pɃi_y)s/)hw;K̗_y󾄯O5䃯y7q|zߧ%ϻ;j܃W?W|;{N^g|ϡ/>LY?xo9[8c]_Zgv_p;;kծ_fWosټ]Mk~L oAMng>|v}^g |ޡ{_sKp|CWuoK;;Mm[].~~Vv'}Xߝ%?|ܞ>=K>)|@3~)uv?&WvG׳O~/$KkϷ?0Mk}B=N~=g'8x-yWޕb_7EuSbXo{goGztSϜ/4d\Ɵ4Xκ}:iw/yۃ'Zo|w3t[weهyf{ݾ<K>8w_*e>w ˋ||7>ͼvÓ#I{ȫۧSͿh{?}7e=hO<;&om+I='3 y$κOH/;> 5H%o1 ⯝\~WJwk{Ol]lz}|w9~fe?=yvM~>O {>|uy}{37;9џJ.|AgR]E=ߺK ߣ{{;47=߾۵y]Nmvo: OXcǾk~;6^z]AqOsv{> ߛ-6 u;]N?ql'ϭ풷b]MO?N܇ySީs aǽ>Qa{ɓ/Z:Kݫ;g%1sirlX/If?]J]ȻFg;?z3<~M{|k/r?;%޵.߱c?_ŶgΜ=vfvG|T3~zX}s|-|yɷ~ss/{''7y!yizGm#L/6&%On 4#lSKoVɫ^];$spoc>_7y8w/?G=ռ~q]Ͻه=6ћ˓}\f;M]~6f]I5 Ifu܇zWC[s \~>=e+*P=Ϫz4枰jkm5m֑VO4t$zk?r=^z O k{o߇W~ߏ#~_ϩJZkw?cu_|styas/7$}i/ub>&yƎyi=_W͹5S~ϏWy~6e#rZ߭_]o_ϩuz=/9|?W|-SXU'G||w%amy]/F~)yxL#͹;_zXra;͸/f}~|>6O9ur?wOͼԳNy {䛟Zs/9y>oNs+~~6yxr{D~F{̇f>"y'kHr_*ϋܱp|QxW/N5}|OavI;q?8gܷca cs?G~zỹ9Ys$ut39~ݾ|rޓ<.I?I?luY}?8|s_J۱>:L3.y'y{_tr|_<ܝc:<.ϾA旼}' ګ>[ >]/mgɻ|Ζ}Z~?笒<>Y{qOcf[s{'9>аW|YZ#O=_j OByϾ|W࿝/W\c7IdܬߴO:_>kb~Gyw7ʇ8]Y{ܭ_2;Kjai>35mm~.|-^:S?c׺Dk^ahwuKdc}T3+G>O 91irk7]f_'wɢ'mi>Ykl&_?;T"/]Ͻ쇹u1}|sĮ{tz:s=8;.q{v))>l>ccNwx'$muNGg>㬿9M?c@|mϛ+kE9a'{n33Ss`3vu>߯ҜGͿ%~UfՅqϝ:wLLeGͿ߹Q5u{}tp%o?y?߫{wBo7*߲ݟ~3GvT73CzOG~}G~}ߟ_G:G%z>;]}}Ok湿ݷ\=Ϸoǹz_TbOz?-y앞g'7㱖?{}s{y$}R=U3xǷmȯ:/WqFs~}Yl=y;'o|џ7lO}ɺ[?yOzjto)F|Eb˛{Yو})࿬YVy{oK^w龰v|;>_.},~c`Yr~wzS>no?u߇m?F_'N]W~5BN9~{㇃5/&wdjO~S~ww6.&~䗚wM~ct峽+|RݽNܗܷ_;|'6X^nᆱG?~wߣooB:¼`ZIrgz6:S8zjÔ~<|>>Gށ߿\΃"I~P'AkNѽ1'o꧶?t?/ݝOos^j.m^cooܝ^Ͼ_z|*ߗmwԜ=c松K'CǷ{~+"g3+>@ nw'ӿ$o~R=O}u󝘃?j1xƝ9j]_lߋ&8䩢⾁/w|ٮ{dG=q)zH~Kζ/yPt}ɕ!y ާ|}ߧy9wY/IͽDgzVoo9|7}g=юFt,lǸ5-)ߩs|tڽ~]z:|LO=f_9澹OG7PM|Z>x0y9urkwƹ9MVRn~>߷{w{kR}8|W& g7.> ž}|7'yu3ȯ%=:_)8w+u=7-1"w)ɻDqN|;|H/H>li$5〃3xaO'_{}9yM<_/I^{g_n}z";}yv?yl?<}RXg}oii=X)Ib~'~Mm/oK^q_3N[{ߌ/m" CϾ3/s;uNٛScqi;X9O^N|7إnľ%wvˮ_Z>x[oOn=Mr?ެl~)׽m^ vԱ:^ jk:%g2O>ugy+vwoא#x|rξ[w}I/ѓ0{of7%/o~|+?}-}|o|5?>9;{G;^c~Əɟ;y8(q`bq'QO}s|mxO彆=#?Tms_'[ 
9sQ~W&懪~_^r{%ϸ/JwtGޟ}~iI;ιl{&O:&</ܝ{qloQ|)-$=w]{)6}|Or%sO$UH,>~,'grno^6ۉU|y֝$ڭ63'Svh}~?uqhr;zϮW뷹%^K~=yr3zaw_Nl]'wz'{;՗=؜#zYs7]_/>1'W2|~>nWӚ~_אl+9(>Β㳤}u^ܾw~񔷷һޘ3ljqyTz)oKk=]\4烒/A|{u[Jnw^Ϋt:3wr &ך:<;E1~6b~;;qWmAIgG9M=}=~v0qv5,mj볉:e js {I?8Uq\~bS6'(|6{yݎtƍOec߫Y]]O5zZ5W_uS?ؿCCw?km%h_jϳcme~{^^ӯv~l_~>>i;Gފ_G |\9oo_kS1Kwf? Y|؏r1;o)|ן'4c3I{<1>{_xs>MVgr^^ѕJTRJAn^OJH~Wtބ2@+O :di.ByAF' `!1@FC'H[ =F5ABT+Jfώ=nfbF0>BI[P ԚB7iA:9!sӹLjmܻO@Au}ZVNPd:YAD1B0GXs+MR_}-0X5K!CX0>BRh2jR:o?O6 同a!0MV&CҘ.(7e1iWeK/k}8)F17 B"DG:}/ BjJԛF~̗Px2Ft ¾1f"sgAt|h^k}H55oXvT;( An[X}CBy9nP-0 €% SXץFszcrMmݰLg萏 Dh %bw7)5eH,mL^5Vm5N c}ܦ;h>,PsЀLj)F>Q(F#j 'tK 4fh#F#@"c0fT-W,{`-COx~F[s4}ǐcqD%IK}TH*h ,)F2m] r]`wKTE 4AhPuqL01:!D eǔW,R P >`L`NFV&>PC5 ! STԴXޠO>JR yF9?H/Y`0,>xam\LYxFϮE9!w,GF NQhIL*(%%,@\37\堦UCWr.))a b %ZF@죑jkPS:fRF4ͧ4|QFbT}h^x4A!B82ƞ`4`86nܦ1us ϩ>)F&Wk6v}Fh丠1LҎa;.o?^ ^sx]2(> G4'k4Wr|$[CdǠs{MXR'X0IIYcJj jJiOQ VQgKM)ژ{9hyjBp EL!i:v{%$3궴]IJ =F\O1Ҍ t  EB3^ͦ0=4 قZA3JG=>͘L1Ҍ :" br&!ː <Rֳ?h5r4"ϩ#gyv 6QkXpx7>PYdzFhNv}ǐH|#Ρj*RbF09:}|j j#Poʲ?Ԁt1R4#g0 A0rs r@ݼ|ykA\Xb}f}-3~:T-dѪVrPN)簌lJ 5>q99[۟ PSh)TD%|!  @Ok>EtQ-5FZRC ad\{54 NzsHPB(4fz--H[2-̹U(t5G#ͧU(8{P3@ ;qsWp*8<Cn:Gׯ4<(`1S̸mH i㎖7vDI:Jq ǀ!iF #1KH )o9g_󌣅|H;s v" O.34nOex fGr> [go|}bMeVˆl䎄0+V0Sn"~qVWNj,#PC$ ݶWki˵Q)рmXCG<:*#W91zA>:oBW;DwGBr#*W,!Ӫ#XLZ23Y}K:F(9Qg:+C Y(Zc*EG:e1Ԥ8`+/">->LNiBLYYEh]`;m6^p TQi(BF^_A5te˒F$E>5Nt8bއY,)aoW )nȫ~e*LHOx0)ؒѻ@MY -iCh4W2cm.7Y!@7CCB7V#X˵<_B?Ī Ch -3p>|v1G#.xR_aAB 4A!B##H326>?XӵqQWZAlO "Âg >iU`rЙvpms>x_P]^H NF6Y;%מAmw:d>!ɝr5\M@=uPH s%t 㸆pi捎Dٞ:=VZ\9Uj7 tRЂk2T J\S A#!@"5̀u&X!*(U̘Aȏ`v|1l[a.r5i'DTVEmjјb4)Ԓ y }œ7_Cp,,`|V #UP &15 \Cw٣zX]`we@y֐xiZCZa995Y E.##>u֧=Sn-ka 1zDEڪSРYnsץ(mccָg{~cxY`^B,@ASb!C}8C1[k)u(9Z~_v|Jmr D?c>c&=fa6#ܬV9>: [*Zb%A^Tc=|Pṙك{]`.- &SVƴCHiX`sb&$ap-]8@M׊5^`e92]K~^(8UV}rcYrqS4v@8?CLDEBz aŝpm"q0o`pJqcLsW1 C+++}Q4#Q g7VEӂ՚IKjMK(S(%v^ 0F #+(G ҨM9ءš*?pXBoQw0X1)5ktRzsJ]ߛ9If1YQuG){ PM T#0nMQMZj.VS= >l ]pl+{"H-3& #Q(d1! ]NH0lZ0 jxV9 >ǀc;NcYu+~_~voO5FW#]Z{UY fN\H֭ZZڀu!5P} -A" -DV } oԬi #~#C(tGp?ZO$Xu t-NL'1h9v^| vK.ʲkn)KB!@Yy<>`7ZTDa,Fm*[55V 谿}NhXI4A`‚BT ʚ:ھ5Um=sjp*m}LGp 8o]HNj6a'ahtH[k iFȄx5q u԰=]IW/0-3t|!D四jS_tt|;「KΪ9Ģ=gTQ,^̗mB}k%],Q5Gñ]Cl+JWc%YFit.wsq!`J5Z@C< R7aVEhn9HᄤS#gPQRcݒm;cw)Me%%%]}k)jGn*`eN:}K3-+HbLPct#ň #Sf,"MTdQ1͆N ȩSNѩ OKx+F34ZMEAnG{%d9jrp堜B[Yh2YBY@: ]^ nJF0]3*+ګp!m7 X%5ԫl0^1G}.j=F\/`2BB!xiF40tmNǨV֠6Gj.ͰQHQ 5#Hd L'B."Z9u !@SlԬ?^QlZ3{aT{_K17KA(p-F )(a@ɀH b!2tĢ5{|5ѳ1%l݆C9 =_D/6#^$">p/cJdϥ:i{#7pVB_ p¼4 %Xg"1g__p?@P7 _({o$܋{tO|HħͮMv"8N,%c *欕`_t96\ۈ3Ҿ/={_x }[ ? X*P">??SK4BER @H+JmT2a(2QO d@ItF#:,cFEq`8>VFh/jC1g|?9)*VThf}*|EM㋚b_H_D@ Ы0d:8f.c"u:H@Fős&8:VV+ EpCP'p?FH cX"cªI'J ) H b!dp/yȄH4 "O)O WJWR?,+Қ7d=>C҅~5r(Tw*vn7 "!7q7qbT̝j=u\rihiH7_AO 6\+Fj!ױ/P6JE<)PRFp Xxi}jǜ8c (rPŊkC3 X{PPmXx ?ypъ?k25SͿk>g%W,#1g-Z7¼5"|m|_h/ rҨET2QGU Xk*/})3b1c)Ѡh8##{ [h{߀;(<T0OO/'/[^js}= xbG0vcŬZ>jUDgOUU },iC\k¹@x5 ^UWWJ0*B x+ylAnpխքjaNߓRs 8 a i+j6ܰϣsm}kak)O{pր;p'Y= !w^"jOۣVܙU 2''⟧vr\Z=P ^[g3#^b"^b:OB7fp !G]x<" = bV_0G\s 8 Q;-܇EyVK":"0Y7އ=FlYbiB-5qv s>7sPļQgT Cnzw &'c%c<\: soFj855 9HIU{G5o4Gath6CBRFlFd}8SSMwÞJg1>BBK F6"ZlA/r7r YI"#ES 898scM8N9CEw.5o٪x-3ciFޘ3{cfoF!hvt¯ %{46U}vb`i<>Fz`%)}<[fKO0 BZX*ɜb1y95sN(G:y %T!|0`4F*J1{wNs{y`f ; )nlT!B +9@5$) e7b=F\O1Ҍ :4 BXVE_'`+h[,)F[mĚ icNM۷oԪT4k ,)UAQO,C-kA!h! w[Ūu tU!#RoCR$n wʲt3PРe U Gc1qbAQ8y<@B;x({ ,8jMlRIx4< D C(7˕9wSi7I$Ӕϣs>OP//\I cMˆ瀧P}d̓ ^[-'9UI !O'R)%اPO# j',#<#V7Y xX:Ū=cy3/|\Y|݌1X]  <6`3p, '3p,}tb>MHoRD,! nȂ˴"܈ydXF,Y#11,#V"^;qXXEgY4c͘E3fY7f!Y[I̬e31K,φZ ؑK!+49G}B! m7 b0 `hFahTks|0sA ~WF/T:)tay`42E2@+$=dkotc45FSPO ]9h@]`.]`2/FSk4޻ y[K>JX]Jp&Ҁ>B Y]bl:ra;ಌي]29 ~5o %d*[0]_G}nATrl@ *ʀ:e\93hP@23(a:u7K12(]:Pr>1G!F* H)e[&+! 
ě!zF.Z%ݣ'p*r<88Hs!=/5 !*:5$!A8t&MA0  逪@ 1 ,S5A jߴ!`!m|`ي)$q)z mrA2^QczbP7:gk+dݼV KmXnrs%'q ip ^Թ` 8s 5oܻp;E9>ǀcȑ!RZa0FEcaiA9Q p`H hGߐ>0 `-oW(#A!h#P4 gUz#p.φ8_2\YI'bƬ F)uB0GX , kT݂VG+k(B>M؂\~$x1tF轊t1"mGk!G[FB]SaD~Z1@CqxX#K w3JIC%ށA4B9#*R`@[:@c) ֧F}!Q3rl4QU<qDzvWG]awuDU֔3V Fm =;#}lk /d>))Ʃjj!Ǹ1|L5xqR D٤_fƀrzR9儞&lgwE=iF(`2^AL/Xw bPCbad[9 K\-*M ԐS\XqR9LsOad)4n2?7J:Vɾ ل*2}F)bzci2 3h#0+4NaB< nl ֐ɲi,ks>TX5^cRѭv෵DI9}_%4AUr٥zLS1Ҍ #h(d##_K1#iF(`!DflL1Y3G0+b a mfkŚ㴿\2~Z6MQ@AM2T\a!ENTAMsȡNl i4RB:҂Bȭ&9j!i)FLamoևu 9j̨N=WZ6_ϩjfG3xh/ʟmavEmv!9<:dMe24BX0P] ³QXpk"bdF!QcӌXP`%`!DbhR-&|y?ȡQ Lp 9FS톦c9H)7`#ifO3{VfhĈV*ly3F!#2mO,K>=4ALqJ{|rxMOCf  V]5,o_TrE P[0/|P@MMwXA/۽jtP"5x '#:(TSQ+n(5֯5Vh5&5q b@=h5q#?λ̡1'hga=oS.fYy1c@!  T]Ϗ4'+ȽE bP,,FT-,ƐFSŔV4zNs;Rq/@bȂB` :K8衖B̅]r ĥߢ(ۻ((@?/ iub^afF;5 s \۞˕ r+hW8Sm8?%5T-Re_/cHɩ%sTc']F%9]ñ!ZNl29ѓ Gc1x"VĂ EAhTh*MxR<"*bP4##hޫ5P(-a3 |=G(`&f2` k}F BW׎)=P|3b#g0 Eh&{ =| /aMwY@Sl Kb `@#"W`0(E- -Hlu2_ހԏ7~ o`}Y¥(r CIG) I10wn)@x| aRR+\Cxy9`y+``|!D E`byd0[r Xmrj ez)@Tr!s2" q;pOFEϭZڀ5aIZ*-r[dl1mA!h]"?b4T@go!)`M2`|!Dh_w1Ć}ަ7DA&6b5butA-ô @g1 8Rܤdq;9E*a@3.d0(ឱ(e@JL]gj%8% ˔]JXI(3% ڊAQ6e &X,#[#pdA-7pNN#rXǢ'.qYr 3Z8R"aa%MpfĭoeX[ŀJ f.4eF3` X!dB }UD@kܑai_j͌$M5Q%\"R¸<*Sh)F)e&Ii xaav< -1-Э=I{U\ZPWpŪ!sVhVCذ54KXug9 Wո/Tߑ*5y¯<ƃ|k}[\jAj1QiF͐`hj?h_oD,,b UN\Q A|] Փ-BօvF8 k`@:u1xCl?X`0XX$:hP[68+H1Ҍ #Q38 ]ŬrҌ #@(b#Ai Wrm5gD`3p|^̶aD amRYSJlNa7:} K0zYVMSbh=ҷ)F%SY7>_?)1xS^9a,}Zr3p )Ll; . jw{p]B{}u`~5#PA=55ft@YaF,zԷH^'+ . 1!_B`ӡ4t8}5ѷ8J0G9լ9l%)!i^*5}C8t3pKgpL1:fȋV&pґ [QP8R!%sBS4#(`w Gb' /UP0=tve%TO%r.Ы#a7[0Go` GG CC/Q!w > Nqý kL^_0way|\&w8w;Urh>?^@2[}݁fȑB*^4#ÈV;m#at€qzE؞;:M*UTq1+9wp!qSb jEA ujMlt\e*pNcƐAp ju0 #s(7*.\ EFqu dW=juߠfh3$׃5]d2K9TþyّbpU`50L㙛#L,s,rJ"mmi6U.BV x=a4h795+T/IҁNftAΓW`>>SS8l!(5꬙n'8#l v2./A>]lVDzmtҀY`r4V@h$5p8:"-| Ҡ6ׄvuR8H鴹SSRN[GiޖK5) mJlF܊$>Q#W1jگafN*65*4#s}+5ADb@9J >YyɌ9ܳ=}6:y95! r0VЎw"CP&֐TJ8 A#Ԛ* uT5GÑj] h[UOPas,&}*s@`A%YX2Z:TgT š*D>!ЧvJJ=8c S*C!R00 #4A900t;%BY1$9`PBp0pܹ샆)7>z7>ga|L EO& ګ0 b{UCClRt5US#W/VI`|+6r6B}_B?~(ۃcWj;,'+jT5Gp8 ǑKj5`( ͣy4+85=Gvhv٩Y 4=yF/ chaxNw]%PLJQ DQBJ^58 ۟!^G@ ۚ5q= @gS T3!@`D1d-`RjMtYrj8j%gLI4`2$՚,j%2x8{YU5Gp 9FyJ ]vdc˶5AV +Q{L}'=V㘙e5c5g2PÓO1+5咕`lc[!CZ[LP#EjX\e(0 xp*9uŜ,u^5]TQʈE1uE=SÞ[҅Fh{yh]eN GB 5K',֒jCR4^4˹EZU{ubx1X#?!JjiفBҩ* .:u(9!tɠ.c!#Ph_!w0\AQ۪[/ *1" J>mlZ wRs(kHMԬ/^(zpSEaDIt^@~+qGI<@΀msV@Am@M>c rL\&4U¢ 4uj+Jz*W+V[ցA"[%% IV$+h`Z[QjbT?Y5!1M=5ps胚 l?<`@ZàhݎÞԅwiw꣘,IJuxN%ZbF(9ឤኞ4A1#iF(`11(^QռL= ͮM[8.gt"4^tgp u]ÇG/nmGjM| vfWV[i3S+PjCPLM~ !om'wlBN59A"mGg. iCU[hma6@M͆rR5CyѪREY' DVB{5E=}UviBFҪJ Sp0Ӹܦ 87S4!] ԔEGhGa]۵p+RHtE& h2aSpSSXF謚;pE ( Ǘjqf.)FSfgք3^$dJ5Q&}$ Meh:Y`|Hс(ua Rׇy-QdP9hWk߆*1@9c9 *~%U%cBh ` /U@=!L a'^5ҫ&zdoք pBSJSEi1L!Xu )M;u19G82,` ,,Ga0F L!5\: a(+R0/T4Uio\ +4L U͘jN2ZA.PЦa9P'-&H3œC1c=|)v*B `{aѽ/NܝQ-$!: CP G&?>|diZkD32|F#&V iT@ӒUT|U0) 99w8\54@Am@:uԛho u0z!c+_ |4 ] 4so@iNԪ! ϡ,UӣzXI)nW. 
iro%oџzNjh%$1!McL}.&K1;bIUܴeR b ,-YP][\X@c4/ E{I L֤ Wv[Mjqg0 QI\SZ/I6 .U&cg@őZ ۑo̤ =WT3N h\NMBAc+=44A!q'ER'EЁ#t8wpf0gdZW-SO7Z% : k5Jȣ+کQTtEۑb0uDٰJZWT>K/U@jjp:!ԐW-gz:ݑ$qIؑtRTvS I")ѝН0˺L_ t|73x~e~NLNLA'Aǯrq'""|baӰ>qVܵՉE܉E,cuʲדN<wS޽Ny:nz$wTݱ)^.t£;)^zr=Sr=N9N,:%\SNNNA'^AǯW<@!h#PA& uffra^m4:)S8RQPn@ +H: JYƞ5A0Qrj`Ѐe bCㆲM!l75ꞕ7G#}o·6MAZ+^2@tspO+XZе E,P®7Q-ޑ;;r:VЋ^(*>=0V5ySiYo&!7ij7f֊oBpsJEͩ>-ޤ@,M<hjjf8y(ZGp G6T9HQ+D cC{.cfdF!3Gsxü#(GpC1,b ޢ=VM5ͭ͠s.*D<9fʹ-F}VIstkT~ [UGMOS95NujBv>p{0- C !DVMHF>՝>| 4L9*J)Q:'7sѫ5 =q]u\^pӆs]U5QCn9p*8k1  ݌!ONcw7qDϽ nS nu#_ioq.3Rܰ57a xR<P⩥xjP`xvQ13yм@iI@{nDVF4O-SK3O<Oӑ@n8.GC֎ 7_ +IʈydbVe8eyBtyB<}>ϥ>ϥ>ϥ>ϖ~rKue9)@Og܀'_/I$x< ϘϘOې珰ǒ/xCby1Kjm !Oggg^S/ nƂ; yj<5CO͈L7@kWWē3xrF<9@ 7( E,D12Q̳LԦXU xq ^e{#`e&I$y Z 7̓,I$yy m V}89f)}N`TNg&*H2aEܰ.y +4a$f.~a0,M|`IoX% 7遚) HÛJCMee6fY5,+So"kg#z^! 6u]`P@sBLrnrK^bJbOz;s!hHvT G\w)d=TOV,y?4iа~E췄>s>K>ǀc0eܣpw9dy*dy 5^ľ?0DRZVa_*\/c4`4d4b4f4Abe/b~Dڢ #zRr`C#ױ6Xy㖈%tL WlTn*1$Xq[cw1w1w1wq0O,mbVSZ0vqԧVRuљ%GCa#-^a}'|hbGHZ^Q.\-@c=Kph蓣kj jXBZ!q}O:|aRO1UrK9‚BBZX5JEQGԴt~iF(`2{jb<;*XPBk B\z=F#Q(dKؒEeIHkbNb(b^[j'<,pck_R~Zyάaa aA1, KKKKf'fa0>̀zA c`BG n" #hU5oVigh#SdF3fS`8}!ѐ$tȯ"Q&V! ]l nH1>.&j^[mֽRk5BWCI~iU+PkPPoS1˴c %oL;y*`: =[5xQ!BCoATb YF$+e$ujy#3nLMB VaEsŀphL}9|𶂱7E =gn!MҖN֠JjDWt4@kNۋ-P~JߌtUXbJO :1^@}iqDM!oOYHSӳY 7U #&ZjZXp稪޳f=޳ !B8GvGhdƊJlMcސa #c#˩YŖOw芃Cn{5Gk=PRR̼z! KmYa{Ʊ0>3=ʋ ccBa{|bq0Vgn}(3gë{,=t/bS{c@1<澵k,?X<ƁfbC13XZ7|az̾_}uc_?{|DǠjyxb:fps!7=/6Uq?)>Cf> z31buױfqgL_ctʯe-|ʟ1̛ݹyy{1ܼʣp~`+p9@pbVĽЧ`NZu jZ͗յ@-p^Nu |tjlQ~Z9 :9iu}V狜P ghƀeP> q٩YD-_bh/Pt=h}x9jQ'i':G)d|!Bq\f}]`.-4]aFkkf~ ,WO ~euڡ tIǔ*I0tFQsv}x3 10s@uqaLѯh:fhlO;G4@Ie KSRBKZ*У=* S2]Gt>RG4=PWތwkbVCV֬ -Jjʾ׃F%Mt@+bSL2U6MCwaTH>g>XQvAaS`To 1@^z{V0KY+Q--:5Mە4R[+X*RʠԬ2X6[AB[ud[,F5V h@3gkYCct)%Zz0Zd:+F W_+,(du#}zt9XfLY ԇ8dG*VpI ؇&78l=Lc<\P@:uLjQS{atDsGPiA \8CY瘊1r;^aJΪ{@Am@:u4ukܣJc0֐hN4 ~AN@Ava_[*4HlJ ڢgiݚ(VjM؍$ {fdK hn37̍cIV̍>>'!mm08Z+C9dI3C#cSY43Jek#!¦B7#eN!^Qq*bj*Z|5_P,U ]Qs4aH3&}FQ(f<9kn8H,(0&vkRSe_E?%34#UhoX촯b9X౒(a/!+RLj˫vΩq ^kJz yhM㥊T#|5Pd+L hM/{]ܦtUSRx}trZ &~u_ڀujD>Uoܼg܎E2t?SS`2]g iT:F+\(rLεkV=5?#330hdžgKlűAkQYBl[uA^;JZ3/<zor o8u`]VfPI= Mň<(`6.VI7dF!v&(NYɪcd4-T _hvf<@htJJGehNtU+PS^gkP`Ǻ>Ǻ>/!CU#~֏6:9 #]||h6YK nt f9|!2 m_9i=KȳB|8sqr`s;#UV#@VdnEe>JAMEj΋%hsA[iA!hLс):E|A6 XM.;5{,`%Fn~A5Ӡo94ۂzc[hhӤU:ub ]` s5$P~LjI|(4(0ޢO4se=&(ahA!j`m!TTc!F3aD3Wm)nj ~=a:"b'䁍xT"1#:#\[n*X)~*e>GbUK9hREז C9| 6˲q,+WHqa W,ǭJl/㴏˜z5m*cEu`fl/Qj#M)\*titqP[VMܪYm6DY (`1~\@RS/XQ@1Yנb:b| GAjxHfL85|,dU8"$:P)Fa3 A>-99EA375sS3W4s3f6F2,a 0 a6,܆% Ͼ<>0} Z,Spl ,p!K.di)b!X# :mC+!@Nrf zrs^ ]1r>gp kA@T`sR!Zg)#9B#t-ŒwT2Zn˸zJ2F)FaS EAt X:*P>A{d#(Ge>KjL9RB9T4# ro UX c1 ˔2C9pvgqw渜@.teF,(`ڞÕ G?+r:C3 03 Yvy(e1 if;c^lh9C5dԔKX)ah3UsZ5띖sc~BI}I:~iCXv!50%We%'eh>i"dO: px4y+\C[QEf nRX&nRX&Y j7:sLs`vpښY0p[7cf#0@tqDm1Y0[+9l8nWicXr)yku gnL*7jVX,`jv٩qLm#s>BJ`j-L>ێ$RgR@鰽j19'qFn;t-NR*KM;P)쀼T@-r!PhGi!@)pc&TФ\i q6jVmHMQ'YI5U@"p6 Otb=FT B!PО`^ha2hE}Gt#i TSʝ֫aՑb+0qSf6`kL7k5|!,m8>l[g2:UbDi1= t5U 7@5`[Es6i~.]nSLb>H}T;.^͊h8cG#rBY!Pi W(נ6AJ%k5 Y8<}YGtmMcpO$##pthgQ')-VH"mLTZ5unlI6Tu9-眨B4P^V 9'*;oӴ tT`fi;iQI&v[tE`:k@L!h* B]Nb}y2] ;8VRI@ R0L̤ M |F@ !3^*2z^:T7Me|STƷڶ,"h{=Z-rOǂ's6"|Fψ>#gD3"|Fo&xI:VGQ@)Y̤JI%,x/ėU*J($ R-ЈG'ӛC:R~XRnU\Y:* BN:SK$qXЋz5oмceix.p"PuW`0A@^4"ה'ƨ3 ᜚uc} fcxo"syP_LG.Aоdq f%1XZBhkAAa:g+ ')$1s @` )hfߍ6 X5aBa30 .F=J70}19n54YHF@íƈoj+}o:"M;01&]ZêQ33ͬ99!KCˇMt0aQ/ad9#h\BgۂBh_,9%ZeɊ`\aZ03 b@MܟZѵ@CmtX|!D!ct&h,T)*e:9J1TiA H.8Sca2 U0T$À X)`!P<a( 0 X{)=֤P.+ESm]⥸*X/^ 0pr ͗yXcHw=L5tHJQWk\_00@":h4јfSM4=={PJp܁BSz͖cJ5ܠ`Ça&\:9 FbdvWqGCNulj~X#Ev9g[5%Ҽqռˊh `jR] p*44XdЩ]dU! 
Ϊ)f1\ڀ' 4WZ E纘BDoS_Qk+K5]{RN10vQ7h*J|s!fHPﴽRU^ިMm5{(-VЧ`|f tZF #hv!BZf>$Mc)Zu]dCsu!Pq*:A:u{X` 3BE(o}>8sYz!XmMZG1MqN&VEndTnaʷ"ň>͍0)FAnRUR~8]X!hJ.Q?J5|Xab9XԠ6A:uj%87H  NY5#Q]a&^e0Xe1[-]K@.wjSEʡWAS7_-3ltS+j"WUVSJhv--M#XPװ٥# *Ԣ%GA#!B# 9&U@ei 3Qv@VaSq1N1ƴP8EjA!h#MV R*Li+ޚ0)BP\]U` Z3X]a4W29 eڿXy}Zg>L;CuH֤>UV̪1CuϬzN Z7ޘm'/^-;6X4tld>̽4^} B)ZLVPhթQAݮ ]o\N[N۷o/\?lGVz@Fzbև1CnrK A#ZZ3㊑pъᬿjAJ5M55Z**4>]B"D3Cxۨ"T2Ql/(QQGN;zCj'eT E]  ٠$1ͅΆ4[uaF!ƣG'@t9R{%Ke vqd6cNVI˳iya#fqn6Bp{!oñs]ػA%" jY{yh{}b^TpS<& Jfwy ~H9w1%dwb%eLqk,+b,1n6Ɯە+ι9=;7,X2wOy=\*-4/ܼ̇)ei<ruSĈ4{(reřZ[U@K,d'oxxDxܽ@$x <"z/ ZZڐirح(`|!?!\#:5rUK5bF>sݾ`0⺴Bcr b=F\O1Ҍ #Q(Bϙy犛d+b;EX:0'5 fA,(cA,(Et0>BОެ J.rXZgXfQ @aAAAᙡy(<3(<3(<3*X@u/(1K8[c%ځ ( \b #uƨCw ͳEF\&4Y"QA A#!@hЬhGVnWftFYBXJEeVfP9j r YY&4A YS#s$DgJ\@F,(>+ B]@uh11PY!Ñ{r=5ڦs)vB|RvM8ObnXh UAŖ>B! #)tMk ]hGv4!0耏 .t#rx&.NCP>XA#P`9/,IcY+g2?@ݪ F:`nO?\rjFQ3i@".V@m #x^3C'm X0G^+thWs0"BmtS)uGVkJG9 }t@Uhۃ(9\ t/ѥGpl;o53صˊffEk̛0eK5$1`R ӉgV֠6/H]AM "z!P9NLF(V #HPnn%܆aDб|-ՄpY5C1[uj*-vB>xD@VZ"0F#͈H)s3en͔27if0`j <Ȗjt>D4X1ML_|t.VhAW!DwA B0t7t i6:Vǽ^mI4Nȵ@ B!ƬF cBKt@ԓH DyAO dHh CBi"q e#i"!U()@DvVmʓ @LR Ҋ0[#Cɜ|iB~J%?_N76q#Fv2P~P&Q$ݐB2zECKoc!tϓ-F |) ̇ZZZZ?HoVIGtTf/Z:e\dVK_dҲiY鋑#NQY@Jt4?rF~9#ndFVKFij|FVKFVKF0F&- dC鋬lLK ،،FtF@1EFVFVFօlO}Ye>eetTfm_p6ٗ5/k_V |_]—mȠ }}}}q}q}OHTB_B_}-}D.e> d> d)@699@loٻ d^d5ȼQP ;l x1RIӽSV^!0+~1SP_ sP۫f-sLOcJ@ }OoUB-Zi ,Hy]tܩ)tft1U|tS\ڀ%L$ZP4u:_9 45)E]vj<(а2zn,F퓂ilBIYᅍ,0nVq/YЕuyA=o]7R[SFVAt㦷ô%juϗɶ3 ߈)a7,AMݸY#Y~DQ7~:~= sNĽnn>=EMXy] iNt5[D]?K nލ-io 3#@Ž@wvm1wx[ݸ[ެqkWGwe]ݑymCuu5+?+M@_V4vEw3Y d|G+S^Щ, |˰Sg[}%9ͣ.і4:ɺ~ĝrd]<wl+봍뙎9t-N^7nVIS[kfA֩4ٖN}ee[ka尿t:+CVCޖuNсVzA^؉uYyuZTNTJu˪Rp䮹PLpzQ'aVnQOֱNֵy|u'+xD#MӨ3^drCweݼa##ꖙh7~7^:#uFu{aeJw =b-qgM6^7/'G >qgP8Jّ}=mE:WP{2-2O[:de\X|7K+(+S2#b' --v9䧠Z/ˑ'3q)BwdΈ2s2Y/5'm+?Aɏ`e,N&+J֍-oLVHN&'*Gu-tLV N%9ݲn7xT-ved[Fdd[nIP6=-iEr`Sɶؕ>'JF,bWNVn-a|xK>L(Un-7AOvLv<*Y'nvE&J sЙr2dt*Y7,ltnt:ZfO6j}ON6VUؖu}ĉɁui}ecddޒ.d:"rm \Pt29@d]̖0-lI$Nm[ȉ +mmziˉh+LT:ٖt_(J'Jn +iߙ4p- *?ɶ|m s%]:ENt*R䤕A[%bb ;VuP+KΰuYNԖ4L~T '+b۾۾jKnŨ^gu +Y_ݭɶON֭l-ݒL[tۙl閷pKnG[=*@$Iv'B}$+ 9Rfnߢ;Vɶk.Uax/D]ٞzqLN&'L]+夋rnc1Id[s(դ2~qdc&n \q24cvyB=[AwW6;=!PɾJGB*Aľc+tO q/G.)nETh?H܊ GEDB 7xuR ^,L(O 4[l>1" ߈pÏ|)V%Λ_':A@X =Q\$A Ty! o;@i!yL(@!VRi񡌎x"r2 _|^F7YN D>5V@Zc%''DEHX&poa܈O"!P=0_f x%٪O0҄֗n"}m}?&DA@R .k?ʐX()&"bmJ+"1nDv (Iٜ^$Dpx1O@/P d t MQ 2 mk1V9+V;hVja}'ƬҶi`#Sw1';mc:hi%"f|2.HSQR t4 qb!ٰvXz3XV#x= 7/LH<06(⎚dSPe=!`a}KmFa0w.E1+Y "FWTeuY@"\z X8yJd|2DcD;%0q+0"」5V )[<=h܋xb R r dFZ_vU}] .N>6  =KJȀl%@D.2rA$Y1uXXXrLoc}@xJ bHF G' Vxdzʓᨎ&A7҄//xn܅SU "`xK-DH+D2 yX DHchV(io[N 𔈋,nLd W2)Osoc!&!XlDvz\+i]a J Lzv`Zd'` ;2Y dKӠR !P "&ie'1] >6ŵ-F-KZ4 @0,@/k< f;Mq{cku;V` H0ˀ]"u9@8HGC-p4ꉀE2 ch`BJ"tyl+Hih,E4!tD>UO <%" J+V~, [ @ ba%6=F "A$M=a")1G_.+"/܈E!71O( r{kQ'Z|OۺB[E+_:W|db!VpCutBeQ[~ cX6&o| YB['D<9D„B +u`@ N`Dt A($QXzIpzRK?8>j |-I% !J(m{R vEF2q(GJq{'_ +V0b 1„/K"=bjlVRJp4%=}>v@Ŝn-k,+Vۣǚ+x"Ns nB,<[ >6nGatQT TlhYfC6ӛ1+LL{+cXQϯbMkՔ&JO0]>k-=[| yU ܅<[`6XBuF>E޶t4A6EF'դiFh.a).(KRn.dpϣ\`%')*ᅸ_尠4%R-r`4A`‹V}8#@g:fXխs{c=ROy˪St|,°j ZY4F9uu~A)Kx&rjJakL^⪐ Eb B-zF"0yj9`*9 z,ܚT*t&=o[.7%2B|) gӂDžP۵}Vx认+-0 K,˰i؇4CLÂsɐ|>g,0kl5~R,d,!K1xyb  2"M  f{9C֢\0d9xEդiFOW}x<\s7uJ[kPwVUk5xϩ}PCn{5UUYQop"mz!_Sb7$OMٺ@FEFfV刔t:=l7ɷ,@148x&jWYoA,(@'8Jq0SFRi )ݎہU tqD$FB~q$5183tn?. 
F0>RԊ@+wmT}9`|j @֤iFT!B"- -kY0nM)/Z[W*`#[=m;B/IM+oV0nop bOoP8.DvV d}]0 v77(5SFc>Xc x؇pilZ熄TL}<Ԍ #Q(b3/bExƀ#s7eIYXS֔.1W2ʀ0mٽTՐY2#7aDEʈ^F}JWDC+hr!XN Rgaޟ|~$+o[9MYkb!96 BO8Wh=F )eze )fOL/0/)5D!b:+U<%|Â1qѹ '747 ][ĻP3ӌ x`6}FHϠVq(tGp?fdq{#Q .İ烋`e0X,)FlcfoPS4#è]hJG`֯FVд>kS C%_9P9)FTGTs;0cB!@C` "e6[Q`aU_fK̠[Xb(gPQ+X*oذԇǶy@KE F0kb544RT A#@ %DPf;H3N@I"Jjn#ň4[ݸ݂#{l/vDEth6  8IF7r(a: I%c 4/&yڠ7d)Y_P bk"'9yk72MnPlV Gț3q֣@ m[w1L/&#;[Kg8U,o>dؚ]#r 262mJHͬ6Q@?\X$qdvY, [hn}1n-ҷNy0/můhmvmؼ_Y~&Q<B$PGs QG 5pQO h$Kڢݶ% 2B١KQԾMpLlwo&Qc?]%yqn&m6[ ln&1PQ'ly{AXכrkS 5d#&C2Xi;߲ͫ|Rv4F?C5h<їDYZW QP,+Az9'>^G<~i5Ә8\ޱBA( i kVB7!Ԅΰ۠X8N4 |9KذeYFow Mm0,f4g݃jz  }(N4* a=rď'28ePej,(Z}7ςىqL;'ʞ;8=l'3;WV=3GAT$^r{wu_TƯ}IC튣Wf.Nl){O~?ta={9?GMm{CMWad139!k3>dBaؼBlt%ۋg}E.msLx_g,$cxbis Z5紿MF]3IɑP':;aY-xa^/k5 v^ mj:q-8Ww\)фay ?ĥE2NbPpWRՆNpsKvYEP5F?jCCC٭﨩UjąE7ZwT%IH #(U@`C*C*ʅ[})bբ_IB{GU*ճjN͂7]-X]NRVs_bQ*:O(WR3JiuDQujE\dV Phm*SVQҵ6lXÆ>l}|jVfOzUϵV-֪ZXkrU*5t -~l1+ŨX($W-Vja:T' t½RWJVTmj;MZmSTG:j8^t ۠1rPT>K)\."<-w EZBOЈKBP QB]O)4AiO ڧDv"A;HN$h'k5t?~tO~Pʹ# ڑHv$A{ c.}RjXi*V-$Ӯ"hWkUC;AX/(֊\oIV(8黹WE'Q24>j'kD1RݵhF"n9 r$YFI{st\.,< 4R@G'vQ}L>Jz<@mRaAtn~P,+EIBBB"VA EP FHO?*ڹZ]еkuX$v=v=tZYʞ] <ty.B$VŴo*o*o廄\Z_еk}ͧư(׋I] vLt%0?1j?Uh?rA.ʅBBg8p ککک:)uSj/Uj/U9LsYYKXRRRRRRv!v!K:)))u_ꤤfffffffffffffffff CRRRRRRRReT / /uvQj^j^dF z z z zYʐ,?UTjX/UYrUj^j^j^j^j^j^j^j^j^j^j^j^j^j^j^j^j^j^j^j^i^i^Um|%;ldJ͢[2A2]W\AZtA*ʋ戅¾v'u 8F`%Gsd-%zV\)rP'I{$,W!R!Q/$"ha\KEPJu+ :-GzaYh5"kv˺H '<9YBbT,+ VzR_>RJ r-(s((ߙFgEEEEC-%T)d]<~J y *! _V2*`UBu-[b%; Ok2*dhUЪ Il,|!g c҃$~~! 95_HM %Ůàvd*kJc Io lwAĿХSE!F$Ja{DK )Rv-KSHjԧRjB)+n[)VJOIR._)ϵՉ$*iu*i+i+IJRVֹJZJj-ëSSQkSFVZ&6j-mE+L$Ax  Rb 6[_$ʵL+͂ ͋K\N?T1+./͂ۅ"R-> ǁ]`X|ɝ# ?o%.~9b$|@O+ܟpދ]kc&?wfHk˷_efhOd&dD4Fp=OuHɿN+$^W^^B J: >A}2v Q< 1Q]%(E-F&Rb)A etLPmZn(tpjD7'9M iI^I—2;[tsCd0ǫuf u5BA(KGW di4 ЖK ˈ'tv[˗`%p㑋 {GӰp@25É#O|#gqKY a#kzEp!=FIFD=! ,]0o*#F#Wm-=q}:A>C*f*Hy_z{?/, .l+ca3ꎨPLWXAS 7hq" 8'A%KϖIIo=P`^{Z~Iۋ֥ } ݭDBIwߴ9l=M6P5טu8뽶yYx2:Zݒ |<\7sܽ+HBRm޷7!}![Ox@L#t _£AFwf0IL #;6(ؠ RУvt_Wei;' O ݯgi#?&%C%.+q&lb!-aGp<É4(F[_eגNJ:Yd26 c~E\k8u:C'CwW4"=lx?xc;a>N`kt:O.{Yؔy! Gk&^L*@H|M0/$s`<ۚH2i4NeGWs3ǝ/e'$(l/9&Bٟ^݇pg90#R~ٜlZ/s7\myrWFk 'rBșKy0[\C y  NYjLN-ݰ!f+'bgc d`@ 1wCDzW#GspoH7Tȷ\‚#Zv׶rsedw&G呋 ?$yl}sK~<`Dj|9s&C] bRJ%o#64,f^0m3W:# Ɇ %^#w{4l09B$L |zA/xA'1bOc7/|4Vp9ťnVG@PzD#S#QPHݛޟu /klK[nQY۝y _.sH>^l/Ɂe@WGˀfpO6}ٱ=+.LY9IFB֢N|(:ja8#L&N_$jɫZҩV$jIzMj$y=4c`UAnuu' &YIek#mm-㶱R 'Iش˶mWe>"ssۉK9Ï p2O` l`$`[2]r\AFtOnOT>/ +rU[-OO_WhZC}`_N脼P+f?:ŅBpnvoC߭l8ml+5fD&Od'Y'BVfv[K$7ͤf{kP(nfR䥞=~P- OD(nwJv<;<\vgV-oY}[?-?;=%ԣdR3/d%!;_.4m۱"=peH㣝 - ZK2 `Y]&5B̝Ų;k68"7d/+!5BQ/$k8[yyi)["CEc-ZtN\^ C$Pf^(EBy K 3kNK` '7o>Hf@hM!Og!5sB^cܔjtbM(%"x9[ymwyk*O鏃!e: #r 6sjߠ b\lY dպkc>xBv;!%K|[wM0t+ #r Ðz 8¾\͠V[~L  WTk l͵$ ֥bU:y5o hkWWz' !G1IfkMmoȈRn֚\ovnz뷘uu̅SAru5ՒyQ$HYdpn ;KG3uVLv;*{ӳ .@LP: l_&\0xNW 3Rkmt^:@66FՐmJbǼ葯=]OmP g౞؍wvh렺7wM|3#DBacIg436Eg@8"مb]MkS/=i`47m|lc{ߴUY~:t{ [TH5UƟ=Aom_m*ˍ h놁uז=zheu؝J"(̩p͹&ҟ_=" a'yGTL vX$`2REjv~5f>AzRY~wEe2m$ȓ]fp[hYAdoMV28'`; ;Z›~Ep(ku_2Iܛ8/]/u\):E 庵g)߳,$l5 6vQl&lh8mX9ZH6=B8Έ@ٮn} BQ7\& ^K{ yP%&$pAbGl _Bl7I7; 8 !6BHJA% 1 (c&MBˁo%I/%äv43HumIChim>1AlfK7\PўQzUx'Ѝ}E kmXzE0Y!sG.fQ^I{kp+jDNlNkU{RޱX⬆f͎tgzp$n_\m̜wL,0gzw.RGV&R3qZ"bJA (&̪5ӽ1ިofwZG@& Ff䄼PB2F,"Pg-:Ҋ(a,V $vVB @wTC^(E! a g0/qM~1}+PjJR-6kYGW[%s'f;1ۋ^̎bv~ BI:8NRI*9I'$%Vmݶ"ė)Bt/bElJ9I3wQw"ҋ; bQBA~Q1"(e|}JĆ=n$Xn$Hټj6ЄD)-,ENJc*lB,+[/"OQJA|%Mj=r9./b80؂F3{Dt!SUlkc#6UTa$W**WQjԮ8#ثa4@ŹZvzʢU k[3r뽘Y\ެY׿`QLlY6nt 毸9ra2h3s]x|j7?^-&#oybF'䅢P!T UBqC3'M԰Z}̀t`"gbIù7 v!v^teg~b}n ߠ%y##xxC,l+#gdO-M ,gդR+:IY!pSaֶ~f}7\̒{7ۭمx&pOlO液g&sQ yg!5sB^&a. 
j؉n6[6_}{G&G?|db/a$]D;4nLOhjLg6Ñad\ZyٴZ V]?yBkiOζGl.\dguG @77$}m6"4'H7CI]OZhg-Dkd"h@P*nLQJߢn%IFH>x"d G`\)O&R3'P:Mnên00oBM@Q,`dk7nD7'`~& 5<&lUt7k$tl[T XheC2Q#oƾH2['w2M t|,K}j+Mk|Tow]:(T UBk3$TI';8]gŅST$5 Bv:i"I$]3Q5~-ƫ^-LW QP,Gh'&V%jIZ$%jIZlo'yKsbT,KJQתFjԪFjԪƜvU'ƍzݨ|BtͶU>Җ u~{~' e6G6 r]qsLUpojV \Т^62KjpqEA!vrB^zj̙tjvM ^} !" 0atȲuNjLv~}J't&x iѪ})"*\[5OچޝZ|"ֵdFgr1 H5/@7&AL[h- m+_o%}hRd:m5! 2!Z% RYWl5dhP wɚcֱAbF{ۃ䄼Bh*[ fێuȶɫ!v`q"qhm@LoϭX+=2~S$T5m.#ȿnL-N [O5ٷN,Pa{: 8܇gيblk$`ack_w[qL4(- GLО ݻ 6_ #f_pqm[Wݮ#!"݋, ',v/'gȖ |0f]({6;ȲEe38B MG7`K Z j}!DX;ߨWX,=ߒ|uwt^ive&' e'{ EmNf7{#L24M:EhQ>lns!,:ɶߟdnptH,* tgt'כ!r0=ըB:W1MKdo=*B/f2Mmf3Oh809* `5Xcp \ؙd\+!K=.nu$א;{/3 +/=ߐoX p|"'dMKƿmkŽo'BОⶊ  DuǓMl +4el5ಏD/ttw0NqKP߱VleQ?, ?1z]{IQdc)8;ݼ|A2Nɿ 01x^SD{ <-#o\<R6r>OD='%'I(m/-9%jȲc.Nr ϗGͳ"K. Nu< ipV\X(eANN*nm>ыaP4 'h8Q*I'rbD#G`4j*kOJ*_upxZS' B }ÅWhmKk{sɁ&L ũIɤfN-e]9Byi@s'{7[:O0A1*G 񎝆\/Pit@WT4kǽ4ZةuV.u֮pǹeB/9;btĢى(OG%@@,ɘXsr O/i~hK]\ٲ\YБ0:}2z8q39!;ޝJw2eR3/H]-$ {^r繱9usd>om0^;ϵgTb-λ g"18c qF&'`/lf$fLeBA 5Q-1.'+0ɉ=P}[``3֔8XK$LAԾb%'1+~wB䄼PBP)d.m\tKk+nni9]kkyIN0aqa{ż/%u^h+ y=_nJAK[hz'6ስ =|ٖniNj( 3 _jFڒ'8ũ/;A(2ϼW E$t&y(z(z(=U{ K(:5ΫoO>~XZK/T 4_X-R>ړ"9RX0b]WtRΚkaIn.i՚LQq!ԄaO8P&_WX\_?J 'A^lz6Qy^F8ɒ[ ѧ(Gk?p~1|V)nbi:+֎#G7wuMcZVR/~&vօt,IO<{x55bc e_YkcJTI`YdR3/d}m pB3+߲| VqS#P$itl![O ;?ux3'BO&K3| jBKv=@B ΄ `ڡTB.wG'IƄX}pF/ 9, a\9.Džba^RR(.6LFhX(CX귏(y8x|*nw{$9J!M׵fytRV]C}qSӹlxF zΫw>4 Q}zTMjK y; ̈́ zF$8! "}(v]AB4)ߒvbF6Iv+U/xV~o;_Fl`o,z 5zK`*!n~aqłW ^B]P9ubT,KJq4ƌ);Hdr fj}c y   $T$[ 7t$`7C~{:bgB3$îg-vo\o83W{}?\q͞_ddǑM ?HEY#bV:LО_vO$3 Z ; kA8{׿`?pմ=ᖰ'G/Dk nt3=xe-|zX'|: 2]zlm秌i( BIV;ǎ)U+zkD==^^P/d]h {8ʥ+#æ-pS›jn3;, xjX]&fx(C| G;ێ Tr੫f<9!q2=x'EE1*a G_bfVMA'p/{|$},?m~1Ч1K9(G=P-DْlIm0#9 ۯ~n~L ߒ + ;yN'TТ9q =k{aBqP!T Ui'Q4$Pg$ $~F3ڱb `b Dۧ4=wkyrH2ai0`tuwN 82ܰ! C3`lgqA}\@.!W?n8 w^VdxaI $T 3< Q:ԡ5uCMja hh@qF|L:4ԡ-cJlە9!/Z2]ZFӏ?&x0&T˄jP[&ԖlŖZf/@4x>-;8<ɲ\F;1 i9nG2``8rkN#tyvl#Ov&XHn}ۛFgrB^DLV<}OM?0O{`IK ֯6+h83JJrezyy05?;0va䄐]ln>7mXMh/<1v~Y)9!/*z=oINt B$`=AMhml:-aG/3Cd%"0^[G#cԳjl\ta ‡q?ےB] #m_w_Wb@!+$S{7%,'oV},@=,6f}hwGټc(Po\dYĭbݬxz9A0K%Q7,i7h {{l-;>Ͻ:R~MzkhHg.6s.e>k暘ؗ\U<^"J/܌ 3zh`=P`FN?"cO?~CX#xB Pք 5VHQG}iL>! jH޼:o *D3Sr^[kk a>t|YNȋUBh 0poS|4fC@ ӭ;:v@;vyξj)zŏ2;jM?E:&dC@o³\Cޛ}eBmfجb NР6l烸1B>BÊrCΦ͂#[}l;5^1@8=8\X ԔM`dDasBꅂP*JJhm%ʼnP?_ɖ ~$67G,?vϜ  BIl(^ZMjpDN*K2|oj;I7v8Ͽ78/ 8;80T8,wbs:w/<f|]$C`9F"@@\@k $@- X7ڤ|_ah1=->q- mvB0g/Msw"ա< #qz,qNy -x?X4bbIac!5-ViM%-B=aPF7~EpXcڼ+_Ia1xd68!=-b)bOv$LUF^m|9I$ۍdY"ĥÀa-0X`-k،ɗ>[ŝ`dp?p-8^1(FnST"iBUpi1(5jQW\Wz5%9 B΢ eRyrL]G Wۈ6&뾝C$3T\+Vl45d,_l$>r_'?cRg6u>pIA.6 %9<ïio-^: 5ݺS5HV_f)._g:\`~`>~Z&i"O\ܕa G?5 ëukՏ-~۹wKIWX^HŪ݈gx ;vIw=J2.B̾{Gc;B)IjxW$$ n n0ܗ2Z#zrB dhvgT1&kn1ґy NIr#՛ip/{jwfw;nI>XH2LԉYfա@ƬE޾@ZZ*08dkۯ mmϯ sLԘӮ}).\ŨXPBt 2u0{4T 5$/6lDG4չ Z_$Ђ_1bY):-V)P88X%'x4b̽=P(N+;A\ogg'gWH= %ofa&Vh+ړ$ ! !Ag? 
1+L+$A &kj4 6N:i߭qޓ6r H7d+vlUV[wfpg P[xw…W (-=KpՏ~^Ɇ[~.9,8.XprШ^\/u:KEKEEEE{?x'!킷K^][-7.L|y;j/K8\5i{Ծ G`:c:b,'B$P ϛf^ǛП EpoM@#:őXuQ ^IdRz; d[Z $ DNȃ65q&G=;- ZjBCh n( x?JfBBCc$XO.`yg6p*Er%`t6Cۛ:IFۑ$ʆ'L5ɉWjHAA+ vPAMc&'" }$S6%;dk/x,>yȜ }A=(sE6&R/&wsk |:0/;`EC|lp؃NFkDvvcp8L^(EBPĐI+ܫU$Y NTNĿSa>T?7)$c7#X/plܟ2>yKsw.; ?*bְJ⳽L񅲃l#$HJhey#WV2}g} 29Zl {#'20${a(C$bOpJx' G6x/8yXSdq[zu<2ޜ'z) $ch˜$Z=/xi\,\G@ BaP-rQ[X+5 DžbX)9ms9NVŅE-?x.w uB]P-u uB]P-u u~_~_ߜxn/ B?,KaT#+y~p9z CvnǞ'v^1PbST\(S*VRguڳ:YN{V=ig鴳t;:ziU?L-Apa儼Mb Rbng)gY:`:`:`:`:`Ryyy]Kt){)%VjVjQmմ][eVcUk~4-jիzW R 3m}^^['k++əWj)zŅQP,+E;kUsPsCn~Aw^qa9*bh˨ eؠsZSA[YAۖmKKAi my7L)y8c# 5o?g3^&HΦnz&SE@l_Vk m;WqJ5IO漇6#ۄdBr/j]Y5cX3:|d7@< jSԾ9ۻY LP2K^y(bqgxB X1 `%Ug(&^%'$!`P& o>{-Q4w S QP,+ŵ UFM*٨S+ډýwb de>WLjvtʓ,Ç-&.Ԛ Bj"=@k$J^/Z˵G8ѳ"cz͂o;|kոXX/KJq-4NSũ+ŨX(VNr 6j5^c5~Vc5"}Q/QE=Pɯ%RIHTHTXx4[4[fKTjUZE*VQUԔ͂5e0m$5BcXh a1,4ưX1,4 MY[> n.x݂ʫ͂-ZrnujhEVjBP&T U.JsvCVճjUiWA`ەxpɫTvEXp\8K傫2HDj2?|////ŧ_O9?/矊?>9>r>/?}9>s|,CS JLTb§O,uP\2? e(~.CsP\2? e(~.CsP\2? e(~*CsVů?9> >'|Ox9>aYW:?չOu.~sŧ:W|s:W|s:W|s:W|s:W|s:W|s:W|s:W|s:W|s:W|s:W|s\e\|*ŧQ|*ŧQ̥cбլ1bVa ^\\pn7 ^DP\^-\WЪF/Rk9(.LeLUVWZbTUVWZ]Ehue:n`.^K__ kb]^/vxKX%,+mиE\".!B*u^x(uAKɨ->o~D .H DbbfW\dW\Xi_`&.J}bJ*EmYVW)BVZ[ҥf.RPB A[.V%KUR +URmJV~`MJTJUT- Ph+{ku9'jXFzQO֚7kM}xCKb/r*?㞑mb)4[.x&K j[D p`$JT&E͖# v}e42RqO\z+}~wfk +7xu[=^YW+xeZaZ{Q1bd9B. G2,TgKVkC|Mvm^1V;ۑMAn䅂u]bX"Ȁ܄tǃě@=  xRgjymhLEIm tB[qS. t^1(Fįo`7'*{y"`BgObX)qaa>bTQ..BV ź+߯`=E P :;1Bygۦf"KGW=>T95kZNu V H)M @(<wmfR3T Poks7"ƚ= BM"'ðՑnrc-[)~ #Z؞okܓ޷Qd"N6&τ-Vd9۩foų>~o`kCONh z?[_ k@IQJkNХ%p(0g`px0\^x2O?xB Dn>BjL! BIHlQǕ2,M:$b&??ewJZ;ؓAwl/7 nQ![dƣGUJn,a  .d p} B \B%ٞLD.,?6~O&t^1(FE R0r/َKK-6qKNW|8!8pZ„N+ŨhMʸ#qqpGNF؜ܔa^a-V*Lk#DdAB% BP%FlFFmdfٗL5-D+Cz~4nco[2RoDU/d딣mGG#o<P(NfȞUiՖ%ŻP-Й ՉFkhd%2/$gLB3nO ׳s=c gsẂNuJulķJT*k7B5-<+P%/A W6 2rdOt^rcxcOG\qM4 NŪ:p{{G>O6|}JΆ@`(E25d&߶tp&u}acv{oD#Gro.0@Q ebIsvw;E~$sflx]PB.L{k/e,a䄼P0ʀO¿M?]Z? &p*?/s@]|Cg +wv +뿳֗O,# <: 'EBA(At>FHTI3#r !?,?J(}(ƍ1#xB$01qy{ 4=e:#@E{q:C/@7s%ܸW,`SjZ;eK#xB$l5!AJ%Rtgq zO]A% *kXW7{}oSM .NYy>ĥ@z]^"8@K= F#Tۥ!}[#}l 5_B$ê_ =+zŠX(J 'xb^1xǵJ)Ov{U㊇Q z\htJJf0Ln?sPfFnVṽsw<bPb55;@=rZW-jTKZM\+֢UiJyMw+~Lvi uNE fƺhj+$ɒlB7س|v%f%n^4Vkr>~/ X a }vPLUcYշI8뙛ye}Oч `K[]  R"DAԬεPB1 V*=Ⱥk< -[0kv2R)VBT],clY -hlK~]u$[d)fxVsaHpc``xOxFpCGo-F-FYH,zB$ eޙ5#x¼0n6,ۦ7M /Xݶ:·_l˸"8'h"C'-9'dv.?Դ/91V lN%Э~HGEu5`֓`%7#eČisC˰={y!蝳739BaJ U56GF+L_YqZjl p[6Epv[5d;,`v|E h2\ygoUΚ6u&c3:'ϣf@[ y qJdww=_7M{,( GdU쯶pL?v"b<`Dt] (VP>t1WaMkBj[;þn,MeG#=m8t{thy0dzăxB$.`G$CoXbzk`{~lrq<%+n~GE[N3#,`ؘу,[F)ɒ]rB)JF t>z~xcZ5D$t#TbA&&¶@fL[- COoa Ew,!. 
xǕrPH8`_[F,+ER];>_[ z1NɄ*cwM谅Z; !jڹbYLUq.5Of.{z͂o{^Sk<#h<&hp4ܠg7=P;h [\{|g~UGM 5g X\!yjZjr@Q+TBo)nETYY.jW]BjaZX׆&HWZ+ݳ@Y*,\^ƤUi-oZ;>[j \k \/JݿZ֚Et׋Vו^d JHYK_z~i?鞽bPbX)Jr;JX9}N>i;Z9J;Bs͂%mkP^Үikr59 =ӞiO崧rAҞiO傪 FթOQ}-$Q:x/4JjM&wXmծD;~sN|в7aQ}.8?KNDMO0L$RuuU?ݶꛨ#>2…okE):ERH,Dע:P:dAZL%TM%)REITR¬WB!iZ4_K[K% ֒kVa_:f K\tcY;_[G,=̫߳uBa>"el]ۏMƟ?ϙ~-/Jf*hO RY<8-QƚvMnI?rg g1֝n(jY~j˥1#[_RXe9bmgB w[>?OCs9tfCY.~X-X-pK~i۬CXpjo\+#RWɝR\qe;U{8(FEkZ3!5ю Jl\^eXgbX<T_l;c\49M2 6F:\j۪|0 Bv= -]nO^2uFW,'g&:XġPJEdv|Ij̽ډ j ^Z+k'hԤge?[![q5$I~(qDGutG+QIfWhr;4ݙ2:#ξ[%c!_Fc]n>,;5vZqҢ?&7ȐŚ51ڝnUC#/:LdOEy\i9$^Q6eQ槝 ^痾M`>VC/}s8՟ la,s<= H(63LR?2' M^]=qz\ď:{;>/xiԹScgS/A1o؁xp+5}{1yk'-?[7L4H;1 biȳ&痱RfbG쏵#XOG2PB6H( cULBwбDTEUUE/xQe^0L| le;Q-D ݞ~Ph N$20rhb|ܓىߨDg1BA( BP%krB9]~J1**:QY/ yζzX{OmxFQ9fG∫ ^qDibdO:_WQX0\ۃjedY#Ԃ식Y[ri] 6FcsQ ?`߃$Xcw&X[:b6^luUu /[k!__;([w;t ,݋@ڀΖhhz<`Pk7_TG&S&Ѽv3 eFnoy8׬Vtk }78J#펧dpm!{6]:(nϰ?l|lz1qOH([2=)zŠկbWXQ 5ҁtLtpbQwPhD 5`_@eAIqXTKS{ό TSyz| * T P@H"+RM+0{0w}_^kl1߈KET# rzⱣl\,~Vh=fﯾY[doQsGq %am.v؃m3oKd6 "j9rgv&,ã,snQ67hpco%Y< INrKPmOa^kU-.P8_9hbY?c&ɿwM0v=L>6ɖdzoivM<]왒o!oY[ ԌŔ,_h GmhKAu ӹnwv,#|ԏ d -<KML M< ذ8C D)+Nl&6AyS4!iE3 8dgN8οl0rA-g%mԫIE5Mlw5JKy;k;jgyyv>-Y`ۣٳ$~U6eeoۏQ{G?&jov fBP+43y<пp9Q6U/y/n*moގh&yg_o/q9B. +k7*Ouϒnݥ/)vK\BM!>B>Pv7_dV`0\=hT;f9_nMDx{\)ۍ->hN:O g dH(^I_A컷-qܽ۞a.t_`9Nvjfᦾ&l {0w;8D+,`?0ʿly˟͋ͣ*Wy蟺w9v0O'05PXP"19Bbq%|W_vB4W3ҾZ3q$a HA'HQ,bA6'R%6n؉lqiܡ/X+Z ?"Dt-Vl[ (4OC2yB tdTbEl;:?M94L+Ņe RJRF-R(TBݦGc~+Y3i@bT,KJq-JivUG:"2z!5BP)4!?Ȧ%?'=˜0wS3{nc|{Zz ϲt#\;؎kO(i/;8 zAk"p6vd+N+Ezhk Š@k ^&k4/]੹'c:3(H R傌+ Ԋ Jj)ݤ 5$H i$M&tAX5hX5 _$. (IWO ؔzC!YYC!(H2 R]A"T2 ւ zԽ u/H݋2ҍ2;ҋŸBdBZDQĸx;\-^(IXTmW(C((>J2< DT(U=JR(u6JRgOZx T%tQQFQFQ8?vNVVDi+tQXhx̜5&Dipb)I2@2@(VJIKee2l:l6lx*{xnBQNڱ(XEE6.J[3BZBXXLgSZBZBBB)4_ L i \2/dRHSVMٿgHBBBBF$҄҄҄҄2(A+ + +*dB_HUHUDN@s=6j#v$`i~ i~ s,   s3AirQ00000002*d_UCM/N -MBbn\zJ\x*]'^(Ui֊^|*JJ&e\(.-])m[)SRZRZIJ9Y{-y*y*zʜ/eU$V),KG]V)Q)C2nZ'^@6M)IY/X!#-Z"))5(5BږXrdWZRZRZRZR&e _JiQJiQJiQ9+-r$V"-f5*&؍B)dgJ]U؉GAZ,4V2Vp($SVK!&k%ʳŨ?WR+,HɩdR( jS W(6UdR`F5W)RS:JJnŪlY?SQH"p?;¶f~L`v> y!;DL 'F/xe*t+EG?vAv+Glu'WOJ>Hc>~q2K'#&zVmv b%^;1tS²W\ yFюO䄼PB5ysx]̭@!.}/|O~?!(NBЄ!,O"D &M$ !HL %HMҒd #L 'M (LR┠$(MRT"LRԠ&MR4!hBSќiC[ўtBWѝC_џ dP1b4c83Lb2S43b6s<泀,b1KX2b5kX:ֳlb3[6b7{>s0G8qNpS g9y.p\*׸ nr.<) gax^ϡ AJJ^SB~Ϟ#jk9%[???????ϟZjx^ϡ AJJ^SB~Ϟ#jk9%[???????ϟZjx^ϡ AJJ^SB~Ϟ#jk9%[???????ϟZjx^ϡ AJJ^SB~Ϟ#jk9%[???????ϟZjx^ϡ AJJ^SB~Ϟ#jk9%[???????ϟZjx^ϡ AJJ^SB~Ϟ#jk9%[???????ϟZjx^ϡ AJJ^SB~Ϟ#jk9%[???????ϟZjx^ϡ AJJ^SB~Ϟ#jk9%[???????ϟZjx^ϡ AJJ^SB~Ϟ#jk9%[???????ϟZjx^ϡ AJJ^SB~Ϟ#jk9%[???????ϟZjx^ϡ AJJ^SB~Ϟ#jk9%[???????ϟZjx^ϡ AJJ^SB~Ϟ#jk9%[???????ϟZjx^ϡ AJJ^SB~Ϟ#jk9%[???????ϟZj deflate" pTREE QQ Q@;Q`QQ.QQQ! Q r Q@ Q`!Qe!Q+QQ+Q+Q+Q D,Q@,Q`,Q7-Q-Q-Q*.Q{.Q .Q@/Q`n/Q/Q0Qa090x^ϡ AJJ^SB~Ϟ#jk9%[???????ϟZjx^ϡ AJJ^SB~Ϟ#jk9%[???????ϟZjx^ϡ AJJ^SB~Ϟ#jk9%[???????ϟZjx^ϡ AJJ^SB~Ϟ#jk9%[???????ϟZjx^ϡ AJJ^SB~Ϟ#jk9%[???????ϟZjx^ϡ AJJ^SB~Ϟ#jk9%[???????ϟZjx^ϡ AJJ^SB~Ϟ#jk9%[???????ϟZjx^ϡ AJJ^SB~Ϟ#jk9%[???????ϟZjx^ϡ AJJ^SB~Ϟ#jk9%[???????ϟZjx^ϡ AJJ^SB~Ϟ#jk9%[???????ϟZjx^ϡ AJJ^SB~Ϟ#jk9%[???????ϟZjx^ϡ AJJ^SB~Ϟ#jk9%[???????ϟZjx^ϡ AJJ^SB~Ϟ#jk9%[???????ϟZjx^ϡ AJJ^SB~Ϟ#jk9%[???????ϟZjx^ϡ AJJ^SB~Ϟ#jk9%[???????ϟZjx^ϡ AJJ^SB~Ϟ#jk9%[???????ϟZjx^ϡ AJJ^SB~Ϟ#jk9%[???????ϟZjx^ϡ AJJ^SB~Ϟ#jk9%[???????ϟZjx^ϱ0Xƙ 5zr抽7? oE.yx^c`$F8@bRP`4ӈ0%p@Zr`ĘC<Y;Vx^;0D9wD$!5.VZNg2s_}\R?gKY $'o# 3C.?>g $!FI|)wW13kpAR 8r#z $go]pAR1;Cs]ypcC-Gܤ!/]x^;1 D9_Ps[(-yy4OIVxϺ~?LjT-1c 7Z _4wh&1\ ) w1\|!? $Ⴄ.H/=U:_{3"p& &8w}b ?>x^{P- z(FALSETRUE deflate4PTREE70 deflateP= pTREE "1 2@E`QF!GGHIiJ HK@)L`LyM&NNO-P P@|Q`'RRS9TTU 8V@V`W=XXY_=Zx^11 ycDZS".!j JK#](7m+P l?3K7nH9ܐ? 
^"pCv'C4r!t+t+9v5RA?~ p9ܐ_upCjXp {w]v AϿ)x^91 E9RıKDT Rs[09drm>r~;umdS}9W[do9\ႬϿC3A/9\t9֠8 Y!7`kSC ?SG~GpA>M?}UiBpAXx^;1 D9|lDT BoҒ^=S2u9v{oWѪJ=gF# kSNH7KdĖn݇'$7RRz c|8!|UWjЇrߡJ Z _}"tbpx^A1E=Baiƕ8p⒤BK|r~>叒Z;gl{ 'd1#1@SNҟ@@C Jl__ =[~, YkNƿB[%Fg |0A8pB6zy+KWNڿ3Kix^A1E=@8+qto% hs]ί}}+0wS}pAr3 lI9 > zpA.H}٠뿸[ $WfgMU[>O o!a yq2ÿ!GrrC@NTx^10y|%"/Tf#ۑm3}Kh˫]s=*!>пTp@0?ퟹMz'7ip@Յx˷$I0Q%c8 C_lp@: Jf~+@K@gvB?;&b'_J(:_hDlޛ~{x^嘹@ /a?8"TpFkQs壪P?34QcϿ c`VJx^1 $ЁI4+iR?n} 8t}shBqOOf?=Q?O5M_߄LMx'||l o?Zӄ7şPިtwS?BO ?/ _Px^홱0 )ɒ- `>z[! ;:в%;Yޏ`b14Q|j6f;Dm^i6ߛ&jO>=Ͽh5QOox^;0s$J_B\Xo$MIKIڨ7M&OMF|?&jo5Q߻&j_?7vߍ7o?lx^嘱0 3,x^噹0 ]AQx 8oŁc؍/@H?,Bqi7ި&z&z&zX' {sLMf_Zi7??'''x/[OݟWƞ2Xex^嘱 0 3EY4*i Js$?i}_/v[ڨ7MFDmDmDm?'&jk6nj6?z?zDm?|um?|x^噱 0 3d{ UH܀w7 Cz~z<(z7'zS_'zcy7ş~/cz7Do?dfz7/;W#x'zbO?_~>.x^嘱 $nwc܆&jM&MQ蚨M5Q|?_ /:G~x^嘻1]~T#;[qKv!ޟ~?cy?LIڨvM&omi6? 4Qwbk664Q?5Q~`sϽk6ɟ`:wή ڈvx^嘻 C1 3R%C_%E:jlh[~W3b'j{6_ooO'jƾՖ'j4O&?=Q/M/x/oO& |%; ?'Yx^혱 0 3mJ25*iJ{n )vOS7Dmi6>mj6MFk6>4Q?5Q&j&joy[߀?ہ-Kdx^혻@ D)n}\B@Ī=^4֝K~PDm=QDmDm?oNO7GDmDm?Qc_/'j'žo$v~DctN:x^혻@ D)wPB@Ī=^ɛYK>Ǘ/O&O&OO&oOGDm?| 'jqڸuj{_`߿I ~Ԁ?-3<x^혹0 ]PLoM]Q4Q&js?{5Q >쩉ڨϩl_߿189_?Xx^喻1]Їnߊg؍_ޟ~۟}P\?&j &jCcϿ&jiCIڨDm?;/&jo=c_l56&jamx^嘱 $Ig $!FI|)wW13kpAR 8r#z $go]pAR1;Cs]ypcC-Gܤ!/]x^;1 D9_Ps[(-yy4OIVxϺ~?LjT-1c 7Z _4wh&1\ ) w1\|!? $Ⴄ.H/=U:_{3"p& &8w}b ?>x^11 ycDZS".!j JK#](7m+P l?3K7nH9ܐ? ^"pCv'C4r!t+t+9v5RA?~ p9ܐ_upCjXp {w]v AϿ)x^91 E9RıKDT Rs[09drm>r~;umdS}9W[do9\ႬϿC3A/9\t9֠8 Y!7`kSC ?SG~GpA>M?}UiBpAXx^;1 D9|lDT BoҒ^=S2u9v{oWѪJ=gF# kSNH7KdĖn݇'$7RRz c|8!|UWjЇrߡJ Z _}"tbpx^A1E=Baiƕ8p⒤BK|r~>叒Z;gl{ 'd1#1@SNҟ@@C Jl__ =[~, YkNƿB[%Fg |0A8pB6zy+KWNڿ3Kix^A1E=@8+qto% hs]ί}}+0wS}pAr3 lI9 > zpA.H}٠뿸[ $WfgMU[>O o!a yq2ÿ!GrrC@NTx^ @{uͣt$.]B + :L^h^7EE/jGB.U oG?cGLz3/XDO?}7'Ñ!9|$' O<QmLzqo.sNc>ﲆDv: tE:ߙrB& /K.PqGARN?!:r2CBR=;Srz-K NqJq ̹7A6x^ 0RzѺ, V܍mSX^+ooI֐ V-eCpdrHģ@d_vH۟:^L2SL<զ*Ifi$ _z^.eR xT"߀^Ɇ R/+ܤ;a5~v|v](dr!㪒a I;x*8OE]= Q 39Ix^ E%icl4ѴuG_RB^ ׳ V%e\ь-8Kay-mZ?/e-!?U+6oս5Pkim:©F%I?8nq ?p>lBg7dx^A0E{$)aD  uLR[eggڒZW%/CI&8.\qv^ w tMY-E @fъ[/_Zb;4C3Z=mߍ @f/Ѧ x4N98l>pDaWP\^+( t;\#!4wH G7x^@ E%& wDMa!/n ,Ϯ4Sʒn*I*jo@дtCm8_"^dg5 Z eT8) MBVP [ʼnv x//_8O]~8w[pf%rU yMD< r` 2f++(IGq4?x^ E!W]4ܸj;i1׸_gyva 3d\K T!ѸCL'K tiU!ь p#,nz!zD38h[$rk$n rq!?\_)ӟTןx] P pD r~, _#!?9[_|68x^M0FHVa7tI . [יi'^=.9^:$(goG9կJ:$C?O |鉒ڌDRN=r|*1(fiq8E⃲7eXkx@lRQL en mW&|v#U!O˰(p(oZ/^(e+O?m5x^A ERѵA 1mݸԖF j\ Ç ,Jnj \AQO}s _9 wHY iCl iC!'f {.H'q-wH+t9̀OoB:Փfh57` &a2%4pIow\QqF tPݰrD`7x^ 0ES]"ǤF>z~مdromrrOZ;$.CZQ CGs-(g3;> 6tDK 37=nZIZ#TK L۹[j a!a<, pN8Upޏ1Ӯ\K *_gUEl{o[}?.7.x^嘻 E$1 &CCdߛzt::KͶ\J M!6I޺R:$N?w@Oys 9M@[VW t:g-rɤ9iJFZJ EntK -?8pz}BJ t!%ur!s.6Rq N@B<-{5x^M0F9-jd)A=@[Z4Rh۹o7ӳhp9y'CbRc%ۡ SJ;'*5!ksc9k6!q$Wp\qnO\(/b-(lɛ5@#z#Wj a5TnP\Qt'.@Iq?4le~+k(7x^M0F{$ ʒ`BL4weJ)i- hb< [];s p3.u-=^F)wHtJ{ ?@'}!˟h!qq?s D;9|Q}=WP@M#8VZDh)y8l۳!%8Ԥ4p!:~с;$O9/cš}#?#څIV`c]E7y^K :SBowH4RFݸa:"zQcur @T.ߘ8x^0EK-!%Q"nt(YL{(;$/u;i`#0V;nqHy[*JGpl7sEl@>sh/" }jz=wmdp  deflate pTREE MC g9@N`P1N]ϩV,Z? 
ܰ@ `#2@ AM G@K`IZ_ c oy  @ ` x^n1HaKHBA5[cZ&4HEEZyŷxjnZoo~l`w At%ПD`"bviPQ~{|{nE4`HO%=J`o4!e(6?90Ml3yVcC}NSnDn?uM3"_PE\kˋ]/@G]R0w~ ?cLZsƻE1&^=2QyTcoN1G.u"g%0w-ZNfS/=@d yTCxOec`ߌdQ|}S(8͋)WC) h䳹?`bO+9N;E~N[Q@$`Oy>pD >{ý u?x [2w׿+0`&g&s  ` ퟧf 4/?i159)qh,?x^n0EI>EuAd [AS`>t}&g) 8BjZx!d6mV}!rmV}ׯ{:JlA{yO7C8;V6M`/O1Iŋ|\,:Q یQ-?I!(Y v_>YkM;3ؔ?Zi')1u f%0oo g_6@L%"C"ן?W`?[>A'^Nb\]JA_ .Mʠ=MB E<1N0Y **H\)(Wr?9~4`IƟ0:WUH'Ybd[t{(s3!8"ۨ^߰(ND>m#UwR4I 0 P(x^n0EIsAb#[: iq?L}hyG$g4e^}7lz iY1(H!&CmvN&2[g_}פ K'ԱNAAlFlNHƜ .isԡ?!|I2 :oYuGEg5`@)H71~M_k+wq%)%0Hu d\m Aj;E8?|MޢplC#gU4N21˶+v K<<9el ġ:SPŠrQq]@`>_ ~N?,Cj]EboA5s@Ћx^n0EI9,yMibcXq %]P09X?l7ۋ|u:VGYlgXrN2d7R4k$ӳd+c ˤd@2 V60CH2?['6I<8Hˬb]IS!;\f5`嶽VpYP25tʧQ@@ro}M˨ 1e:Ԑc]FU(wT8k]d͟L8\h}>rjP eV+he1>Yc&ِXf ws6}k/QT.#'Wos3mi,@}0{>' &U +o{:>v>ZHj`?i:4H^c9 6T;oJs֔'e\}eR}?͆#E2R7(l}(W_[$Ԑ?<޴A\:DZ&uGC$?_0Ϳ9x$C@Ae9[GyքA0$]O2<\p8x^n1 EI"^AڠNS@qƤ WR݆98Q$5qy~}PYIè):,G6=|^AȜ? Df@:'ԉ f?f9nAKEfM8PX Ҧ?} 6WK*&&yB믅wtw#)rt`96ՂA0΁|&*FYpΒ6 m}~-AZ <<"[:Y,C G m}8%|)+/4Eo,kCI9-;a0E ,_=b(B( յBAQ@O? տ7Nu*F3BZcцXח4K+!p~uoeD$@l x^j0BiFi66[ KM7-UڃAGCG^oo6f}]},z9TJã$01ĺ (9%/wytNXQJl ѱ|܍yCGoה|Oymg%Dczm?*cd|B ?Zg0*KsT"7ěo4x[B2ּtøx$%JDqT-v|(Xߠy NI GΣM,4aAYO] s;ZN4* ySP^V "`/B}h"`+/_DSZoS^9R2=EEZuXŚ@_'HwzdqlR vm>-[Ӿ4``|{o u^Vӳ H= p\r "+utm8SxVETmN%^nB(h}*{*S6?L Xץ\L4@|(m܀l_B(CM9 Gx^ݘMo1(DiQr^;!.Aq}g{xֳ76n۾ܾ0Nr>(!8 Yw-'9gJ;:̀zRFr qc@_H?W\GE-`$J!d!r rAIBxLc =}|9ʳT!g'3{O!I_g9/ t>=$Y:۔ P`rcҁ >Pg9G$~Jat0DQ4-HxEWVRxNh(`:ˁFi_Wɕe"cbc_KMγ3T^c4 g1$( J;*eyue5qCL//B,!RxKi@ߣO=ʟ?\ƶ}H.-Yc)O {(K!Ku}Ust*?}6m{bXI=zxģ7BT/3Hm RG5j|okӡp pߟ}Pu&kjzx^j1@I!6ni8m"i$ԓI) ܏"QNw`˫rcy^4Y⢵dx8a-hu\v?ٿ?֬uli_GŚP@ӟ4)w)9; >ׯikŲ5ih?3Ա=7N/ B/4+w`KWЕK@;#Ql;Ϥ@ \,R?|+6D@킦Ֆy;LMs?_1`v?h :hdO'!* ?`o8J C96H6.g\Ň[aUK&d`GQu(Xvn+Fs_ S@e9˻79 a=v|obd`};j$\s{AU'ߓGb#9ٟ灏)'W`'4TL8=Tr)x=:?|Wx^嗻n1EI_[Ɩ-r䐔a||( /=eqy<_w-oș|X6y\k],j@OˣGߗ?)M+qq kA!%7}fJe޿ mЂ0O#hXa!?Z-Spu<8!rĢoƚ"=>3Gχu yi xoP`7Hs+Ԥ'ûʷ?F@xٵݿVn)d" Ά(Tfch/[~Oqq 4p`D6mK~R 8zglIGRGB">/ ɔ޿k%#21;~=/+GE?{S[.*#?x{(:E›_/x^n1EI~?a2$&B vy5 H|Aqַ.eQCZ"[p;lPPf(y=di MynLJ!!Ec~)V$r-F?>_ "+1Ύ_cS~HԖo &h71lUO_41Ktrmܘ*IHW[/F%pz-{^}L_`l(.o浟Ԓ5qv:yRo oi2i~LOZ|m^' x;Zod] 8￞%+#՟Xiֳ?Foe FU+w{#I_X)Hx`6)\o7QP>ۘoHuLcx*?[}W0=&b~:BacnG|6z8fSa>tx^ˎ1E$]~-G(PЈюFtZMt?YZn?|ةVإ|%4Jl:cm[6+;I?ޯX'}^̌`] rr*m?8[v__\ULu!2?VbL)@Tx:g h4OlUl>/+¡̔LҫP5y>/ޟS ]uyibʉV]_ (\_ 'SI8.ֆRm>qq 2dRL! wom~8R+d[B M`/6,J m]$ #o,>M w_ҹIo\*${_i^M4 A}0zq(&-~Gz6g+?GLz-nB~.VqH;[ܕ yZFb46{{bj3 /F"ǵؚS{o@7eF:fk?Id 0;\?(Έ wx^ػn1PoCRa'36+-pq?>++M?zX3;ǗFK1re2R*%WKF/)'B-f2~,RU ON0ƞ?-^~9k7ޯ4^ gDVDp{l\un~-?Ue{lfUc>zqFJ|ko0)qouH&=?"A~/q'\tľGV2y\?'N]J;MMB6OI]lT_ѿ~qy? 4_OB I3pD9 POJ4m)+d1mO[1;ܷVqp}}$4UR_ c~}Y'1<|9C_?ؠKJ:JpVDgo_mU-+oorAz;Ocx\ VoG=Cgg_r KiT]JSp{*5OS=v/To3<wLI@Ex^In0E}$48싃:@܊"# ;@[."YhA&n6gJ7"*H-Lk/e/c>W7Qȹc3O9V^%iw|> ߊeq0lh3lP(c EiX8P:zo*B2H#\vıdק1Ox\Ixɡ~.m >z6x%Ԧ}ݯ[4Q?H>#8韁?>:? 5ew})R׀Ɇ&MHf 7BWot|V]k[9ONWwü|ҧc-dCa%@sqFe pqn?wJh+ooFf/qCU$~]m=3>.?_֌v]?ŗa}osZ"egouX/K?ܟoBҥ`x#) [lUV3iaؤtҫ;L IO/?59G?vf l4Zp!¾K1à}^W E/x^Kr1 D}$/GQ|~Ʋj,C; 0n,"xn҅@)+Y#MUCH[poF_!PU <=Ii5W.N+E9~v?*Ӧuh@&13xczZ_HS=%s%aqG%vqCkx^ˎ0`Ǘ^VJ MwPhv6OpWY/>%IV Y#KiO n;OXFWotRToL(>DQWKW]pV .X}WJXN'l fiYsȲUxNBrzni0D("z~BIwF ~ Ʉ(󛿸o'J]-ϗ zT~'$^QiSிzkD?_O'՟E:1o~w߆[&\'է_" d7~Nmrв빥{,Oc5u9yߖjSTc~+Ik[l=(IhM>N˱1zoBɉb5~+rȞ<{~PZHkn=T*2GnzD$inHkg`' JQ$wdx^n0EI| G) hP7H@q?4*/W#iw7}o_tkEck qe :Go~-.6=?ϒ5L7( :$JG_*zBaDΟ j CgOzrmzUUkp&~z e_`9䋤`t!Ȼ&~z ZgF?7M+*4)fsTR?*>4aY/7ˀH0ջw k[禑?<FN2i}'֜c믏!?r>;0_vq%Ĕc+`؟/Ddb|&~)Sحg_)_2qɚ-^{ZƢd%]~kisTwB{}`W _f~3!˾|>x^In[1Du$Ҳ @R|}K"ّ>AՋG]|og_BXm.'XvE\O=ǷrĚMS?YI.Mq~|=*ę3_@-66"U S/Y$<:iwxR0+SS=^糂tULo7s򗖀&g iWؗLo/5^a}kvr yP6՚|,"NOM=إpBH>Gz;9 _J""KHixXd`c_c}^! 
pLBD>hN7ZcLo' )v/ c9K@"r+ p%v«  o‘ r-b?/aNKPQ\S˿(PI~F~rֶJkX9?p0z[Dx^jPH̹/MB N6ݝ*n+8O0v4sH/Yp!4L0<2-fKޟ~>/MPRN< =^qAQly8n^O **EZ07O5es&mGDb{gɗy4xPmN;w o)^.}FN\?D|=x]x4Tx~FQ^7UR^^&co ^u7Cm5\uX!rr؞'ܕVxD)Ov6`uZwO ^!>LW/$}?Nr 0.#bwYf_#yn"u<Tpͭu;?{S·L׻;^K6)%gR(Ǝa?gVx^n1EI.LP^2,X_Qf¨Hݏ;/*NC9_bkopjOl*;4o ߟW6c\ !yi~GVƸ8&Ƅ꜋qqL κjMc\ ㋮(x z\ ?smKJ!I?$LB  W4WƸ8&O|TQܬzdm7oqdSPHcqO{:vֵ1.bb} vKc VXK aKcw.+jFCx8_,S9z'ZCe `wxĝ29ou,O=FׄO?w?I]=B^'ގŰ>r֨^[_f0XO$s&ܔ_Ww[;,ͽ咥@ x0l`R _H=g_@" x^Mn1 s$LF@Ead0_]UB.@F#MEU3Bu}/*p,MsB]Mc,SAUvq_~"0ײϠBu,z~ WZ6iwl.N/9.lUhh_e.s):pG~Đ ?zSEols2iplm_&~iӜ5wHh?@r(=N/fo]&~'Љ_Lbyg~ 8e?>珐fKӧ:qA<o5. Z2E~o!p?wGO׸\/ c&x=hpFgm`%G4M}n*9;n?mf1UWx^n1EIvU ! X.'4h ӈ~o-MK3' r?1>3|5:|˞>ޟ-nsw$XU 5(cw<>=[) ;n D 1Ox KkVFl O2?2x# -g)K~gb^d'cbal!-FdV_qu9?#U+קS53`f+.Ir3}^Ш8T.s>a2p"j_G;y\>:^/"5sl.Y._x^Mn[1 s$I2qu{v~ R`Wt'gY|D~m~?iD6);Z%Y)LÜBmՐzn VA*!;xi෸OS.ek)~{[+!U̧,!?krրsUL!D54z~+XA$!b'lЂz~| TUoX& )f ! {'Ae'C}9;uZ(l/|`r{?BShQH _"C`z~%_/3t*ݭ$^ޯ3\L4/w?"QuL=?>γ𖒻&2I~;lco{3)e{~E,;-(<_/=^u>*Z_[mTvsltՐ ;e(3%矻?F"px^Mn@ }!Nc-=h4eAr xF~\|Oxp!St`3J**6[p;תH@Voi7[[4 #J[!K+Ypx^o-I?Vn_8Ћ- |*g3Kűd2gՉ| Yj2x]Tlk<#<⏟͚TYۥ1J \j;0 #+RT+~#] ˬ%5 ~Έ*[GKSp+ͣ^^!9.dߢ$Gx^n1EIvU&$C l)F+3"x+o/NufcxIТ< Gx~pِp|SH:lH?WFk2>ħm'ɹh|`2?+S:_lb#VI-gwCtk?k‡C4JȾ1oe 0[<FOllH`҅?c}a@JƧcX5_N} VowX_4Lct _r9^ |B&n2?}ӫFC3y^od=JT G|'(-TnFc]s2?/ oj^LqjfSkO J7vXKWe|:$l.DKPҍef|+2?hܟenLJ\s 2x^Mn@ -H6z4q\DziU(ykrA#o?|8w/)u447ʳt[!fY-s-Js7+eH`G٠,+&FbOTyGfd?8bJ:|\lP鏄ϧ_ oGƦ&ߜǿ'> !{١+l*_'lO~ &el6u^[R˝{܎i~ZV r"L_\ŵ=";tEMďeQF%vE(_ц ruЊAH1"v/yZֿZ7/>zm~kec+~_\?@ co@o?AGF5@[dǝ%fRɾw/GE deflatex pTREE  Q@`>Q @a`)] @`L t   A@`r@x^ˎ@ E)M5L]/4 HeU^ G\ Ј6kO|=`MR_\m-|‚+ׂaa]WO뒀KL(DhǿyTĠINOf@69:"O}N@Y;1I:'"cH."q j{|>,d#176&ȵ@שl'y=7QZ %BINKǯj'u~"eϑK>GhE>[Ӝ٢ ׅKgHgMGn^`=чT~iǁm\6 G a_[ x^n0EIሏe@Q40bF|Ć_ 0_ =Pz'q>FfP/QdqLcoO;?K,*Qo|Q& s=]1HRwtʘz;sE&ؐYIB́{]ϳVT~qGKHՠXK%C"D'pCO \Tu廑\BQcإ Imo6?ŌRBk:28rP8Zy.-| &xY?| 8.YЖ/+wYe,8vdHQ}ϘllD_K7_~ؔ?ÞٴW}  YPcÿ*8z lWdAj% QQ܄DIx^Mn0 F}$S?6fEh7Ҕ4 &Ef۾$ZGi&؏Mp$ClsZ1{ ߽IL48F\m*$TYݟb x]h C_ꊤ蜉 b#1AǑV`6+Vޖ+/TP)RCQGz|? djit_AFKuxL•Mp$U^=?ϰX/[?V?=i*'ڣ(.pXhQedgsI CN xBL_G"Fgg7zɦ8;:H1U."1 ӄx^ˎ1EC W Ќutt&DA4,XeoI\kY)z z׃02Sz\K _ %׸R7e/)Wg|@|^*㾡]cK?(- i<m=YAUAlRO41^*v/e>,>izۿC˽zB՚=KO6ZSWj|$1)xc|C?' ǣ% Yte/#J(")ue&jTdĤn$a!"_4 &]i80H5-cP|{o آ awx^n0 ED=(i9-BKvC. ;}yg'qAݧ[,MvS Tn`1{j9M2 fG)3y>Q yiKBP ]њQfNjaԇ%my[ /8jUѣmZi38SF)K'Vk2{ ^2ALX%y^ Д7ooKrpw=M^\ǔ UbG}x,_+ |[ipSƠ70c!8Lu`La0DĶaϷ…96i:7І 8eL=6TiɊUiX0rhvh@me3!EGd3 |'+ٮ5y?z.⪵}Ц>|#do-=/wщ P`0Dp0@k{71IH₤ș@8$ο2ۯRHn_VYLeoPa>C$v3x^Mn0 F}"&@LȎigb.)x:Mх 6%aj ik[=mI퟇.Q+&)Եso51N)#W8KT+oWX@ZVwSc js*|%]YEι/u!;t8*yZ&Vb{9h4{h Rw錚'$یyc1Z%יVim=|7?/+;3hye wNsl rJ y9G r"Op[R.aD+pgS|68٢шۀ?prf%WHTI.兿;;edys@W ax^Mo0 Hx,aNI n BIx-/%VJd9 (DM^o_tA)fL7zM2`6xT9^*:P5}6W b}^oq$cZ}?߹  )% Sh;&Oc7YIrYo^;4w|Rs%^onQeW0s'$oo{["(Vyo)Iq!aE|dI[;3K<AWؒ{|^gw8E(cRX9I 7]G$X,z|hW㻰5I*K59ϕ룊i7z? .o]L+{~ BQWf(^*,3H>h|>q*6?me*7CvIg^%ښ9͉~x^n#! H8n5Z)W00:eGz/g! <{堐Q*OZ7J]L4 np U.>%*Pht4S y 8 vrV8]!r}~k|r$VY9_DLutx[*` R}EuV^i 8m;;}~~ǕYy $[6 0o\Bf4{x,Bܷ~R%*}ś5Z ?)׶ MWOAF_q\Te}b";Vg ~쁀x^n0 E~,S'}MQtM{bբ(Ulϊ2M^uל[ IϷ&8lY`/R kCjjLMQ R睼Y5[qu lE7; ,KsmTo;&]avX[L7 f)zV ؠ=_=@ pO tivxL{+rKk:JvñhA`~~S,9nU YߚL䚏> ;SFq`p@6?ag(q/Yb~2I!Ef%OxaX_g%oA PlHDp>eH>H<[`hZC_x^ˊ0EI*l&0Ȯ&Lm3dB}|<[}lJTW:}ހIRkA}p6: a5. ŰGb;G2G5! 
R`iց0-?x^ώ@ HbъvUQm<M@T!?4pk "R/!.AfW6(jgyMl`uV*x9W+g,i,Sy{܂6,@jVjɨ4 [m;"JW#dC*s,pG=cP)S7 C54귰H,jLg7k1>?v8 x^jA E% 1`&;k2i6t@6'Xg{V$5"a4\Q*<_@h Nܝn"HSX (pp~ 19VcA0[_|+Nu_5$򘓾Oii.ٚp\*8J.k(p)8 ].9{i&+֣4 DFv_{A?}#YcN6yR@yKbyi:m(wPTǟX[UIR/ZX\u8gncviqf d,qWjNoQ*q[H|d_)Rϟ(v|M*|$nx^Mr!Fu$8ND9@M,}0Yr۾UCW?rvPtǩwͪ ׏+lmV"+(&M@V|߁.?/pRZI+Zt+ic4s, !8ǂq2p.f?4H,@éz2U Ljr~-&v&hVIk(j ٰ7:i'0 Вo.??pL}8oR$>!`O6t klnU 1,?3n lsx/j!jN\>DJi+g-ըV|ݷ_+$5 `cDX^ ECpLVl &NDBx^n0EI>DrY 40tGI˨"||. = /<`! Uc/0L0iKdc 1^tmqњ|t*gܖ_3씏}|Eq\i'An2 ^7 1)w}uK- H >>/[rN/P ^0 3hڗ+B* 7 cs4s o%Li[/ QsI_xB,%7`V~ ^mK ٴqyx RT?7,x^噻n0 EIz?dRl`0ޤH'dvǎ '=$󒒟W:skWr8NZFF)a(I+i)ۜ(L4o}&X?=~֭Z܁mPp#(ITZ qkO-{=ŕ}9PmxfΏRX{iL$0oѺNlΚ$Q>'F)24-$(1_/%4/|7}s,/R4eE_j@eh>bSm |No?2o*h02ȪrNQv~t\Ay^/f0}c<x^j#1ETzkĐ $N3#n7j R_͋MS4L!KsTR#;vC4^ 1; /& ~.`Lo7 JJ!)RpV\ُe,P*i;FpS[f/G^81) h&+6۽t3:jI(zmt,.r!R?uWpCU6{ L ˉ $ňU{iYdQ_Yow+qOGaL GzVk bjFW개ǟ26mIChKm KP FZ5M[0: \~߬>g@Qe%et8?ǟ}TsVI-Dv,t@s'gҨkYv% ڜT]p4: \=17 o)EP?Oa7J?j69b2Va~ZJj0KtW$.{ i\LOm'pT}S0hֻg ǿ *˅ex^ώ1 H3w\ڲiEqlw4" $ޕ&Uel qtzF'glx60<=)hvnowcx Nލv;Z֮MPhǷ/AF)raCڐD 0KlOK\D>ԡ G8%~UV/ig{Msq~g '?: |[K*V`8\@z\m!NKu˖b,(pS!uMRKhbwghu|wȆQ.pH9%-q' dJsǓa2"pUF%'+zaxOD_/^x^Mn0 F}$QYr$]%0^@ؘd`A uѝ`[2?{ 4Q:0fQWJ V"Hi*h{.Շ0I[!۞:ݐK {϶oy@RXѴoz$xݻd5vO$Z5|8#Ҿ+ .6ʾ5/K i,׼n7)̦ol0WoLX3,bՐlJx^An0 E}$,iY$-tSLDǰ3 ÏtmߊI4_R%j8(oq d t]Hh̀{|n| TO>[)VvTO20mC ?@3 8^:]\zc]AR PN}#Qv۲8;}T0&TZ ^<߿̐Of 1AtXm!x_0r/p?Iz)VUf)`*Jab*d'RJp5cиa25ll>뼧cJwx `I \zzY9"pLT_0x^n0EISK1 -`ױ,Ą||vOМY]Hs9̪T'y<}\u7jS5h>[G}8/={PI|U_fMҐSE hٓA'_l5CGyXبoPG}k6:(p\iBYbM崶)¨7ϞBPԜL?rY!QoIxh` F'sM.xrZNk_rJ"/'V6q8RbЀZ^u~X+f| J QoPTn8,7dڠ:\O[*I>RDo*JdSux^10 Fai BJHoe#ې5xxN֪R]~b&$>p~m&-w1\t"O&UUF#knfR>y  deflate pSNODPTXڒ`hhh0 X x TREE K$K ;%K@%K`%K&Kg&K&K&KH'K 'K@'K`)(Kt(K(K )KU)K)K )K@6*K`*K*K+Kb+K+K+K C,K@,K`,K$-Ko-K-0.x^ϡ0>;T{p_)~}W_)~}W_6x^ϡ0>;T{p_)~}W_)~}W_6x^ϡ0>;T{p_)~}W_)~}W_6x^ϡ0>;T{p_)~}W_)~}W_6x^ϡ0>;T{p_)~}W_)~}W_6x^ϡ0>;T{p_)~}W_)~}W_6x^ϡ0>;T{p_)~}W_)~}W_6x^ϡ0>;T{p_)~}W_)~}W_6x^ϡ0>;T{p_)~}W_)~}W_6x^ϡ0>;T{p_)~}W_)~}W_6x^ϡ0>;T{p_)~}W_)~}W_6x^ϡ0>;T{p_)~}W_)~}W_6x^ϡ0>;T{p_)~}W_)~}W_6x^ϡ0>;T{p_)~}W_)~}W_6x^ϡ0>;T{p_)~}W_)~}W_6x^ϡ0>;T{p_)~}W_)~}W_6x^ϡ0>;T{p_)~}W_)~}W_6x^ϡ0>;T{p_)~}W_)~}W_6x^ϡ0>;T{p_)~}W_)~}W_6x^ϡ0>;T{p_)~}W_)~}W_6x^ϡ0>;T{p_)~}W_)~}W_6x^ϡ0>;T{p_)~}W_)~}W_6x^ϡ0>;T{p_)~}W_)~}W_6x^ϡ0>;T{p_)~}W_)~}W_6x^ϡ0>;T{p_)~}W_)~}W_6x^ϡ0>;T{p_)~}W_)~}W_6x^ϡ0>;T{p_)~}W_)~}W_6x^ϡ0>;T{p_)~}W_)~}W_6x^ϡ0>;T{p_)~}W_)~}W_6x^ϡ0>;T{p_)~}W_)~}W_6x^ϡ0>;T{p_)~}W_)~}W_6x^ϡ0 ð=[{"A3fN_̿ݚy {{- deflateE/8-pTREE*u78><pE&LET,c[CPbi; p@ !x10 ah /”"ěHH-x^˵Dq&h![PD29"^de{?s?a߿vg98zs~%:\$C9/>bo_<g(7yS#y.9纄 1s\Yem3T<ՓmSOv w7g z[O+\d{8Od'1/jS:5:QPn=Ϩ'KuY`f;Y'°>5᲎4e`>q qLwP%ҚgHpUVO^EacZǁ-IAePn~Aw A)*w7o08`Ev>P١31t1@#~0WKZn0(z0lZa?0f01:d"fZZYw6dOcũ|ȏtp %@y| yVGrAFJQ[- +tl"1Ya 2@F>`v+`Nv=wlD?ptYQA됮V^p ^ #08cAF5}Yn1lCgwt|n|BԻpӣt݈dN"3Hvts7[.xQ*3vrQk_ tn$/kC퓢~4֩_>lW¥o3=\Pp0yTZJʨD1ĬͥޭJ3`1 L &jfwuj'ڳbD$B_`ʜT9z!\:AN`:6N06Li/Y؂'CԢ 3}rpY` kzc8c7)oN]紺`Ŭ^neJ} XĮ4P>-\CTaæcN8wx^7  &c1je@`0W[g;?o mpvo.zΗO=uQE p݄qvrvs!-raR|Ci.uH)RwM378y4G! xsCB"P>:\A`5<"f2|8ṡJ"f`aNy6+`Tgw5'O 1ܪLO<@/Ƭ]|)`GMJxqۅlh olƒK7a#0,GS4- af=#5c:_qĺ9\,Acu9 `Nw^DAlX3:#D*S`Lt0s;Qo: !Q5}أUuuʦ3Q',IqѨgϴ[V0ǧ ب5 X$2gcf հ9] fxnnXG;o`iD1]he'V[rgV0($>PG$PsjG:y" c瓔jrʰK)MLa.E6:C1n20QA^3~Թ+NX[1)`A`5(鰎 t=.=3D ˛9 v[Yq4AwV0"pmj|`zGb01ٻ[aX:dMrӰy֚-xkGavs%:O"l*vy2KxNG&HԒe[;Qބa\{ :Y ,7< c*`/ vV &lr TDz]6&㮯lLmw5a-`d꼯`r2@sQ]v aߎŠ~+U++`r+Ş9=GG ̿ ]qc3Q&,]q悎|(F ;W͜ KZ%V1{p`_L+G3AŤ f3bꄭanX_J l6A9V:zrga?dU2 iJrq Kg]!,T,Ӵ Vi]|Y0\2®xY^Uv-ʭ2a3hx_jS?s:|'+D6sf! 
[)`9][\pW []'1$,l jh83YX>(,ƄgzV\᙭c6VQmty5MK y nʀL&3!ѽ_эMnMŇ |oqq8JۈM뾂u=)aow6oeSэ,|[ ||]sS/M[7@ֵ:{` >k{H3`; e#o#t d| )+ 5`x݀)R~' >`F2݀S`OW >8OG2XGh>ɏ|"=Gɻ>OR T<@W'T\Vl (Xmj&H, faJx̛[YX0Y"x%X>Ш 0y%Q͸?W`71MRQ27s,LJ”jѦ* u4+blNvYq7jWA2e8gDRqK 2HP7RM]NEk3n(Ϭlf  f~8F701f6 ճ xTds:}a?Xp>׀&\ˍ(36 aeF5|c:V31lviќl) d('xfct*OVHeuip"J3@\^L`e|\σn~y(]Oc`wuxb.OR06ŞyCUڝڙ1GG 2ۚ'{27Kjܪr[/d_Fq27{#,.ր]oL.;!',nbG0km6fX 6RJsr;jslk ~cZ1yؕ]0UmL`'73\yܓDHhTFٕ<Rs,(UP:Yjَ:и+N/ 5M`vrϹJ,C57eX.[V4c%oyf3؊i^c݅ Ђ)ls74'F|/2e@=Kx^F =jJ@buS{?U; \\}ksy\Ϲ_ | yPo`𹮷䗏o/%l~2^2;2nx_[Yy5!eÙ@ݱW `t{)$C g +g`ggC/W`_~2G`QGԼS7 F0-z֊FMD/R.oh ⟊!mySӽvJLSUj~K`L y~(z ͐2c`Þ*L`hc`[J?*'Sm4X<2w+p_ meOv.$T `gׁ} K*5;~l]KY]XM@)53'9I`oم rog _09 hJ!ohy0]U!^# (u cOr.{fn{v0']=س "<ů`hPNA:6^ JsGn]lov6W3x^̤{fs`fh `hcAH7&$Ԭ Cǧ 66]oL(wuR~`-,q#jk 6(,ອ6=,Fw6fUq7VOT+TnІF7X.脗X-5;o 7cKWV'J6SjI5kd#.NØ֬; #6|cr$` cx֏Xl)#\0&} Cn7ww8(}b\J`heP:{{FLm8pk´ꕍق:V,>Nbk`,{SpQS9yu`ўDw%/,z,FJ;9Z Ļ1N;\*V0- E)5m*O}/x*Ḟ3UKt,5<8EqD4/2>4hj epкؙfb5Oy8^;GDBĺ <\ ?/CCVLO AG5+A2&-XM¢w>t.'=y&. <I=jDֹ7;e 0q=~Y*eQcqNp2`ߍQ@ʪ/VԴ!.?[-Wm˧96d0 F#+WRCLVg^󗼞4;,r|g V@$Bv=Q>*dy?0ɊF <  qzBSd{*}d3GbĆ|ۙ X7ҠNXy1spW.h&0d00=fHdγ`vljX/YO# {EP.nXM? LCW(,LJ.RW:eilfN? iRuw`HHht f0HL0iXg U'L\op~1f0zw|"~%P`&Of/ )`CH`?̟tAK-'Ϛe`lu_[;iD0UN+'6@\L6gzėvuyLcڧ P a/:Ip"؝urDNK lD1LKx^d7 ?eUE 3{)w>[[k?7 ?g}%xW ^߷E i xB]A9ɑyܺ9Iַ._>fsX8ҋFR5<2V6%}"nҸ^j@ zߴ;ut $U#N0SB 5D(^^N*rpY!' 92WER#RרA/T*U`5&Ӂͣ35_!(*L& J! "Fg |\(UtJBs 7P~eUY:7M=]YO%xdo\bC[wQ)65܀$4g Ѐ0<%NTjrf%vcF<9`pCLqk˄/=cE2{[5bd#;Ei3uJc/rCͨP٭ [ `OFE@B<֑HT$ jnyĜZub}HS2ؽ6Ns3]ny &tyhS+ߐAQ3V T  = JF2AkS3 Rpc?RL 7` (cɤKu9mL~IgE%gDu;7MV$i`+H;W I9g6="z7tF9Րxz/xWlv'74#s+!t]3::{P(1 tl7`[DSr5,XQ &a m<IV`$UdfG)Q;0ǏEH xvl Ľִ[+3XDF`Ež`iG: .CQ#`]-L`ē:aŧd,*W}s1,cQz%i@7R8X4J(1y>9g V{(UȌ;Y}DmSb`ŻvuUd99{& )~>U#.NviciD`2DrS 9/5Fa57V`Ds|2Ü2 R6"9yy]&0'U5uRyv(NȎsh!c؀tk3P5ST=MB+,^WP:?G%Ϡ:E 0Q'~+svi26wr#Larwx9㏘WB*2jng<ިص)W K # o-fиsu<ؤ\HA<#K)ݖNV`AXS" 2p 0'@HdWv6T9-.&elX4o#wp4r健5JRL%yEI Ahn,@_GW^ (]P秊GX`wQk"z(iɏ#Й "XZxE,?M/<=orvS8H@kBD O V(xjWu?'5e9[s첇OME2% lTӕ?choC:15rqx^ˑ9 !քEQQ){o}gﵿ: Ygw;`a^7&}-t"4:9y |Krb0d.6/|[5ɴ&͗' i{p餢7=DEVnCߔT2MiNMRpAߦ8 o$0o馃, A8h~`>ts7ް-F^G70M.Eݍ([~1J&E9(Ju+uEQ;Rp&uFy묪M68R3VkrÊ 06c͌HD 8j 2l[XmZ~'#~6gMsG}h uP7W6TΆ0+oT)t73ێV{&0Y0iRձ|l5Fj)/.z:L UlП,> `_ lz7^ϰ~D\1cv gu'+ 0 =E5_:gWXba?f6` EJ6,MFivo."Ƹ_w11>- m42_ގ7Y)|{/ǼM1Q|߀ 0rǤR>MT\ochjrɲV`HE3 u`*4a\A JnF}_g Rg0GmqS .tC0ZG @|ҁ7j]p XA Vqc1 3"f4EL3 w|'C=QRP\D3#VE7YlUrS.[c 0H5`RA4D.߀)4u=݄hֺMIaWc$72\|ڴ`ɴv4)x0< mZEu`JހmAR̨I0#`1g}Y=lVIUT@h!yy{gefPHkVQnG_]zVty1\T٘` Ȇ H(E.4Wg D#e>yQABk+0/\z(]I:g1%Si+rWNPf-n ψB\G0e6oB1v!Ej7Ѓ ^xb(b(buǀ'x&q(z6{b; |YUQȬcä)Jγ7E/[ ӲlU0R1^dTB8N:nE[iC"u+P \Ty,VM͞%^f,HݘJ[d3])?T,xlvE:=drT';l(#񙬡 0yX̉g9k`V`rT`,_%R!]xXXS얲 >ӭ#Y &U0ܑLѭtIhNW}.]5<ɹ|OHŝ?ӄw.ބtmM TRv鞑,X<4=Tɡ+X;h|YXZfFLKjw컂; )bȎ= O1)835` .`Aڣ`s^D.:C+ tcW }Q.ϡmh*~>Sr9Rb;֨%2L9nf"n`D)`3<:Mob[y> m~](O1OQn B mj5 ˍ& b7,F5 {> _l'Nk1*S4.o  [;=cNL_mcQDZuƨ^ZgI\E3E~/nIʲB3 .^ =]Y9ZS{ oV~WbbQ-> ={J f5^RdЭL3i6{6qw3ƾrgBufF y?X'ڠN߄% $D*ۙCFZ^)Tlh( ^%JˡSCOjлʪʡxFϣ η# ?O % jr [7i`Z!*i`G7/yɪ֗VQ5Ru&zXg\Bf94+ a4o86]n`K[y^L_TL`Vfy7mV ɠp68 VLSO͹ O6̛Gv&Slc6 rT 6ȅ#N۸ߌa]&i[|+c݈c5tܭ7oǢƾ_00F!d y1isPcSs F˔m3m7qCEҠO0Q޿;ؼ(1+x| ?90%5+?1rshTk1MRFF|+VMh)^6Gx&u?;V]O)!0+`W &ݔTm; ;mm`JnP0aq*g>LIn8HvR]{ 1G3Da6=? i/I&&I *)L c^D` m(|W7 QNkiHc5 "“FxXDaeXg zL] xoI]Ҽ\3}(qkБyg˿ _ tŹg;n=e੬begZג3_ֆuFE%ocl_T0 DP6|hyx^7 a&cG7*A`0wss~߿[g_x.Wuuz y^7_݋žgw߰oFNo<7RSL IGjЩMExq<< ScMɄLI,IPd#L3sޓwR5/8a򧀇¸0. 
8|G֔\co`As& ֫q94Yx*M\7Q"vb3괨 Oa,ISR &u!r35 =3XX7ݼ1אKV0v1ua2/lL^ga+`0*R8H xL Gb*^*5fy6B屮VQ]3X xŸ vgd1S70K<϶ 6亀| /&VYa(g'x⇋"rv5hEvfLĴ@qAפ Vˍk 4]>Jd7h9rU*!ߋm|1FWb)`R51;yp2~~\u Ha ^fmH*|%y#/i`h*blXM+nּ\Q̮ Ce̾a' *{Ws[0J bQIƙ%)æ[:w$ڧfB'=mc lu3t, vN"<" \Ra_4O*?W,'J#bBkӸKt\7 eq;b :}eӄ* t+[9]HgNwt$`-s6Y֪?Vם˛a_1%qK E~Buy}2#< ,c;5iFJL r7څْIa7m)c^nY?/v;jcu.^e&AO% (y痰CoIgY^7,ѱQ7. bXt+ďo86Bwv$Ԧ1 /!Zd) 4V@xS{0A \嫢86 PJ51d{A&ŸU'}E UVmwcGsQYz&u8 #&0Ff'ǞsNԁ3q8܊rqd_ԤOzW\BO6F@@<þ 1>Ze*9DgCSxt-Ϸn0ɋ]]y3 SJ+ރn``v@6Q>Qu-xRt+״E[ 'g?3$:`q26]~JĜu4f< 8O49EuQVy.Nv+ rIn`Z*` :m`>Gqȩ/tas+8 I<7a|&7Egbx9bh熵L0:nq\6}uVLAX6V63 _Sn8q8Lg,+q)^̠+x^n+yW<ϮœѾ-#d%`j`F¶@gâe( |ႽvwXiƍؽ0EX1k_-o#8֜AS@~"3gFUmd+xFY~2~ g-GX._NӇqo3L7Џ Q1c4ݻC٭Q~l6jOfc~3j RvS1t wfԟ+P'Y&6=\IkLBy;'=k\}k"dp2w}& XC=H. tSMD;7J:qtlF.Pkˠox{ 'Zfxl=I0b+*xjЕ014ZTyN`^@Z 3|`'!h2A~ ,bFn'r?5.$(1پt0m%=l| Z'u60 {7H4 `Ue|2\0~M`k+w'1b{A 8g~\?Cδ~B5qC YNhXS"9}ڏE&᤟`kW$ ̓m܀Q["hwD*2&~B{@Ŋ0]_yI3LzTalnzd`y6¢qCWpY^PZŮd5"j7_OjbD_3&eS2ʍ.&*IVbq9@:7G;UPcWzr lhmCatIOVkäY yS{?Ŭ)d^ ckѾM$QuN)%?]L!@:X_!Tua5DQ4ˌnm%l99;ƣ}Gu3{4 Imǯ`(cUB!~Xc`m0Mr:.]ػz7UJl t0E xfأB '|['?J # 8st>'2A^ߢ pzh]f'wǰUy|8% }AAfI<`5Mzpg XFa%h=9 "՚ͷNpnf ⚩f9Y8p`m쑦g-Fyt#/ ` ~ `6@ X@U PT۶%5 `&-eig=}}`RϰV $t0 M#όLV̇&aK^-`"cbS ?`v40,@0s!P۴;Btqt0˯6O6AU9 h[[ 6Yr2kHmy1ъy賾'[n߬ 0G&xЍlN; })]jhWUEM'2lNCrAK.e(T-BjVr͸M~g3 chYXGSd*$d'tch`X}FT3e")^f0.tqVYE_DT3`VomlZms: 6dqu!?K8Y,>7-x^9 aJ%c}UVK >{|׿ۂGksW: ^> <R^4el>@M%(=pwk)`F[tэ;Ww2C9c!%᱋ ޲ <ɰb>(8Ƃt(D?p/G!FF0 ӹ\g]<(OTغ7tA -`<F 0gMNr]UEp-2F=q~4/%[s `B;&+H 3BTJ{7 6Ps `3XՐ3WETFy4SII B>0 bhVE)G '2 ڴ54`0؁u+ro j=[1AYC%@ ȥ+=WKa 0}`E cՙL,85CDs. pc.gչu0yF !*;t. Th'Il Ì` ܍Y>̽k0IXUӀ)OFi27XTF瓟xh^s8q^-Mnlcx6x7?]5~CPAF &n7 >+^mnH)u؀2~xj{ELۿɎ&5e'0J`-fnR.V`e͢758Vz}74obZ^ ꯌ+0OZkgaȘ 0N *Mn>5`@x?(NKdYлQqAE :5y^L`;kWy{Ix3Om%zdnfOU3ijLV8'{]L @i@' U% d[U`̘ jlbs?ՀqcE;L$[LF 8YCL'0 n胾2@9aTkl@SD\xˠbɼ tqA`Mq3qVݐMW eJP =S8,_oc'GH.7$ظ݁Gt)HL\8 O:J}-l,s3UCf=]9h𳯏B7dilm+_p2<\^۳Z:x>t v:{sH$t抅McOqAurkVQsfT1&<r_ 6 >GsVބB{(;h~F J\8D1EJŸ"BnAū 30 XND`_4t_%Y'>U$u6`rA л1fsK<]*"2]Ͳ.))JxPgHVe)z47<EUE<(v^0cÓhZ^b/'MlϙlzΜX0nn'F$XA } sI`ՠ G/;N)L X`ILskGE2HBgZӻ{uĉ>>%4FsH e׍\m>>_rq#v v8c2ncs`m1<3N JQJ:c7<ʴ_{"wuns@V£:dtW|q?%aYJ6|j_Џzφa;ty`,F7H^YW;on( 5.,YATϾaDžFBLY:Zߦ8+En3H3D0URZTIԿkVblXHL|K95-4A|h'3*Tz/ҥ崙L!(rX ؍K.`]`Xe 6%uz~erP8L% rW`],o w _K6SuK#$/jNT%/$wo:`OH4WmgrCQ_ôB12#2x^H !=tEE*bgiUzy:yι__y?~xP~ް9|y ~8~_oNy?~La 2 ɀ1I`{!$ `iä|+A*B |^<eLцp=|h24(y]_;YQ_Eӊ_X`K+#F{]er!} !7'3`_jSa)1XOITb# MtQ lY|i,c0q XzLFJk 0 ` 3-C4fb)>ӱg !=hA{Y1!ݨ NJ`X @öm)qHp`̊ ag=W;iS&LU̱郊n$[Ĺt&n@@vj.ld)0X#fvw Ҕ 0Lt0pORx*` VL =e:{ʀsihpG!L(肟 G fnN(nF6#Bx?邫1rH`UBg3*`mO`s6|EWA}l+tZ7z7vBQ{LY\;D(Cw\{7`ڶxhPFIA6B.иɋM%ۄ~'pn YsC^ZN1_jP0NCbyX'J.`z`1Fܙw!bf,6f^DHמn3#KrM"J [/`};䳂1[JnTNf1!H*98;]gKIQ(' iߦY ՞ ٍQ0ǖA*< U#Q~Q^ PLA:_x^KrF }!/|F-x,f&ލ W\}_|Jss?}b su=%7sIa3Iݛ~3vY`}~ ~0 bǚ'0j%K6cuLN~S~y{t]'7|=߮r2#hrz&`rAo3LH s"Z/M 0oG0C <| >dg"xy) ,kdl2v)-D4 D|x49a ht16o6 #]LQkGX{'ca8 h? +[v_ǟ2`Pܢu[.LmB׹):6edi؞:7MTTT|Rք0LGYaĜ!&5_wNc'xV,[;P0YO;/Ͱ;1;3SF)9Pa?.dϕlL l"J%CctZȖ&د'{ס\7ɳ)tݒ:FU(Oiabk%Y] YN_ccHqO3K06*_w&M? Ub xⷱBX+B.c3Yl~2,ྭԞgE|VlVMakn*-L۰zj+3{6&vk9 D0( J<l v;#_MRxw޳ f}rcY|CiXr ! nO;d'x^*AM]؃؍J)x#.zDP /6p&5'O(Kx2]`?YP'1|lG?U;KKkX>9X*6JBԤV 0<;dc6Z(1L z(~ME 9~IFb);N즿B117;! ڴt3yBxI[c}H#;K`ʯ0&+% #d dD¾հH) U R9mbFA-a)$ c1v+?>)܎E_񥂞 9aaR 10Ҹ;l0,j>e_I#u*`}7,axAmtmӠKz7'¾>bѯ/́Y+IJ&YM a 0/([)0QSoT2`}ŸL+l?! 
ɘtgm'x 0'x~vCH3(&MI3Pť< T3]+ߟD ;62C[قgqg :{T_cxI21y/Hۚ`b!6q~%%y:cfcIOv/Pa}Rxt D>7w <68u6;&)4@x[|r'[{ @JG Ɏ&)c8E> =ehD=:LDi<0 ' x^AFC>[ṌֽYd23Kszss|_/ycs>}?\B/=qnѡ;yQo/ͦ6m֍Cm(/p;ų^0\8 "ɑgĻl_?N2G`>B7,m D C^㲸{lա;pǗ>[!Ǎ +|`ԋ[0E >.I` `ʅvπ%/8b{XA W܇QK`̿B7UEc`HJ q@G;.|vG;x)Cc,'0UI Ert N6&OKaf&\3^h+AI"񇣆(:!wbU#1מ3@*c+.;hTw0!0gԌz81 xv177f}RA`-քjFedLWR"xU\<,Z8v<`#7,'&$n f5XD!)}ag"f^&Qnmy8t$6.0VbyMR # _aSμJB>;ɸ`qj7&e8zj'U`]Dfک0mxrLYېE"bkR E}PF6f bʥf?vI >cۊ6n,:`cվgpZ rv!_]P\quxOCfS>8K:ٵ .'k'0@F-Oe͇<) Or2zē6+;G^Mn ePTT6n -]p6+OލV(`Mp)hM+9k 9$s iEE2+5w0+wa)m1H;}0}53{j/CǃBRnBw m~7" +`≇Oh ;+ \1Jˠ\0 = y_nF<ОT&GAY(hl!}>M;Eׄ֌% `2sCF #脣wx ֏Qi5Ta.M"P u]:g*crt^Iξ Id^Ұ.]t¡)q XDlvH`m!v+|71BO<3J[F;?C|.5 t;8ZȨ`E8"fcC= UyOF/`-|&hFfzAz=Dhס |&e@}MA ǡkT#3b3 ? %aΚWA *AfXPIex9N#Bzȹ+xuÇG}ܾ݀`n.f}.6Ӽnk<|V*bO!}nB wX\{?A` vq lPT5{[|]6<)f#HoyKp "h՟4x}Qm"l~^hti#N#/5y^`؄A4R3M}[T>]k }c Hx^ݘ1$7 !zBG:؝1ry |^y~s|+| +b^p=SC{@ZvQ՞gkgC3Pf񲲝$k f&aTue|B 0a"fN֘ٛc"akZbo?Ĝ1T`%oZ9`k\a),;믽o`0'œH\d y0vj7c~pt=¡N YPuX<", X4axۄXYKn/=A\?<A+tJHq`\X`듳)0J6 ؗB¡o:una }:[$aH}\V8L2Hoxϸ`zr(3.!=-`{y-W'H^V V.=l8SJ,)0%Wn)P2u]R LYle9]8QG6{H36C*T2ʱ 0MYa3euC~CNz%dpwD|5vl-f`f ?_VX1ZZ@ deflateZhTREE,Zx^ɊIan'[qƵW!xW|7jsNDFF~߽'fzLAO_?:?P}g?4q~9Պ+?﹚G?;Oo?ɫՏ7woX_">^uJki=Od>'}Ͼ|PDӾ׺'tio}\~cw.~⳾sc|5PK}I{׼\y%{~DžׇOq۪x!{\_sܳBwgF٩'_陧sesz_XwpKv.W%/;}ಳwݺ/+vh -x^U X ¦ _@߿6 u[ȇQεrSnf)^1Kr=ÿnArRm(X.v Fg hRۖߓYlӷɣ$q]` 7i ;I$]Jo;@B~x$[(P/W۽K{ ߷>c:n곔Hm迷7{bS0A}0'&o0Ј[e )f+"Fz6opZA0yjjT_ L.Ƿh.ZW azD =(>c~-U+O|NكŮ{vCv]Ra#AQx"xۅIG&n{O9b$R$q뤃ޅLI2dg IӦt p,tWHs͋όDEf _DC[V1ʋ8ax"c4Qwq2ܗ 3~dN^߼u=kf=YnX]{vGxEBoD3[:ݛ$o[O`KU}2.8;z\ʐBpy$`S?q#s)n"t(ueUBkBe d[uQw -{{(FALSETRUE deflate<{PTREE~){ZZ  deflate,Zhfeaturesbarcodesdatafiltered_barcodesindicesindptrshapePTREE\Zx^EyU{/chA*[ CD H&?ЈPR!$)J*J`KPP<3x9}sswSʲ<ǀeubuvcؗY!|{eCǿ gݏ{4/O+"Nd<ngax<|M +kz'O{d|wcc`B3|@ `?o)`|~gތ߀ ޑIVGefЖWUH>݌nGz=[ `Ÿ v:qS[0> _?` &2']ϻ .`] ֜~ 9ukAO fg巋%&pWƥ9?a}|_/k&E賛}~g6k73w<ՙqE0{kt~Vb~(ng(`Ms5:!fp͸ \㊭֎_ )t]ѵS^ #;BgU'~.7m ,^c>uQnc_1_ s#F(TҐ?M}]'`!z-g6L/SS+(;wð;$ǣg>iҘ_,OO0%8k?EjKkWFOyڅq=xGpaW)V'ײJ{8lG:{KZVN `Yjznt.)sAdM]_Wt~GF f< oIMX{taPLEkz,.MrMkϊjF9ovЊIrta|,8ՁPfݑꬕڇb85Z5VzҙsR?x_W ˿_F}n s5Jy+ԫɹ \ y\&)݂:8vKCX_'cwg͕ 4}w<3BS{~|p+g.Xw?ԅ_i~ltT\~37Y?c{t[=+|"X+vNoվ mSWX(Y?˒kNZԧ'b g\e3Y'^οI{rFq]AmPןth-J;zbzU5ˬ6ꋺ;JRϐ6*:K{%sr?y@!<qxB$||ҧ\}_7;'ς_ ~c.) 
R$-B) ƌ!.K OeEJ0̐RJJ%zAhHB$sbP}Rar ()YBi (2dG:L4D1OwW*~l$lCNE,Dms;P6RtȻh)Ғdd# V Drn}LЮ J(m$+><>BI;Am|&pl$WKiNiz4Yl ah{2#)W@6ݺ1%ZS"UVn S~aea节2G[0ֹBdJ)gEVi8'mUcN6i!\J'F ӲLNc}bD)E [-Kc3gZ)AwwxbEFKm)ZZ!wRWlsJI˜#jԁEt .j/J߻8 " P-^]VolsIRTeTKg#7az2HS@jL .4D8x"s?ӷE)e?ׯWOa2"v]2gۮ,rݜ4}W-+) /+U5$4q>sO̳_2 @ 1?_}n?Yg7<9ùEqgW@Oٛ8gD\@e0|ߗԓ/Ӱ~?~%#Ya.$MRbBH]r>&̔mvFK3B-!D֕&OX1`SCF+{^mo>,+!bDBǐ1dQA OqG *Eߥ%ZDwQW*UmKea–)FDj \&HY!q\ C޳}.B2yCVY(NS!gL#Ե2}RB)A{l")6e]5eI5ƤQ]ӺQB&oBǘ9%cMYueH Xق(c*}E(̏hMl.F )9s4ZvYhK#vw9ȌTLIZ`BrJ8S޷ I*>mw]JQt >MXE]R> u)#pN!2,st^4l?вJ(5m)I(6NpdQ7M9P- k[ZBUrq(%L$wm~V  cGHnR2;>|WMP;7@;޻ᐼR)dyZz ژ+LH'ټ1s9,Rz6Y8ӅL1rpvV=CzULWuLբîv{HtI%oqȩkUa^^Z‹sO򉗦R8ӼӝPNBbT_dW'D+蘯T/vxfxq_:a݄̎g?|N*v@׿XYVy4PG)E &Τs @qDAS$?Sc¾)rJTxW ݻ,VK@fѧ@!DQյ*BԇC_*Jsqڊ6&eYYLIfxQ;kt rDjC.*f]e0@$["w|}!Cu$ZEe,UĂvX,$A~4%2kR`%@g3sUѶoLD|p \)1` E7!EZU#fu I0ZP Lc7C'ץBWthsJ"r$rdhBz=:kB( ƐP]!Qmmq#~T ֠Q%O~jң__7F%II^5ebCES6FXFTDæk*ik$BzNB@]SP4t`pP[9d jLm.]qvG׵zҙR9GTFCCd}RZI*5\5*rH#Kg-m>|UIm[[kB+#ZBI&%r)'-=H/LiznjOKHF*?t.58IYHq?gr<9Vгy[^/wwa&+ zMU7;"@(?~nw?7UҼ[~xS#zk/do׳"3 ՞\p/xl8]{ۡsUsC>_Cȓ =T :ksy/q@ p|x_/2{?Em?oBۅʘ28 [^XF:3Cb䔓.]V}r$ 2I@I*b9āRr*2!cTs0dT;)gHY JBζcf&a߻_XP,0o[_mqR>'Bk.q)P!1K:ͫUIo[5Ki]1Iɬ;!RPק[nz]ݲT̆Gd#Mmv~FYƃ nmf3xO(zUlvB]Jcc!QLwwòYe]w˱2@ B֋M 8>+U!e#M}{؍_Ԕ"$`,,Teaw H>vT\6Z9wucpeE18#aG㾮˯Jq۶,pJ6{LnaUWݐJHtqX-b4ϝkU0 <|seU}@I0+cPIM&ˌ$ պԲ,tJWXB{5|\ 㩅6(ِ:M<Cj֯VM6n~ sN6/Fus ?Mv٢*E̜IP1US<$3}8lp7RF fwe*Vm0s kC1W¤Ebj/X փX<ʊc*S?tz.e"yICHaiU9q$< م9TZq*f.6uS*AF ā@@1IcȴwNiIH J&RCtVT)p7߳m]m`1/gU˦;4&2a({_hlً&GZ(G 2F8qXՆ0=Cj[hbH(-~Tk,f]Č7VόBamoq`eM۳.Ej%8f.G8H1jt4}t٩ڒBI||x&FeY\!eI1zf딤ui#ai-#gPVf˲b %u9pFnJRF9rna.QASٌ!RqnwuSI9Bƥ+$ĔT(@& J]ia|;7Pi)}߆.+/:74LjչL1^ͥr<ǜYcC\/KMY>7Re ^XSuTeƩg.pk`|?K< ǹBC=&MLxOO'nv|E0݆. %S@ciNiTrOq#>IaFGq1 >Mei&gcy~t7O>cNjJ&.Ģ6Ow??~~XY\` Q*(H"2 Gn-a込(!āPU"(ݲjYb؇8Cjd̒dpi\5mHt# hNZuVVw9jpsX"c,z},4mRkL9ZAqH$4'_ժ@ 4;kOF!q]/+Ӥ6B nb[-3vV+ s\iB18p%,ZgU|(T`Ӳ2a"Id*@Kg}1*ebR"zBiQwnU6qH}~Ub 1@UBA6BY)-A*⩈~S`jw#YewYK$|ӡ))! ,DvKm뒮͡dee d5bq!d+Yxl߾3z݅F؃ЇLJ><^ql~/7HS YmHH UѶ,U2: ahC7ֳC,X_]X*I_a E8=|f =|تnqu(aUM-*'ȄL8y6J"FI@ ycbm ,&q&acF?LL{磄J;NΗB49؊ \4*ļwVJ Rf-6hKۮS 4 1RطZn] IiM˵BG|V@R0dFCEJʙ}KiA l2$!dn`4LNQ*J"*-KKi$΅ T䆤 { wjȱң;RbBږAΨX sY5hF`-&9KK^.^ m L޹Ma#Lîغe5mUIp5 a$}'|q]z)U{RPHڎ1ԇ5HQJh~zx4>v(H:#ֶ\j%M  5EՕvW#urY(-i uoXkJSV|ncRzHPB )(AiQQ*۾"wAH=F՛*c+f2Շ2&HR/gL4凌8Fs8̔(9ė} `1û;(eb᧻R=NRDG}/B3͍盍 S)3/NrmeV >nLY1Xx~/eH9xcJ=E_ r5:0>Ѥz/iy ͺff8'i-?+xRa3,=9ΠDfA Jj!6R!&B.JC4V%|trw;- $s ݹXķP)YzpZubY" V!AAO CFkhJ1e!1%st?=vScYJ-)NHE{*U^W%߅em;B~vﶇ.+!-߬m)posBCf}.dbemx2WM1:=K8H'ҦvîR!Ċy^BYjDV-0joeF?`׹O.ZR4BrCpn(jaQ[A9gym0aލN3H,n6r$K_]cUԊI}\c.L:aHK!kόeNtY,J ʓfl.E'3c"J>zWNPۻwnn`1~wYEn0~DߜՆ3NǩY$&~zj*woU(ǟnϾ96Mo>޾3xpdepbgTZN-y3y=yO_GM8V;Uwox3*X~SzG[nJ eAsc@E)#,I?˅ cDN(n쇂L]zAá[-=ncȱ.MbBYIx8h+_TSʹ@Sv$(BT$Sˑ>.!}|HR%ЬZҏ8+ba Q}$M!R #v+Yhy2V1_RjPhJ]S2d C!PHjg5L%.zE(aQ#)UeѷaU+Mi(>aCJ4=Ҁibl}OZᾋF*fxp҂ѡ`\xUiw,GBd,J-n[vIgƐA/ B`kap^*BO)m>eZ4r!FO XDQrL2K%1F"Bu]8rxX{wBZRCOq߇W`:Z]l7_ԃ>JFX؄)tg#k&MCS3s: wB> d }d W;;OWIU$4bf2x"o_R?5B̼1: ~ i$L/F;O! -Ua%$K){!bs}ue Raskj;?6[xWZ)+X^4m7Hp= w;Ɠ>Awi>~gy.ۇ7_^@==wWsczu7ӠO9Z*Aټ;&&//1Zv /OCG ?Bb6F"7#Gf7R׿y+|e]ZKA3N?W}#HNyϸ9/F U,!b -jba$\DI(cAf2PZ32rEm.:=J%ZBǪҸYuBVmZ9`v>V &!V.h!!w9&erlЅ0"Ä"ϛ@y6KBQOJ)$jP IȘ@BդPHIDATڍ< .d̻#Edh`Z]jTz @ r-j܎N 4/$cڬqnLv~ڴ.s܎L&gmk-gune)2SQ(rV< (^΍2i]i#Le=C cM΋eZtnجF0"'!fRi\K D#r9yƪbظL:!3ruUis J&n}vŬJ\z  ɦY(ͻWRW"&/[Y_PE?xe] A즴 !H$ fZR%B1$Ls] ])fD1leue>n fIcl̰M g?u&xH=>een.]}ICEwݎ!wOraB'Cꧏi(,೘Z<o ųǑb!ɿ!!io6:ƴ~\__U0og•|M)oVۺTxU~n~Trx3e\ '/ H,H&]`2 ziOz ]C4"zp^fd^8O 9'*Cdl4f+7XhS(n[$dVɔtP9 axt] ڥ8b "Ibe ! 
HJjS'b?7(ksPѾVʀ62R.4, 4,2$ɳ+$@1rff&[E]$ IR+邠9k%%gb$ .ؒHPi~L-KsUFSmHE]p¾q2GL76ULc?i=Ȉ]v[^UVbb,y)]rKmg3 IJ2??m6Vi͗ˬtዷڇ nb˹ MUZEӴ>y%UطBA"~zHNM.TaK%lxY)Ds(YDz*m ڗƴN *M;l72Zcl5sE*FJ` Q ?TE9.VJH(X[8ea yѵ֠㛋ZLbYܵ,)D]s1K/v2(M6AI Qp&HC#*$b$۶u%/>e$Jc¡OV8 4*Wz$8?N2T{"rPha (p'Gz~Eq vTio%l R朻m_Вde 8NW~8:L>ՀLqrCm%Opwsww?jy=1۝B-LJo+Vd>(x2n3Y.<ɋ+GA2sY _iu ǃZ ߟhE¬cu!99޽_2j! 䂳u`vs[F=Y `n #r2 TԎcbURFi ª.Yy!R CfWKb=ˋH>UUBBLfX&2HI Vcп1&@ 0cJC<ʒ#.!R1 )}Hh:9hT)49|%&b7֍ZQ+IspFƐ)mϵ6Vl-hٺːSo=mE=R.1ǧ.D`~Xhi17 RHX̋ճw.-kx}QI@dU7XM43+-!BF"fG@5 >GP6)GA m)T9~;׈Zu >b#WJ6̠FUYIТ(Ʒ^7/C8?c_h]QpqpmTvm׿\p"eROO>+-j0Vw*vd?=|R 6(i‡V'f-ٟ8 286C9V a>Bu^m? bJw?jVW%`{ ^Y474e*w&߈8ڏ\w ?sM_9"|2-#@!ȗa4sT<[ϜBOMWoAZē{8kR|7D:c%=ȡ.zK#ZfțFiӢ^Kc.fZ.fcLy7HA/mWfħfYͰŲ $jg>xUDjB%G?d/HԅWOM2KKT$ywrB٧uś.>jVإzVuEO\}1:Rι+=0Bu.DJ@yR.2AObGB)yƔR wg]j Y_}ӏtd^ Rvk{=k!T=r_a]ln#^&2C;#T"fI 52U/W 9'3.*j;wUfȌȾe!3 IkB$ T r4e)X`(vRْJK Y\̋Grpy"sJ |VhCa@SX-DNjvs"M9ޛ~^$C~]Jhǻ狥%iR6-Bw17GT~eUTrWL VP5KmKD>uoE7@Q[a(RӺZDѴU&Ͳ6۶EW3h\OJjSƖߔF8!6o>TGwa㵭9m7:1exZ81xG;.ړ'T-.e#é>KM 6(8 75N8JL. 51T'иp_ x(b穖h38b__´1͊;DtHgi_?㉂0G߸ö?( f#[$QZy_N)Ӑa1R M- B\ВodB)B(DM$1"sDF$cAFPJC;I4G昸Q 3&+uq/UR[lpߺ]6c]4Zd\ UQXĔ7˙ @c߅YYD<¤j' Yi2v yne#G2gL|]j%k"vLiR>m)baYYI. NIřSbuy\P!k1Ys,F-Բ.u?\Bʀ[Iyΐ)FoĈR.fZK,'JB. 3ޯڙ'KkBPJ w! mVj(?oAt,L3YS臨I!KvhP6QY $mKiV)l^rY=OJyjGX>}cڸlOvo泹vC^3sti15^P~ w|G\ =!+:77: n[ ]/9ѽ{`)rbȤk}|UT9NHZŇyGq,vg8}P'q2|G'>-_{?|Svc2U 8 ۭ^s" Ykŀ˺ VEi,*7]MD.x:NR*fHP*`O3 R G:rD?C}JM*Y*}PZ,*RJ93 RLaSqB#B̅Dr[KR%dVeRllVBU]=710-~ bKspPh= B R%J%*^TԢO$[,f3#kmIasYUCw̐&&*#E>v>"'Vovri7 u`˫kEԙu/ۛj-KcWW@@VF>>}d ê1g ~yn- Y`1m~m&6Ur^iiS0#'oם[RҢLC|iP*AP YB, UIoWDpuYxfu4#N6™3#ܟ>~v-Rxo?|zW/Cp 1zz i"P: Ξ}ϔ9(q<_| ﯞdPEM_]]/fؤT HvE⬴(T@vnXe(X J3ab1 JݧX 1,2h@bʔ.PJH{]*x8 %*8 ZP腮 D ` c$ћػXjbe!bߥy->f rfU }vTZ+DA23n:2j"LB"X֥Kҧz)QR2'TX)nPג>=fS*ܵ4\ e$VÛY(f䄔u]a墾X,bbVˇj\B J !cTր ,!Ei f$%*Qt?έUd -?s8X밦%z՚u)2q%ę)ȧ{WHm49wDB|z#ʛ ! 1e}e ]jJS%/gF0! Ib#sȁC)%Њd[(ԧn sL !=gIG(1QJ>E#)b\΍ !r$.DHNb(dwQ$'T)s0:l+ilL1_\ɊDU~yZ/:`)- "ZZGfYus$x鉠d(R!#)dA e.K |FZl4 e}h7YA 8\TY(r\9{Zw1nBnݐ\6`Z|_ ×]Գ mW6m!2s >EYg A1> P pv˽߹ >ûxjuC>I)H I)R>?Jil\"O7_Y,Y\'ӒB 2~¤'u&L򁯿lj 9E'O_DLj[_lr]XHaApWDK"!$2 "m1~qUs(R. ՕQΣ2"k;3j\imc0J;Π M.PX[]H -'Za5d%*pZ |wi!/f&*0Beie l 8$Qf-bemjqٖʧ$}Z ٴ# .BVڥXhy <1ڼ"$bύ6X*HR\ZRR{^o~|Ɍ]K=O~y@I˂- LM3OOm?eWvQ*]XHLhv߾s|ĮUv.ۅ>>]<+e0$Yj^lǧ dN)s1 ;c[S"Uۃ4}Q;"G.scڟ{upbEsN9M!яK/+81$A͞L 91xl<=gi/_~UCSpz3֦9sN Oբѻ<~?j"^#9GJ@'Q}dazGb5Y'bxN*O'IgϙAtsНy<4ٌ=t>UT,* #!DUQORB&.4Mz$ 3 RzOs5Qj=J)5oE6}_2X1IP.?R>'c0;.D> Ab$[p\QT)9V%@ v<1+Z$!rka<"t9/%iҤ#BBLozM8\ ˿ ojH?<&)q>ңvaF6]-!D ENI6 3೫Mѐ 74Z f d\2V>mBup(8j%z+6pNcҠGS0fҮv2#Gz^v)8O,02WPJUS&0. sdѐ" }0VEvZ&@Nss ʪR Cp@ (6A> jYQ}Dʔ"SdMVrV Dvh^O~fLS\X%YzU8Aޏ>|ǛR@k6Y [ھ{Y j-8F7_>||I|n缨ܨʖ ܶB%Xn^d4j?_yҫC#Cu^ {ٽĨ}x1)cnV˷˲*VN_fz#+rs3ه 2+sxLp0~i= bJ,y3 `Y?*߾ `F̽ECw$5!$ fΪoxY*[R2(N;"cDu&bR9m>- ȫ-YJ[䐁N^֚L| U׺՚ m0TyJ7R |Ru\_46"&bYoҒmc34UeK ˢ.E3B3CՀ ٱ"{3%JG <+"자[,cJmschp*=t^`X}LֈW"F)~zńF1x/ o]IBR Y?߭.ꢘuU! 
'vܻEU oƧlDGqì;ǝ ʔ\D_!puZi1\XӇyY ΏN%Tz)4J+ cJ-"1zЦLpvH%:ڼwWب$Cx|˳+?x|p_M{vcnuUvMS7W}6O?|:5]VkuYN5Gg3V9힉>+Y)Md51/o_~s#~?˯~owR3H^>?<=wW~4) RL:4'`r pk>>w8s(Vw?~yJ@,$PQ̌."ȐbpBtJcL9R)}Lx9"Bt!>.++#qD 9R,,FdY"$S+'* $[rL̳J$$8Ȱ&Pֆ2VEQ?:ǣK bd RQ?5s qVO+ L SV9*H̙r4&$ݲlD BEZg!5˼Bq`_ RȪ#c c(gf9&U!%vxseC'3!,z)%C" {.2(ԔSہ5Re \%TXu(s]s:_^)AXtk׽g1ǦWJ31S{}Y0Pƫe%o76i?}[4 ׊_|s62 Jr^ "|{YYVg#e.u1V3SKh'\ۮkagXU>e`g [~QH& ^ /+Ky7"QUX%JL0qR'+iב7u|z}:'6ļs#X㹙<~8ovy1W8"\^-޼Z^I8Ҡ?!ջK)o7eQsIrjdUô m:psɇ4=ⓕ|v *c_}bp1<,Lu=<92g_qrʧ&< 60â\3QjM5<^,=0]zgM!А@}vPCd #&ICz$oXHOU]7} AA T̩Go+02#LMB9 }e?"9e 1Cv0 ZܴIkJ9KIKJ 3T5BD@8bEY>HskG檤󉉴$bbdznF bHR{~ŹmǫKLq'I@QRHTxE|!d[mD)`,#PJ91h|]J"秜ޕ%J}i[tuQ (A`f?gu!% %>>6}mMf* *nn)HBZwsRеCחUyeheGEڴө sZ2$wHx٘̂8U].jAۮbQ>[+Q&͌(.U1#mJۯ\*(>~s|5W]li Ƌj LJx;VϤCyy8=f~ir2)<ĉev #.|~3Rpc7L-I|ΘN4-y䃧2OWQ9DuCO$ ,L>Ys~M3(@_ K ׮EYMo/_7% 9Qh͟Ą@Y!rfS>&,X @QҐO[|$Ԗ#JBj PW:Y9Up?.dIFכB]ed>y#IJB" h#.ʶuuo7"~VŢ}MRɌqg jxM,SJc+#Cw2DȞ a09c/k{Xü .FHV a4ZAkX,u$iJ63殏WK =VYd*$vHˆLzHWlj~ڈg'~ZE%Y0 Eirp)R4"Ri9S8G,~\mmS([SPSj-HI .)ay m.Fa5 PdToҮFFY+D)!w #1ztCPI͑wI]raP{D—>όbfU6yUUfgM*ڂ$Y ~E)520hi! >tbB[ϧhΙփHٍJ2#O?:tcLNS1fTYW~hYag,q^EA~&꒶a;_2֊ _\/޼ETVbK +h[n}5(^1w F)F#$AˑL`\zsjo\X3x`E<ww Ob ]LxI\>yYOsK;!I2ҀbDBi7ێ$--H+4yE=tn++23VIl@njtwsF* >md]X_)БyueG<JCFQ}*V"f,L9^2 m*b໇PbRm]?h Z71љPuE|Yce&U%!3 %.iQ"7va :M(]6R!Ov] \6]^#9=t!$v1 @&9SnCӢT7h wFє#)Q"JVV vX֕jUX9r͎_tuR%JR+/l*eSҋJ7#'ё#N눘UMv,# 5ks%ƨ!$mvcb| gj%nkMb ġEpU]9U&ROɤ!]`DbԔIp Vf[mbHfFP?oՍՓ'Bkq3~` SPԱĭl?P+oИiAU!EK߆OC &uTQ,Y+e^׵_:e14Yf%DO 2t)q٬K'w.M޼y0'wwo. }: xӽ}2\juRrw:Bi(N/6=G fgJS0ߏL Rp_7Qbiw#9H FQGKCFҒ5(npȵV]CjoʥB/Rժ˒#rUe 04qt0\;0Z)MmѐEFFl@Yip555&PȓGy{ȵSF9!iueL#!S>fʏ0YhH +jxWt.MPUX&+ךڞ^ơwD*gnkg-# qgEf?6.4*Ji\0 Q mt Hi4_C$HnA> T!m&U.ZmC `[n{X6+W6IR^(fl*r^aHODbQ>ThnpB,JcD1q7eUSh@ s]Ъ4DIf2 5vQ2$OzYgE&=:G;g tJsj&ԉ@ͮ~ڴm7׏qFfG~_~˫ڎ<{4'%3oychTY\ԓf.s4g{~&>BK1WWw\O߽}{w|WW Xz O ;֎N0jNOfpv/ 4̓kl_Rcw03>I("dٮ 6aO!Ri$'!v@62 D  lUAdd8' d.&Hf4A*g-&4"Ec4NVi+TdQ }^Ub~^!N5~Y(F⦥:^lv51KY,iy )srNN>'@8^%FX43H:&s)r(DI@s 2$FY m҆ u1SZzQĥl'q>IJPdHV$sKBcF<({@1Ei7\.t"#GeSh;|Ŗഊ).*)4,V*T=&]&}&q8 ӵY,#?}ܓ".rK4u(YCb=ᙳ9&42Hj2t58\=l*}+C ج:m̮Vuz9p_WK*|CiPrơ)J G4 M^:UV \_}.$86$CI)!kk4e)i{i0!hLt /ן}kPU%e.ffx3ٛt)(gùF|S dNd/[VW.&IðXs@1slnU |z7 W-O S ϖ|X(j\'RXBYMog>YK&ӟ-EaF ؽ\񗏠bU  J(oxɾ|s\բݻO/\y5:]ycysrLVE><OIx`3ګBƺر}V^`yekEʁ&A'D]Jh 0`3DC̵19 ȍ5DVXbȻ6y oX8{0*IHXpg@)d :$YqԮ Px{5T TڽQWVmYEEvŲ6qQj.!!G V,#cŀY%PZZJiq:8:h$*R1ŢTSAg*椲(媔6λR_ iC]7*9RWۮ+ ZWDeIp Kh\86qmUQ8ׇpFs5&Y<8 9GtdZ}޴|ކ+tj!!q]l-2Mmt b~&Fћv$rƕb1a*|@V[cS|uEiH=rfO>@XO4xhCҔuDm~P[x*rQ]pU.u}w)C`·w7X= Tl9VizGNu#3)ge ~϶Z@>l;o14nKˆ>;Ԯ.{8!3901džtr2Oܿ8nԸ0p;߿o\h:5*0Vjtbq@SX'@CD6.ǮƊ@-oaHsbö!VH*MDB4m3ެuNĜ^LXnVHRrĦREQX]ӨR~`ꪶ7U9 ɨI,I3FBc e] *5ddFi?6eW 5%yn7!r=[M"P*ĜF$CCN!iiFm d5x; )Elz\C]f(1e3 ui߽.xBVT WCFP<>&> *'{ѯ֥쬹*Zs>4Y^zKZ2)%%#GEVKgli\| F e#00G4d$aH>T^{Snpigܺ_/V%%ATAR\-8d\ƪ"Z߼WrK)W ntir5ɒIoZSDip)&66އ֠ 5vo?)=Y>)kO ?_z4m` ۆwݔFt8io]iYa].4>n61 %b`X6KGi<"0\)('\gn۶C{F^SS9I;&hym}p٣Q~^1p4|qVO2r78'̔S &<-vuLC>}3sm Yp~~;C7`$wpζ%×O7]]SKEܙ_>ȧ U_x'K͚(BsNH#'EBdPL]O%L\"hIY6 :0#q*:j[ڔ 1֒$FB ZziS2aPjDM ]At{r]Ҩ.%Ed?1C3k% A, (t s? 
KG0ADa"j&d,!S>&y6C&dѲ4EYHL8R9Ámu!qr>6~Y8ZC$82:LZ&ˤ^ 0uC^U-֙îUΘ#pwC@Cq\ Q{,@4  =^&?َܳK706]^>.J@K v]4JmEQ,Vi>0յ/KX$5UgZZ!F TVƀBH!#e~$w㦺ї_ʘK7FʫFvC,&d ]zLjQ9U5]W#}8$ M-q2 ˘AiYjkp{~,J4}(F눅%TԦ7m3|Wr\8 =% ֢q~/K i҅9 8`-U ^6}bK^)2tE.0KficL^'KsqbSOQDE)NG #p/L,lx7)o~z^^`ѩcG?_fpXsIυZ*Ѿdc@?ҥ|/¹4qTQς?~D[_Io༽M[%a8V(g?~~{M}780x̫nP/i̟,maB]uӣ.EYę0wFς ?V'~'v?ω^(#2r3!A>S5)@v/æK,+c ɤoUx?pDyv5C4$D_6dͧaPC!)2^QD ՓZ2_:&4|xh^.|j\$Z)*# & `dcu Kѻ6XQaXE%ƀBj| JfqΖuzsݰt8$_*h߁Fd7[?D(>*QZuqIbT΢6Ʋ6}ק ,zQFa)?०o[GWW0 iP,yp\Vv|Ynk3AS!jel24/L1~^pJژi#*k׺~hsJ yg6tqwUAS6FEu *9,a1A$yoMƨ{~- +H]e~kiMeG-ueB]k1͡{xh&ʣ\(CNc7/ĸmz?yt0Kpw}eY(jdu62R_k6+k㼡EY!GIcMԢc; =H^h/xqU] dVpέܦ/u78="{y__ -V_(SL|L"yu~}YXgp]/x1Ǥ#/>R '(Lka/xrOb^H,V7fU(ǝ_]ŷ.!Ez$}9O YǼ>lƜ>o'Y&]$9zY?m;˴N?<J)!!<oXX'QAqt2IC j3vf]xK aLtJ 0)127`Xk2 S"=he":tiQ#ɖ놲 {32mƋ2mSwڥS΀R 0i4Q=4Hv+b"yCʎY5L9s r,:RwқRf;yOv*BkTE Ry)t6Vcᬝ: C0BXV3}ԇCEcqŘI+ gu# %ǜBoZԖ޲@ߍ(IQmXy=/vH(Qְ++FI1.t.־b-~H YO^&0bSO\݂.(^C8\-y]Ljn8(vD)=5CLΩ>EmVc]׭&4 绮FP1bѮVisLy{wKTRVe7Ir /WCs(LJƑ1/V~hN(:GGC34M+6[޾Wg(Y8sRh6FmvM_.z+k nooc8!? "4 DpƖ} XkF?/Ry~{XpzP—n_SΜLS7'';YЙ8M2M*D CLBNkh j&&Q@O]jY7G<*՞ 7,yUIӌ?†˅Ni'9M(ihY\T*Oy:M Mҧ5ªu-s$oA[^sN2޶2lÛ'32Ng`9" !kda!1D b=n9Q*GdQj|qerQkHYn. %Vr]`Tjr )%1kA&ސKٵԢCjڇ2ʠ47aWwFKv(M% RZ{}n Z)2?8Č+/_^iņa(}3ׅibJSQ a֔uF y<=9oo{JL΂"KC!K_Cћm3a!İ=$evz)'(pnaXKsNEYt>ߧWK"GKRe!n6ۂw_Kꪸ4)dPM%kkW:]׶EYʲ6x4Da e%}H!o_:R/zECw\3{:L+pXy}K9eox?n$5+1zd17sC6B\zam΁D|I2R>C/,= yh^G\ /ҋpĀrsViקA7jJ$0z,*X-AOm`vkd&TtdswA!ZG0-.&TddS ƥ7eQXcȠ2C}CUo6pԧVbDI#[ֆ^C[WPM,Y-dݴQiA k3%E Q Mۅ! }bCo,B`v :YeC)k1 zoʲp/^-}9 krSvH4VW`ooWIAn۸צ0FQi3gWڀ )BrǏoS>카M|emEʓv4N>le -hT߆UY*ѡNbD}#Ф X A-Hof9_?8'xzl1'tAZ}NjBEHm9[ q`ַF[k%)+T81}?6WF4zmx\.Rt)4()I5{gMy䔒뫚 k~0Z4!9ը@>GC4wjA)-EwjMHygk:qzzuY$t{sc`Umf%]E8ME9Z]~[OW_-Vk_cT.:O/))Qj( rEQ>W쇇t.]^+edž_7Ze<}ӜT0ٞ@w*Beǯl]ΖU*4÷j?}ZJ<_۟by?~Ykg' -gg>©/p/ aOOE{K%_;Qا,ǠH^-#e*^ ( {=kDSN9<~aIYh(F4}'msʹ }n Tv^ dQw}V^i}⪠Ee|C O衍 K@kіbw( G;fM.]3:,!Q"or3(> ._-픙a4<9t6]IԮ!TʲR+ jHMo^AoIkNd$l.Xo8Ǒ}fHBsdjbl^W~vxa]Dk9sHTXxڻmJaveP#gHKCBJSq.&2ġO">*r Tf)޷)0Z4)eNM3ǓʕXXY48DF =_1 s脬hCڛ~W"'..*ۺqf7sK# Pf3|MsX| Ue˲oRTz+BVBLJ޼k*Mqj* Y;/eW 4 ʺ™JQ]j]/%"Ҝ*$1OvMf6*"ggԧ$eOoDkҊ~y^KD4tZ'˟߹r*3四 4'yFKpzKi~3|j'NhuS XaeK/S*a`1.:u](+. YzlTe~lDNf(\<3rO=JVd&9){X9 pA/Z~Cj>vOE<MӯTIS>k{X,5{[I~(X-)LIdEA>'}ulw墐a RmT!/1eR4Զi