q2-types-2021.8.0 (git archive of commit aa03196f8331e1f1fc41d46354ab4d2e02f84992)

q2-types-2021.8.0/.coveragerc

[run]
branch = True
omit =
    */tests*
    */__init__.py
    q2_types/_version.py
    versioneer.py

[report]
omit =
    */tests*
    */__init__.py
    q2_types/_version.py
    versioneer.py

q2-types-2021.8.0/.gitattributes

q2_types/_version.py export-subst

q2-types-2021.8.0/.github/CONTRIBUTING.md

# Contributing to this project

Thanks for thinking of us :heart: :tada: - we would love a helping hand!

## I just have a question

> Note: Please don't file an issue to ask a question. You'll get faster results
> by using the resources below.

### QIIME 2 Users

Check out the [User Docs](https://docs.qiime2.org) - there are many tutorials,
walkthroughs, and guides available.

If you still need help, please visit us at the
[QIIME 2 Forum](https://forum.qiime2.org/c/user-support).

### QIIME 2 Developers

Check out the [Developer Docs](https://dev.qiime2.org) - there are many tutorials,
walkthroughs, and guides available.

If you still need help, please visit us at the
[QIIME 2 Forum](https://forum.qiime2.org/c/dev-discussion).

This document is based heavily on the following:
https://github.com/atom/atom/blob/master/CONTRIBUTING.md

q2-types-2021.8.0/.github/ISSUE_TEMPLATE/1-user-need-help.md

---
name: I am a user and I need help with QIIME 2...
about: I am using QIIME 2 and have a question or am experiencing a problem
---

Have you had a chance to check out the docs? https://docs.qiime2.org

There are many tutorials, walkthroughs, and guides available.

If you still need help, please visit: https://forum.qiime2.org/c/user-support

Help requests filed here will not be answered.

q2-types-2021.8.0/.github/ISSUE_TEMPLATE/2-dev-need-help.md

---
name: I am a developer and I need help with QIIME 2...
about: I am developing a QIIME 2 plugin or interface and have a question or a problem
---

Have you had a chance to check out the developer docs? https://dev.qiime2.org

There are many tutorials, walkthroughs, and guides available.

If you still need help, please visit: https://forum.qiime2.org/c/dev-discussion

q2-types-2021.8.0/.github/ISSUE_TEMPLATE/3-found-bug.md

---
name: I am a developer and I found a bug...
about: I am a developer and I found a bug that I can describe
---

**Bug Description**
A clear and concise description of what the bug is.

**Steps to reproduce the behavior**
1. Go to '...'
2. Click on '....'
3. Scroll down to '....'
4. See error

**Expected behavior**
A clear and concise description of what you expected to happen.

**Screenshots**
If applicable, add screenshots to help explain your problem.

**Computation Environment**
- OS: [e.g. macOS High Sierra]
- QIIME 2 Release [e.g. 2018.6]

**Questions**
1. An enumerated list with any questions about the problem here.
2. If not applicable, please delete this section.

**Comments**
1. An enumerated list with any other context or comments about the problem here.
2. If not applicable, please delete this section.

**References**
1. An enumerated list of links to relevant references, including forum posts, stack overflow, etc.
2. If not applicable, please delete this section.

q2-types-2021.8.0/.github/ISSUE_TEMPLATE/4-make-better.md

---
name: I am a developer and I have an idea for an improvement...
about: I am a developer and I have an idea for an improvement to existing functionality
---

**Improvement Description**
A clear and concise description of what the improvement is.

**Current Behavior**
Please provide a brief description of the current behavior.

**Proposed Behavior**
Please provide a brief description of the proposed behavior.

**Questions**
1. An enumerated list of questions related to the proposal.
2. If not applicable, please delete this section.

**Comments**
1. An enumerated list of comments related to the proposal that don't fit anywhere else.
2. If not applicable, please delete this section.

**References**
1. An enumerated list of links to relevant references, including forum posts, stack overflow, etc.
2. If not applicable, please delete this section.

q2-types-2021.8.0/.github/ISSUE_TEMPLATE/5-make-new.md

---
name: I am a developer and I have an idea for a new feature...
about: I am a developer and I have an idea for new functionality
---

**Addition Description**
A clear and concise description of what the addition is.

**Current Behavior**
Please provide a brief description of the current behavior, if applicable.

**Proposed Behavior**
Please provide a brief description of the proposed behavior.

**Questions**
1. An enumerated list of questions related to the proposal.
2. If not applicable, please delete this section.

**Comments**
1. An enumerated list of comments related to the proposal that don't fit anywhere else.
2. If not applicable, please delete this section.

**References**
1. An enumerated list of links to relevant references, including forum posts, stack overflow, etc.
2. If not applicable, please delete this section.

q2-types-2021.8.0/.github/ISSUE_TEMPLATE/6-where-to-go.md

---
name: I don't know where to file my issue...
about: I am a developer and I don't know which repo to file this in
---

The repos within the QIIME 2 GitHub Organization are listed below, with a brief
description about the repo. Sorted alphabetically by repo name.
- The CI automation engine that builds and distributes QIIME 2
  https://github.com/qiime2/busywork/issues
- A Concourse resource for working with conda
  https://github.com/qiime2/conda-channel-resource/issues
- Web app for vanity URLs for QIIME 2 data assets
  https://github.com/qiime2/data.qiime2.org/issues
- The Developer Documentation
  https://github.com/qiime2/dev-docs/issues
- A discourse plugin for handling queued/unqueued topics
  https://github.com/qiime2/discourse-unhandled-tagger/issues
- The User Documentation
  https://github.com/qiime2/docs/issues
- Rendered QIIME 2 environment files for conda
  https://github.com/qiime2/environment-files/issues
- Google Sheets Add-On for validating tabular data
  https://github.com/qiime2/Keemei/issues
- A docker image for linux-based busywork workers
  https://github.com/qiime2/linux-worker-docker/issues
- Official project logos
  https://github.com/qiime2/logos/issues
- The q2-alignment plugin
  https://github.com/qiime2/q2-alignment/issues
- The q2-composition plugin
  https://github.com/qiime2/q2-composition/issues
- The q2-cutadapt plugin
  https://github.com/qiime2/q2-cutadapt/issues
- The q2-dada2 plugin
  https://github.com/qiime2/q2-dada2/issues
- The q2-deblur plugin
  https://github.com/qiime2/q2-deblur/issues
- The q2-demux plugin
  https://github.com/qiime2/q2-demux/issues
- The q2-diversity plugin
  https://github.com/qiime2/q2-diversity/issues
- The q2-diversity-lib plugin
  https://github.com/qiime2/q2-diversity-lib/issues
- The q2-emperor plugin
  https://github.com/qiime2/q2-emperor/issues
- The q2-feature-classifier plugin
  https://github.com/qiime2/q2-feature-classifier/issues
- The q2-feature-table plugin
  https://github.com/qiime2/q2-feature-table/issues
- The q2-fragment-insertion plugin
  https://github.com/qiime2/q2-fragment-insertion/issues
- The q2-gneiss plugin
  https://github.com/qiime2/q2-gneiss/issues
- The q2-longitudinal plugin
  https://github.com/qiime2/q2-longitudinal/issues
- The q2-metadata plugin
  https://github.com/qiime2/q2-metadata/issues
- The q2-phylogeny plugin
  https://github.com/qiime2/q2-phylogeny/issues
- The q2-quality-control plugin
  https://github.com/qiime2/q2-quality-control/issues
- The q2-quality-filter plugin
  https://github.com/qiime2/q2-quality-filter/issues
- The q2-sample-classifier plugin
  https://github.com/qiime2/q2-sample-classifier/issues
- The q2-shogun plugin
  https://github.com/qiime2/q2-shogun/issues
- The q2-taxa plugin
  https://github.com/qiime2/q2-taxa/issues
- The q2-types plugin
  https://github.com/qiime2/q2-types/issues
- The q2-vsearch plugin
  https://github.com/qiime2/q2-vsearch/issues
- The CLI interface
  https://github.com/qiime2/q2cli/issues
- The prototype CWL interface
  https://github.com/qiime2/q2cwl/issues
- The prototype Galaxy interface
  https://github.com/qiime2/q2galaxy/issues
- An internal tool for ensuring header text and copyrights are present
  https://github.com/qiime2/q2lint/issues
- The prototype GUI interface
  https://github.com/qiime2/q2studio/issues
- A base template for use in official QIIME 2 plugins
  https://github.com/qiime2/q2templates/issues
- The read-only web interface at view.qiime2.org
  https://github.com/qiime2/q2view/issues
- The QIIME 2 homepage at qiime2.org
  https://github.com/qiime2/qiime2.github.io/issues
- The QIIME 2 framework
  https://github.com/qiime2/qiime2/issues
- Centralized templates for repo assets
  https://github.com/qiime2/template-repo/issues
- Scripts for building QIIME 2 VMs
  https://github.com/qiime2/vm-playbooks/issues
- Scripts for building QIIME 2 workshop clusters
  https://github.com/qiime2/workshop-playbooks/issues
- The web app that runs workshops.qiime2.org
  https://github.com/qiime2/workshops.qiime2.org/issues

q2-types-2021.8.0/.github/SUPPORT.md

# QIIME 2 Users

Check out the [User Docs](https://docs.qiime2.org) - there are many tutorials,
walkthroughs, and guides available.

If you still need help, please visit us at the
[QIIME 2 Forum](https://forum.qiime2.org/c/user-support).

# QIIME 2 Developers

Check out the [Developer Docs](https://dev.qiime2.org) - there are many tutorials,
walkthroughs, and guides available.

If you still need help, please visit us at the
[QIIME 2 Forum](https://forum.qiime2.org/c/dev-discussion).

# General Bug/Issue Triage Discussion

![rubric](./rubric.png?raw=true)

# Projects/Repositories in the QIIME 2 GitHub Organization

Sorted alphabetically by repo name.

- [busywork](https://github.com/qiime2/busywork/issues) | The CI automation engine that builds and distributes QIIME 2
- [conda-channel-resource](https://github.com/qiime2/conda-channel-resource/issues) | A Concourse resource for working with conda
- [data.qiime2.org](https://github.com/qiime2/data.qiime2.org/issues) | Web app for vanity URLs for QIIME 2 data assets
- [dev-docs](https://github.com/qiime2/dev-docs/issues) | The Developer Documentation
- [discourse-unhandled-tagger](https://github.com/qiime2/discourse-unhandled-tagger/issues) | A discourse plugin for handling queued/unqueued topics
- [docs](https://github.com/qiime2/docs/issues) | The User Documentation
- [environment-files](https://github.com/qiime2/environment-files/issues) | Rendered QIIME 2 environment files for conda
- [Keemei](https://github.com/qiime2/Keemei/issues) | Google Sheets Add-On for validating tabular data
- [linux-worker-docker](https://github.com/qiime2/linux-worker-docker/issues) | A docker image for linux-based busywork workers
- [logos](https://github.com/qiime2/logos/issues) | Official project logos
- [q2-alignment](https://github.com/qiime2/q2-alignment/issues) | The q2-alignment plugin
- [q2-composition](https://github.com/qiime2/q2-composition/issues) | The q2-composition plugin
- [q2-cutadapt](https://github.com/qiime2/q2-cutadapt/issues) | The q2-cutadapt plugin
- [q2-dada2](https://github.com/qiime2/q2-dada2/issues) | The q2-dada2 plugin
- [q2-deblur](https://github.com/qiime2/q2-deblur/issues) | The q2-deblur plugin
- [q2-demux](https://github.com/qiime2/q2-demux/issues) | The q2-demux plugin
- [q2-diversity](https://github.com/qiime2/q2-diversity/issues) | The q2-diversity plugin
- [q2-diversity-lib](https://github.com/qiime2/q2-diversity-lib/issues) | The q2-diversity-lib plugin
- [q2-emperor](https://github.com/qiime2/q2-emperor/issues) | The q2-emperor plugin
- [q2-feature-classifier](https://github.com/qiime2/q2-feature-classifier/issues) | The q2-feature-classifier plugin
- [q2-feature-table](https://github.com/qiime2/q2-feature-table/issues) | The q2-feature-table plugin
- [q2-fragment-insertion](https://github.com/qiime2/q2-fragment-insertion/issues) | The q2-fragment-insertion plugin
- [q2-gneiss](https://github.com/qiime2/q2-gneiss/issues) | The q2-gneiss plugin
- [q2-longitudinal](https://github.com/qiime2/q2-longitudinal/issues) | The q2-longitudinal plugin
- [q2-metadata](https://github.com/qiime2/q2-metadata/issues) | The q2-metadata plugin
- [q2-phylogeny](https://github.com/qiime2/q2-phylogeny/issues) | The q2-phylogeny plugin
- [q2-quality-control](https://github.com/qiime2/q2-quality-control/issues) | The q2-quality-control plugin
- [q2-quality-filter](https://github.com/qiime2/q2-quality-filter/issues) | The q2-quality-filter plugin
- [q2-sample-classifier](https://github.com/qiime2/q2-sample-classifier/issues) | The q2-sample-classifier plugin
- [q2-shogun](https://github.com/qiime2/q2-shogun/issues) | The q2-shogun plugin
- [q2-taxa](https://github.com/qiime2/q2-taxa/issues) | The q2-taxa plugin
- [q2-types](https://github.com/qiime2/q2-types/issues) | The q2-types plugin
- [q2-vsearch](https://github.com/qiime2/q2-vsearch/issues) | The q2-vsearch plugin
- [q2cli](https://github.com/qiime2/q2cli/issues) | The CLI interface
- [q2cwl](https://github.com/qiime2/q2cwl/issues) | The prototype CWL interface
- [q2galaxy](https://github.com/qiime2/q2galaxy/issues) | The prototype Galaxy interface
- [q2lint](https://github.com/qiime2/q2lint/issues) | An internal tool for ensuring header text and copyrights are present
- [q2studio](https://github.com/qiime2/q2studio/issues) | The prototype GUI interface
- [q2templates](https://github.com/qiime2/q2templates/issues) | A base template for use in official QIIME 2 plugins
- [q2view](https://github.com/qiime2/q2view/issues) | The read-only web interface at view.qiime2.org
- [qiime2.github.io](https://github.com/qiime2/qiime2.github.io/issues) | The QIIME 2 homepage at qiime2.org
- [qiime2](https://github.com/qiime2/qiime2/issues) | The QIIME 2 framework
- [template-repo](https://github.com/qiime2/template-repo/issues) | Centralized templates for repo assets
- [vm-playbooks](https://github.com/qiime2/vm-playbooks/issues) | Scripts for building QIIME 2 VMs
- [workshop-playbooks](https://github.com/qiime2/workshop-playbooks/issues) | Scripts for building QIIME 2 workshop clusters
- [workshops.qiime2.org](https://github.com/qiime2/workshops.qiime2.org/issues) | The web app that runs workshops.qiime2.org

q2-types-2021.8.0/.github/pull_request_template.md

Brief summary of the Pull Request, including any issues it may fix using the
GitHub closing syntax:
https://help.github.com/articles/closing-issues-using-keywords/

Also, include any co-authors or contributors using the GitHub coauthor tag:
https://help.github.com/articles/creating-a-commit-with-multiple-authors/

---

Include any questions for reviewers, screenshots, sample outputs, etc.

q2-types-2021.8.0/.github/rubric.png

[Binary PNG file: the bug/issue triage rubric image referenced by SUPPORT.md; image data omitted.]
(E\rJ>}2g`>}ZgΜӧ+W`0_q\\\%K+WN5kT9$ϘԩS:}N:]qSM0A3fPtt(z{xCpBmڴy`0hڿ6mhTMݻw~zmذAғ'OL%Jyj۶*Tę:K.i͚5ڼy;(ƹB jٲڵkB %q҃b[Ew/FGFFjŚ]t<$???EEEI8>vzRѢEuʕ$Uzu |ڷooҽfS5jO?5ڶdXª6m'o׮]2ev!k_?aÆ=zj׮m\֋}I' @;vɓ'M#Oҷ~kl9z:viӦiѢE*W\#""gIh@s(^x큁:tlbUѶm۴m65o\S\̚/oݺufIү_O>5y̍7ԡCi[bV\_:tf͚e.]d$I|'Ol; Kwv=zƍ?~ϯݻ믿Klٲ7oM ^v1UZU~m̟,[L˗iU7oVٲegƭ^Z~~~6/xܹS-Z0~;w֭[Z˗/5-Q'EӮq#""d/_^=z͛7,֊+;tD[ʕf]|Yyn*___b={L}_|amک`ѣյkW=~8...j֬vء7n(22RW^UlYݻwO5?l-v%kNϞ=x_ՆY'{R7'ojΜ9ѣcך5k4zh3Ʀw~-\P}1.q,W\ʕ+5j%KW^&7ne˦}iҘ1c#Gɓ13G*TP`N:PhhN:G***ʤ8UVUժUU^8m CK.Uv^z/_>uI۷ט1c4mڴD犈PvcլYӤչsgX5ٳgmV ,ŋSddU󹹹QF*U1c(,,L>ԑ#Gt9֑iӪv*VMs1X{, ՛0a&N9TREWV맟~RVL*R :T|MK}G&O&L~I/_N:%`0M6Z~IsΝ['OTk׮ԩSڸq:dϟ_|>C3fnܸϟ'8WZo߾ߵh"]~\,Y]&`0رcF>cmݺլ 2) *\Kn߾QF~0+͛7Ox͛75x߿&M$oooQHFi3gNmٲE+Vx /_^>LouIK.ѾϞ=Sum(UN8!wY~ʖ-4ᅰ-[V'N0{ܫݻ9rmsuuՎ;۷QF&iժɅ/dɒ&رمM6K.hLƌg2eѣչs؝D HtTsxչsg?WZJH.]|rLlx.]TىbJKX6mژK@@/^hyjȐ!&oݳhܫ*, 6,ނ׃ԭ[7 ^4j(s*VTbr+Wȑ#&-Zhҥfy!,,쥂Wٵ~7L+O< {zzjŊ*UE:ɓ't5egbΓ1cd%  ֐!C}xﻱQFrcWX=c^UNO6ͤЀ>}z9v- qx^^^ 矛DZYyy v*lRժU3{ܿjFRIt)=<<4vXs4iEc9~Fڷoϑ /W/{6m(**1ϟ?7-IPʫxfpݛhٳ[nf-o6m2|ʚ5Ѷ˗/kܹ&LjoXBׯ_o1͛77{̿ 2DF֭X///w6k,~&SM74{ܬYnU g}j^`['%@dɢ ?P͚5%Çk&_j]frs{!wfaÆYtҥKM>јH\2sOOO?qӧOWddqyGFFj֭fDZ3w߿uY9r䈷=W\:zƌ*Upz7ճgO=zTY Zn%K=nڴi={y޻woI QuujnGh ѣG͚;gΜfcUvwĈf/IgVLLEc|_~{}c,aB.]#G}'Oj…*S,͑ZtY``֭[gQUVɓ'/=K>:vh| PbŊZ~4qD(Q"c.[,>7n4{^KW6{LttM[Z5ըQ_wEڵ+u&xwRJ/]dq\YfUjK:t`1-wy֮];eɒŢ@ҡ0[5n8;wN>e˖-Ib>}Z.\Hρ̞7$$侏?̙3GYw9Ң(`0y>dȐ%} :Tk׮5;γgt]YAVe5zh;vL~~~f9y݃m@D `*Uh֬Yy6mڤVZæ1lH޽{+cƌʚ5kx{{+s2d={fQ?6ofTxqcڵK&ҥKokv<<<4j(|_xxVXy>}̎ 샢fͪ:wFiAAA]ry ҥrav˗| <˔)zหW*Zhrz^{qv8EoOOOu>(zYlYY'!%JД)SѣG9֤g3f4hvOj…;}{v,Y$8?oKy{{%hWh?cGnݚ`Mj]t)ޝ@I;K,<./WWW 4(-=~pڴi]Ec͕?~ġf_њ:u͛og- ۢ<}P't>K5jΝ;o|_Bw4Yzz-Cյk_sm޼YZ2-_>>۷Oϟ~}%޽;N>}X^DrUw1 ()+V̤~E`NME#F=&""],>ԏ?_>}ڽU@sRiԨQfwzuX" `0ԧH"]MrI8$XQ$)yxx\rȑCٳg7;v@@@GJѼys/^q͋?ĹW]7k7x<9>%gΜ޽LEFFjɒ%qk yQ'ц/㏊C6x"""WNx\\\TJcĉfK6l_ 6H2~a˖-޹UP!̙\8p3fҦMk֘gj˖-駟tΝҦM]2E(z0eWӭ[n:;d;w9JU V%c`ھ}Ec}zMڵ|||7sLi5h rUcu%… ?~!e3(o|SNfꫯwyޢE ȑ;Nä#ƍ_tСDt)Ѽ}YƦŒuQƍS*UtY驁=?,I:y*U}ltګ;ِ!C,Sox IDAT̙?ƍho^ϟ$O*[n޼ia_~~~5c խ[Ws-ZTڵ3{ܫ"E^z쏢8tҙO>QPPPrƍDRرwNYr$M8Ѥ3{l}W/=ԛoi/[nVkYTx!cƌyYӧOe+W.Oy瞽kƍ/=H>HֽիIc@A IBCCѣGIxӧOO>Znŋ>Sg}f~ &˪_5l0_OV>}z͞=[7o8gԮ];-^8N۔)S,*R<~آ<ý{C㚲KU&LPDDDי3gV…͎oJR˖--&M}Q''O?~\M4I̙3-O>QM]|EL4ɢbbb4h E2eʨ哐bŊyO6guyѣg0ԵkW8q챏?{ァiرj֬i,޽{q}9sqձ-AD]6qM6+s?;6m9sZ/^DrmVztMhʔ)EذaC97n?աCM4)ν>|ԩٳgiK.VXaSXټЮ];UL=|P5kԆ Lk׮]q (`)-.&&&G5cҤIo~z#o޼f9poΝ;3k˗Wƍ-ۧO싢8K^\\r5k?nqgϪaÆC_ʕ+-1|+Vq111?~jժk*,,,N(:tHzoN)SXT 2UUzu2fyKNk֬QƌnZmڴѮ]ӧOm65iDjR```>iҤ-:F޽{jժ._lXI5j(G4Gdd>3M>ݢϟ?׻ᆱ 6\}Uƍ4 jٲ[]rEu6lؠ GqܹTbQƌ71 R l:-R^,?~ 8P)h[nСCocǎD_,YRwz'R``jժ[nY< .ܹsMWPPP=6L_qMqFjʬ15jЁl4ib=W*^rȡhݿ_/^LpNWWW-]T:u2)u9׆ lr|g,YԨQ#UREEU TLeϞݢBBBt1ݹsGGѺut59Sҥ?~e͚U+V4il g<)R;̙3t!SjĈ6E/p"ǎ3wb˧ҥK+_|򒋋BBB#ԩSq5lP+WT֬Ym_@@{=]r&{Zh#&&F%KŋMfi&Iٲe!\\\4sL}&?~&MYϢEԣGƮ]Vm۶qF*Tn'O[oeՎhǎ*W޳g֭kRߌ3ڵk=OR&MGɽ^z_~w5>I>>3 6LnnnVBR}ڽ{5EǏg}f.Ç&ϗ/Zld4iDP۶mu…$#IZxڷoqe/.B iKj:uFOon(xq8wz+WNuQ5k,rI&wﮀ9Ҧrȡ]vi5kV^Z ^/tU>>>&8pݓw_˖-'NhĈJ6m(VG+ >\SLnnn۷>dƏhWWW <8II8ƍy/_>-YDGUʕ%S0ڼynݪ5j,~|4~x]pA/Vm61...8p._qY|ϒy{'ّԀ>}zԩSuyS6?שSTR% ˌ9RwVٲe-.]:mV-[6go4n8ɋo iq8ǏB 9pĈرc;sN::qݻgRӫ|]5kʕ+5~ٳgڸq6oެ;vfԭ[7KEM,Mw=(P@ӧ,X`Ǭ'$$D?l٢={$1%JP޽եKw>秥K/5!W\Y4h6m$6mJϝ;w~vE/p2A&LPcwuYƍp߿P`0(}ʘ1ͫH"I~bbbt=zT/^իWu-EEE)44Tʚ5gϮrʩjժz7hgѣw^˗OSb=_|ʙ3gr(IzN>G*((Hׯ_۷0e͚UYfUΜ9UR%UZU JaWҥKzٳ+w)⿣`Pٲeuԩ8mK_e#JmQSXhzw}g9H]R֯IСCq̙S;vLlQû{VX~(OOdE/8s*""gL[`G111u넇kĉX ,jժiƌ<5jҧO;rOĉuM͝;W>bbbԬY3>r~wkq|׺sKϊ-$y~\ j]矪Y"##վ}{-[LiҤIRu֩M6F۲e˦k׮tGŋUB>sqqy$N`>T)IZj5jR1ۼysm_c U6m|ݻw`SPPKvڥƍÇɔUsۗ/_.XÆ uɗ .)S$Y~ p!@[dwo{ ce˖͎YNJҙ3gmwqqQJt%dɒI&HP``*TG%د\rڹsgnRիСCfK6~Wխ[7 )$(u1т$8qB׽{Y[o=&C ڰa/E/$2~x$WWWm۶rww7ɓzwt{ 8P&/^8{/ )E/$pYM2ETL:tH۶mիyx 7OVݺuum{j+VLVR,YQFѣ*Wɉ;,mذ!NfjѢ"""-^vޭܹs#TΝ;Z|ۧ 
=}T3fT…UNuA>>>ɝ&#^I(*** nݪVZٳgFK,ݻw+gΜI"C`6lM6)]tF/\u͛v u(|X@ аaCmܸ(z,D `^) / Cr')**Jо}}/Yvޭ9s1K@25k,]zլA }xŋkʝ; 3H](z$VZ:w$)cƌݻ7nB Ç:~֭[;v(&&&޹E/dzi޽jժiʕ+ǏרQk׮xkr'؂W͚5{x ^T|yܹSӧOWڴi9w֭۷o'I);,88X ֣GSN)O<&?ԤIݿh;;3b-\P=$kά$UREW 8#^vq,dɒڻwrmEڵkP0;v,kk ,Xmۦ,Ym?wUݻwi;6l`ql=p&VdddVojUDDD7n^zzw+>|X+W*U1 E/; ygϪVZVYvm-_\ڵSLLLv۷k/=WkUl ՝],պuk >>>>Zl\]y=#77޶m}R{Fe5?QƌU\=}T]tQTT󻹹r2 Zn*VV`ACO8v֩S8:~d4iHO>Qn:e͚&1R^v֡Cyh"9RFqEIO?TAAA/p4,gΜٳѶiӦGu{n={V={9rh>`lSjf͚Zzrm>T*Uty)88XYdU);&NoTB-]T111SmVϟ$կ_p*ŋu]zUTP!URE 0kݻkɒ% TF-[*M4qڏ?={T^=rH(z`0hݺu:u8...jԨ&LU4j޽}TfM.]ZO߿_~ӦMYo P0{Ծ}{ouww_A4whhZn]vY^uRHDppj׮mRK4x` 6Lmۦݻ[g2e}v ^)QH@dd7ogϚ=v&ҤIŋk̙ʐ!ٱZh!˗]t)S8z5ydy.]:Ϙ1—$ 4H޽I1J(~I7nTLL83G^8+QR?XK/ܩ@&L5iҤدկ_?_zbbbt1} 2U@-^XoҥKզM+WN^^^ʕ+*W~~S@@6mj 8G^8+˸L0a&N(I{%sVWCՌ3$sڵkU~}}o߾^z%rjȐ!o$W0 ϮKvz'cFQ6mZmڴ)ނ$̙S?&NoQ_l+:d/'C ڲeԩ|II>l٢u&cVwJ>#ծ];1...7n;C /gX8+QRǿto /)y.\[ 7rHٳ{ _@2s a ^@ @ @j/d>s[ `PLL$z3۹sg _@2r ca^@27n\/B @/dΜ9oF4| 0@E$eΜy--|EGGkŊ86yXXl=ٸq_~eʽ1`OB`0h/ϟϟ+M4 jΝ;Kw83fPttO.777]rE|l٢;w*>lyXXlz Y|G/cΐ!nݪ~;?[2M/^TB|x _THɓGRddҤIիWe˖VWUҠ$W ^3gUjd r2*Yܹ`>}hVK/|^pWI;; p2ǏO%I}~7%vǗD 3WUb`GcǎɓcΒ%~W ^Rgx!sQUR%vוeUX1c/^X={`>gX8֫N/N(xH-̠A7xCTHHN8%J=5lP׮]:v=TJQ,ԏ>(zI`0hСq ^i %r2WztAUVMTT)޽[%K4:&((HuֵI+[lN 03WR7+xC   6L3f̈}o@rs2A...qߺuKΝ;gt\"E{n˗ϢoV3 ^q:^;$b/1V\riϞ=*U@ժUK1cbbԷo_ ^u^;$5sgT_Rھ}|}})x`Z^%vz6_kǎ8B&22R-[ԡC,#W\+00;6l/`&G^8֫N/  Yf>[o%cfׯ,X 2h֭z-Ν;W(P@{Q- ~^pWɋ^P8 I&i'OiӦVz״sxw|]rE7+::ZsUxxgz(z6`04x8;vP8 Q&Lx٣Gk\ݻjѢ֭[w8zq^ oXEkٱ^*W@\B_WDD̙3_Uj,q-;:wlٲ}z֬YO*M4+*X_\|YժUݻwg;`>G_8֫`!AiΜ9(xHLppW .HgG`Ǐǎ/` G_8֫; Wڹs'/)yZj .H"ڼy>|7naÆF򎯂 m+*e`0>x;wRJɘ /dÕ9sfըQC5k&Iʔ)6mڔ䅯>s ^aXR.75wgڱc/)31 TڴiEDDEھ}ѱ80} D 3WR7֫^(xHM테т$KN?4ibѣGjРgQSNQlԋ* GHx!Wڴivx:|䉚6mjQ111;v$ ^%XW0 8p͛EbŊɘ ϫB p IDAT @jzz HC @/d?A)$$ĪyҥKM6ŻѣG5yW%?k͚538z^.a04`͟??vڥ2e$cf#1 ֭-[J*iǎjNKw|iܹܹreUr caJ}A @j/d&Me˖I9w}n;c8p@+WѣhrWUN/&BfժUС^}}a_͛7׎;MV5kThh;&IU\9b+*KLLz%K> r2V:uS˗Ν;-[6ܺuK VDDI/^ݻ[p6^pW_LLzR+GHB&((HM4%IǏGʕK:t0o~(xfr `J(z_kҥ(xHLhh4iwJjԩS}z-M>ݪ8q `r *x.]:3LLLڴi3gΨxڲe"##u1-Z[jwTTb b-֨K"1D{KJWc^(*-5(bIٝ.lly>9-3lg&puu)S&[VhQر...zAdk^` Ȧ) ^Dd1laB#F@׮];j֬cǎ-rvv\rXr|988`ӦM(S^m"[WDdXB Bf~zi/"2Wyɓ'hѢݻvz OOOۮS.^B ˗زe ɓ'cҤIf^`> &i =UȈㄌ*c_B)R^wޅ/d;c""Kze!BA^ŋgEDf2ٕ*U*ץ۴i8 ǫWKWPXR^Y/^DDDDdS2?SVxq9rNhMպuk|RfΜ)ܢE 9HdSXR^Y7^DDDDd3x%Lr Ο?-Zٳg9`H,8eÇzDy^DDDDd  6H24RJ(;kQ(p9($$$ ##e˖76mSm_ؾ};jժ]3gbƌȜi߾=߯#1^Iݻw{F+<<ڵBÇQHIdnXlC/""""z ȒXݻw-[HҤzLj#k۹_...ѿ,X׮] pIB ҥK(Sn/FYs2%KoŜ9s0i$x~}͛g`EDDDDVYkIKKԩSpBtfϞ.cs r㣏>\"[dT_#%%|e .xxx9c-Y-B?Yky5ksx@TT={")))cK*'NF:쌍72"Ғ+SIMMxɓ1k,Y9<:tׯ_Fl^DDDDd27JJ(#G0""c2hܸ1nݺ]6??? 22ك\۫]6qqqq֭?kΝ;ѦM^zeJǏǂ 9s&LbpϟG6m >D% nR^&^DDDDdu4^GEʕpdDDYLFFڷoÇsرcao}љ]xYVR'N@…sԩ;w.Jcjժ 6jժڿ0"fʔ?VZiK׻|XbmY+ЋBi&i/"2WH=[W#G 888 ڵkɓ'# @cعs'n݊'*jsD"KzEY1"""""P(,Ld\\\qF#>Rݻwzejl|խ[zRK?QYte^\̍-M-[W6mxUɓ1k,sqq6*V,c$2'WC/""""8۷D'/"2W6! sIKw .>^2G}5k4^5u*3lc$2W C/""""( Ȓ̛7o_|)XU ))I>l-+seR 6u|DyrЋ,Fzz:2ZlY^DdluBt*̙ :t(TEFF>-ze2eԫW~)f̘-[HHH{|J3gDttmr~X"""""3}bҶ'GFD-OȔ+W'N_räIdi  P+Mdl^;P(?3/2 +ҖB 4IKKC>}_Ix 999aƌ8qʱQQQ(S Jv777=zT{\t kݻIdk^Y 6h Kɾ|+WQ`h(ezE=li ;̎NǣYf*>&MU{yyI&INNF6mj#}Gdk^Y3lڴI㒃'OƬYo޼'O2777ٳ銡?GFD5OȌ5 nҸĉ}ʶoF ԩ6l`И""";udP[Dƚ땵ӧ֬YNիW/.]=>cxyy4www۷Z2"m^>xO/""""2;iiiݻ7#mFXX/"2;|||b ѪU+$$$}ؿn/FBd?q8::GڵkmذAc Ϟ=CuٳgѰaC ȴXPҋBZZzꅽ{Jۼq1+W.FDN!`gg'=OHH~^l5j @ضm5j|u~g}FDDuԩL뫌!s\6lÑ???lڴWKd^Y[nr労mٳ'6lWWW@WFt OɁWzQcEDZ'dJ%tI&q@3xA0Jv{,X@˔)mۢt(Rw^\v PdIbŊ:XkѨ[.?~vnݺ\튯9s`ҤIZ+>>-Zݻwxɰ^\zQRx+Waaa XҥK1bxxxhܸ1mۆ}ʲի1|p{f͚ؽ{7} ҂5+k7xÇZU˗s5j9Db2o7֮]gHHH@ٲeQ^= 4ڵQЋLZZz`i/"2W!_ܿh߾=;whDEʕq-Z.XQZ5uۗL撆퓶1""se2BtAkĉhڴGFDze 6m!C`С? 
"##agg'{_x)^}b۶ms{{{m 63]իW1Oz%"""",-LȬ^Z x 6 ϟG|82뒒!D/#җ-+Kh"ٳCJ˕+g x{/9_ ȔXۥK}vy5aT^]帤$,YfBRRRvѱc< t[H)))ԩJ̒-L( ̜9P^=^QlYݼyǏ70رcѪUM/[W޽{6m@T/ĪUPreٳgF?} ȔXߒ%K{ 6mNxok? .vj ;f! ȨRRRХK>|XږxyyyȈ 3g`޽9s&D 0vxôhCΝ!@jpAGzehٲ%?+VƍqIDGGh@Uw^dJWOE˗(]4.^"Ez^bb"zPbC^DDDDd4 ȒpB ׫ ;v4,Ǔ'OPfM|RP^YKbĈjի1x`|򈈈L|) 888=zooou0`IKKC߾}{nM|qyC""""2d^Dd18!t?w) |L<2ˠT*g^-ZΝ;y42W!""?JvI1Riܺu <L|=|e˖=../_>[m6tM~S/uЋd.X"/"2KQk׮Mׯ_m۶ٖ"`ܹ Sh޼9]fQ5` JC AbbV_x֭3XRSS1h !xI^QF!** ;wFHH<<<>˧s{NNN_ B>5aEDDDD 9"mX"='dryŗ+)) ;vdſӧxGիY+o|Ep^dRWm޽سg͛7޽;n޼ ͛7zoumTV 6mһxO/""""Mrr2:wGJxℌrǗ;ۇ?#3/[,XBz<2tWnݺpv튱cǢ\ry&~/e˖8x d/3"`2oɨZ*>|C\\>}ŋGzz:=̙za߾}ߐUŋ@^DDDD$ MWXXJ,##"Κ'dӱc8q(VڵkΝ;\M&BbÆ jϟ!!!hܸT1ժUѣ 0'CK.Etts .CN:*YkWӧ_|]ٳgcԩ6l/_n!Gؼy_l%Ɲ;wвeKDEE_@:t 40Xk"U ^Y-[O>ԯ_͛7j*( 7nA}yu͛7UkbEDZ'dbbbЬY3/ B޽WXv-s<ܖG۶mqỶ+nXh}B|7M<*"^t IDATY+&O3g`ԨQXlv'''lٲCR4oWWWtIe1zΒ^#kIMMEqm@ձg %%h۶cpfhB~^1Xk"F ݗ|ZJJRUDzeY.]={`ٰw ۶m|YFƍ,7$""""d^ǎe^%JȻV?={ETT--Z`(]t^Hv7 'OָsرcKe˖ԡze*AAAׯ,QN}ڷo/;Q_:u¦M?~iìފDrbr/uk$''v/HDDDD6%)) ;vT j֬HW~p<ӦMS{M9D"„̉'hӦ|+h ݋^z!--M6.]K~~~ h۶-Μ9c⑑1B2w v͛KW˖-~z>}ºu0x`xzzVpp04i'Oh1c`aEzeN9m4QdIqѤI:yHLLB$ڵk% 3xbѧOQn]QH1gϞa l^[% 3,ׯ/vZǦUV2ea͚5EBB6l4}pttosZѣG (O^O|A___[շo_# 1g)J*#4cEDDDDdle3gtؿt% dpTzEʕ1tP}_3fDNٍ7_{Z2MW… z}aoo/ &|-}HcZ!xQ`2_GŊ3Y:tqqqZ[bŊ"**ȿzx|hU/ &J,)֭[p'd&NvB BTf;~RPls V*Ν+>Si[۶mMRΝ;wT&|}}ʕ+T~A*Tze ^w;v^~WϞ=u8>gggq֭\9y$/WkYzojԨ!F!ƌ#>QV-é*UkկW^^B0"""",^111y=4Rpp}gy=" cǎihx IIIK?6=z$˧$… 2NrrZ;OT{׫WĸqT`el^|nݺ&b ,UO< ѣAc}h۶ƿ~x~rr]6/29+u.ŋ`ݾ}[L4Ixxxdɒ޽{Z[eB؃@||<>cKjժÇpy82둔+WbܹqQ ~0JD`ʔ)={ hٲeJ>7n͛7k֬4hm{QHm6l@TT\\\W_iL29rVc+V ձ?3f={ƍ/_>,XÇ8mΝ3ʘI^LׯQZ5߸w_ܹsuԑźut#88B@R o>L۷#::Z۷oŋ`h i|EGGk׮HJJBҥqitQ~~~3g߿ѣGAcO>\Э[7̟?_+",, %KS^nDDDDxݺuK$e'Xzu'2y s!~(^(QEDD___wPD qΝlU^]]tɵgϞ GG ȫ )\\\J{hdzY&@bB>o߾SNjUT~ʕ:ѣG2nR)zowʕ9?ɰ^Ç /^ܰ0Qt?uMJ(Sʹ*U2+21""""qqqqaÆ (>>^TPA\ZJ־21˜~왬gn4ɫ 5kGGG}vxÇxzjtR5;`?߃+K.W=M2E=3F6D|YN /==]dddhe-[S 4ڵÕ$%%:udСl}ʼݽ{WmƍӫѸq͛7kVr6ٴxkgΜծ]K,00wUoųgd+%%E9##6lLk֬A?6QNjɝ9Ocs j ϟ?'f͚믿ƍ7p1,_v۷/ .m oÓ'O>ɋKctըQ~< iiiZ pٖvssCPP<<7d_6.oooO*۞>}S(G$7+'dooʕ+V|sN|jغuk0""""Ih۶-2 渿XbUzu瑑Xpl@ѢE%11Qֶ 9s* UTGrr2\"=vZ!pYxzzt ˗WhQh|޼yq_|ƍ>۷ԩSqq9.\ƍ 55Us˔)|̙]CUU*^O +åcҤI(\0+ٳg/K.!22Πl2:UN_l@׮]:zٜٳҶЫ H ͚5Syf͚ٶ͜9ϟKDEE.:y9!w/OePQQQBH7lؐMvޞry7n0vuoVXj.\(}O?xصkW3FNdd$V\y*ϯ\y4駟l+Plip ]tO?W^!::SLA׮]e n޼)پt8o-\0s6mڔc? 
ӪU"V^VT\Y(JYٷo_{-KۙN8idmzY1M׫Wzhg ѫW/#GLprrZ}%"tQNiB)S|~Me'k8{ptt*zիWe闲k߾}߹l_@ӧ UOIIReNʒxϟ!V%DRRRgddڵkkvvvbΜ9zqd' |j8wl}sݵkΩSd5ee\xe}x{{˗~e.+@̝;WsF֭[G ł ֭[O$c2_|)ѣG*V/6x\%KD6m n+1""""" ,BM4qRE+%%Eߋ!;;;Փ- 9A bذaFkDD8w]ׯE``С(Z...aÆok7oVJ*%o#Qxqf9z״ >ʕ+'VXq)fmIm-ZT$$$zR'NzE}cƌ믿eʔWPA<~ؠA+u}QN_X&N}Ϟ=+a0' īWDݺuU>6nܘdZM|)J1}t'2NNNbFzdmBwxTV^ĤIĺuDxx,KӮYFyvlBH"Kooox 9QFiGEEGx$^}wïUV~mVj履~ WWW@|z_I-+T*G}$J*|ĈbŊXg+JѲeK@ԨQhK4C/""""+"ZO| !ĕ+WDÆ uV8{^5*k1$ʜl(Zhڴ2d;wصkyf~oV]t%ۄǏM<ݶm[G.X s .,&N(>| i&i"핸Ν3ůXk2w h]n_^ZܹsN;{ݻ<^/>{dtWmժU*>ׁ}5G?Zy5 e%^DDDDu>|.Oծ];ѽ{weKTcǎ 777m;99͛?4$+k7V^WtA3F͛7?CL4IZNIa^*J.;tO uB ݻwgkwذaZogg'|}}E^D߾}EN >FgT^z%\u֕~"44To^ ty?CB={ژ={ίĉ?9֪UK y V+ ~>?_IMMGΙ2e/l !"EGGuָr労q Apd/_bȑ9s&ʗ/HOOǢE0i$( 7o^fdd͛|2ooo4jzW IDAT"uLٳgK-[7m4̜9S>_~%JFn݊ݻ}kƍptt1bIs׮]Xx1bbb4SN~6RcѢE?bp[?RL%,, ֭òemױpBlܸ:]|yЍ7PF ( *T}4*UO>U{%K z;t+9y6mV{Q\9|r>}y3g4+#2+cǎ\>W|m޼{ 4^戡be>v-M,k|ݹsG*UJ\zU~__dluBF%K޽{ ???0^˺tڵks=^PVZxe:~ GGG1a.?kZׯs]YӣB b9} jѦMƪo={L5uT̚5K>GGGT\Yk֬slѣGz*\˗/̙3x𡴟2޽{ѥK̙31e[j o%J+W;w ""BzгgOAcGPP5j ,,̠ϧ)))(_<>} [n_+VĔ)SЯ_?888H۟={___$%%@xNɈ 4hlق~=z@jj*vڅ@=G{|esv܉K.ݻx9 .bŊN:h߾=6m '''C_yԍlWx5mڔWx7oވRJ\a_K.1u[м,~YTC &ӧ+Whu^NKf>˧['O^28qܽ{VxB.]Z+'22Rl޼Y㣣EZ|C/""""3)~2GIl޼Y+SnWnDRRtR-~ M(uB&...-M>]vr u׳gDɒ%xOzO~7u W.ǐIOZL-keoo/n޼)k .ޣ}b̙p:_UT[lf͒M>|(ǰIG\L)k@GRR(^ *v؄/HWȺXELL쯁W$ʖ-u NiK~ ̔Yf ذa4JŊe2|`E`2^...m͘1C6H>*U '''lxerttϚ)xFY"pR9{+%%E,YDx{{~e>&L zw-[=zQ@s_^z뒟x䉡/f؃΋/ЪU+\vM֬Y3߿^rq_DDŒoݺu zln:888e,dLٳgK-[T{ァoXbZ9sLL2E |'رcG}=z4NhҚ+S B~=..}d﫯jl`` bbb#Fݻ?s_K,AttA%U8pʿ7n1B(PBÆ u!!!UwG}Oܶ-bEDDDdf?VZ6^^tAAAFnݺcx1„ݻwq!̘1CcȐ!:MfiۥKЮ];ڵ+uꏴӠA7nĖ-[x@BB3Gӱze LG/z˗/ҧ;Əx5͛NNN4hnܸ͛7zZ4^"C^=l[p|FxB Ç5_nBV_7oD&Md//5#"""y}͛sIC 7dQ︤!.P(TϚ5Kz֭BT*ŋEƍȑ#ׯ/Qzu:dkE_ӧOӦMS=z4{*&M$J*eВ^*=~xC^iZ땩[0G֭ٳg ;11Q+VLZT*Ş={|՘ 3>Xٳgֈ-[,ix_b߾}"==ٻ(m. ҤXbcbA-F5j~1jbѠhL4jƆX5*"X( /ceβpkKvfΜqq3gիW/7o\~X2^f"!!kԨ˂UVMx=c˘ӧxA(2r;uǏ^ߘݝrQ)Be?<ժUXme2msJ*x13ZJhoɒ%z > ^hj̙3ܹsgE ze^d2/^\e;ʡ)Rxyyq||}D LixVZaJC r!$S FEE̘Sfdd()  ;;:ttB'N "eI$WT9lllhǎԣGJHH ???ɳ.22wP;Rک*{ה9իhVU~46;F7vŋjcܸq³V^Z-ZЁٳԹsgu}ʕW?ze~R)yyy\߹sg4TET'NP2eDE2uPة+--] R~}~32pذa񕝝k׮~M/0|r\\+VL܎?׭[}3f ِ!C4^믿* vssSAw[N}5djLP ׫$˹m۶LD'_~ .ӧ?u痿??^B#F0\=z`T&M2}m^=zeƬIII S\+[pW^LHYծ];NOO7u@sb_r7oŊ㤤$u۷oGFUd:uwޞ;5j`GGG4jRRRH222^{~{ T .ԹΞ9sF! 
Pv\GAW---gϞͯ_γ.==ׯ_իW+СGDDhݗa1o߾ `+++&"H$uVQPʼ bccgv!B/yBB&E /&"6mGFFSd6lؠw^xBhhK$?B \J<ﱲgiӦj4h/_֪/ׯE ZYYիW>e_̕L&{rƍ ڵkgΜX .8p>|8 yd\D j1c"B/xYa۷o3g"b32׬Y DWd 0 lZCuKxxQ~~~ Sk֭k.Gy۶m,նyMgiZӓZ6M6FPꕥq]~v޼y¶}dz0eh߾x]x?ܹm+99SSS0`/ G,ÇyŊܫW/c___8p XoܸUb_ׯ__lTa y r*% qF^ ĉ`n͛\|y([j֬ϟ?+ ʒEGG44JNNf777&k׮tze9BCCUr|EGG+wy]|Yx+/ '?^ӧ]u1!777̏(D0 r93F޽{MMqx(ɸUVJߣ!CQ͛ӧD;6+;ްaƻk-u;w9sL}JJ^L_Zjyꌳ3CСCLD\t<{Πs,lzQ\\0uVұcG^`ҤI{VlY^x1;w\5J+wڥ8ASeWBd "E0OMѺ}MСC˜m۶֗-[T* ںu+)SFaM2^J: ̬YhFr;w\.71ݻGVNJ'N`ڻw/xB$ /4rH322[ntĉ<233iȐ!`RUXʖ-r}Rn$Hhȑ#JOOOF>r)11Qi^x{{ƍ)22ƏOvvvz*۷/zd_PJJ uЁvMŋga^۠A?T~[HHy{{իiŊԠA:~~Ȑ!deeفԩ@A Wq/ r 8p徒S;#GUxϞ=+Qa+O<7ݻw`ݺu7j(YveFv` 5k0ŋ!x֬Y\hQرcZKSED666gQl߾]!⠠ j>}`@^JNN`.S[P^,Ⴈ޽{1mۦԬ666||WA  3Çr45a@WquѨQH.4a6mɉ^|w_e)wN:ENDDTH}мyݻw:M)SL&ӸD"UVѨQTnFDDdccC;v=zܷ?7nз~KǏsu҅ǎkhf)wrOiԨQvM63.\rи8>}z~([ڴiu=dr:!?~hipЌ^eo>^hX>M2%ߏs6m{yy+7i҄.]ʩޗbbb^,?֯_14bf`"bz d^DĥJ2I&1D"ᐐy)_c5h qu^"ҥ ;88xe4_C Q|effWD fz-+ˑ^n]#:%___zZ@@޽SZF)|UZxS>ʕ+GL};w׸8;w(]?yd RM6tY^ ͛7yޞ8@mڴ8/y3ԸqcQL]zer.^+V=xJ,)qH[l ҥKDkt;K[FU1B:////vLaP[z@… BUBڱcy^|I'OTѣG~F9pVTɠVPΜ9C͛7W~͚5 $[v-/>|XiEDN]t'Nr͛ÇNoq^h rY[[S͚5^KIII&z;>|(j^P֭)&&FxW^ mR:u>mڴISRR`kݺmb25jԠ:uPfҥKԷo_a֭[Ӊ'TR-"]'!9tEg777:zhgQxx} FݻwGeT=zmөk׮Muؼys #;;;۸ѣGx9+m^Y ym˖-"1%SuU>0bŊQxx88q><===iƍϗ/_0.h߾}T\<٣r?[[[ڻw//̜9SeVZѲe˔sqqpjذ(ER4A*W_ѱc n?%%_""*Qzjٲ/EL=hтn߾U{_Ν;Ӂ $6 D*wЁ+tυ?_pAN*QLڳgY[[+~eюiժUDDTjU $__tA1%J/_/czUqF/+RЀtn7>>իGDDD'O6?^"___a:"^ٳ4m4w}8::җ_~Is%777{ 5nܘ5nDM4%J==xnܸA YYYѾ}sڝ> 3j(Zf խ[]tzяըQ޽+ܱcG:|hz /_^b())/.1@ 6lm۶Mz1K. Be\^ :\.UԮ];:~m+ R222ϏΝ;GD.2eʈgB/\T^۶mS9 ЬY4NVT)ڰauQv߿OC !Hhʕ4f 2ӧL2TjUx UZ޽{GDDw咽={jߟ9ږ-[zcC?S9==]3Ә1chJӁM6:[jܸ1LԨQ#C J^f۷oM6њ5k0+󖖖FG7o5ժUNۃnݺj^:OԥKm=~zAW^^4hmذA``fﳻ;Ӈ߿oꮁ'NTx4-ּf7o\c|aĿ(l;c|ĉ|;ɓ'ޞ9&&Ơ&O,G:uX.kg̙y~ҥK%k m.N&СCUX>u:uE; ӱcG&"vssM6;&ze޽{sŋ+Vl e+++5jīVDyߋp%[n^!D"˗k},^x1;88|J*qDD1LNEDܬY30/^p>vҸו.]>|hPr?~\h~YXi;;w"2"S+Ntt4(Wݻw甔Sw ߠ^ԩ{W˖-9))I6WXwŋW+믿o H^*11;WWWyvvv_111~1^zrw 6T;͏?6uW|;vL^9KjѣGZRT- j0>^PEFFrٲe>1c{7l0~6AAA*\\\'?ޠ ,c"b[[[>tmr۷o9<<>fW^||} $+󕚚*\$HxʕJ;u+WNK.ƍAZ^[ .x ^P *Oe7n bUч>|X^ZhK+ggϞq`I&ZߙveЮ]zJΝ;^}]H|yNj3 4ҥ0 SwdPӅf޼yj}1רQC+ǵkxܹܽ{wd\2wܙgQ ƃ eLfkkw g},ZHЍd^VZ֯,RM4#˹o߾immͣGXmm߾͛7War%w˸L]@Ç3ч;`/^h yKJJ>GtA;MVZ_`$PDFFR6m(>>^x?͛7 {X|9M8ʗ/Oqqq:Mj۶-|2:DB˖- &U̚5ϟ/looOaaa/?uPzz?@>>>{˖-4p@"";w}222hgK$_>k׎ʖ-Kŋ8ڳg]tʔ)CN"OOOΥMҋ/ʊjԨAo߾*֖KW.S>}T...N 648hm۶t j۶-;v0 +O?7|C\BjDPٵbŊt)OE-%Sn޽{\L;,޽{XbzTz@Y;T-[Gd2]ކ oƌz;gU?|PVԼysu3{*mmmŋ}<˸L]@;Ϟ=UlM޽2u:uJx~wKHHEqժU5!Cpff@W!==L20MXbLDsiҤƟӾp-T-%K1cf|5mW>1u9rDxڷoq/H":hze9UI$yѢEݠޙӣGa5k_߫WK.S{ëWbJC#2ud2nѢ^uƆ |Ҷo޼Ç?W+++:Ŀ;?^6˖-SZo߮L&#0a޺uK &M$i CY5tP^… ɓLN:53g/0:SȜg󌯬,ްa(Q5k 3'TV899Ys.LBzeTQvmNMMU^&oƁrJ~}6A7of"RJqݺuu/9 wMa 8p 駜>aaaloorcǎZcӦM}U 9q{SfM~(&&& w{yyy ak׸W^.t"^n^x[r>v3vݻwÇowѻSLa"L|dz}G͔֩sV\|Y(ȑ#9X^`^Æ Ce޽Ea'''>{,337nܘ_s{gΜ-t=2/"Zj~J#Gd[[[ïF ?+WN;9Aw.\`;;;Xw|5J?g^z|H!!!yi„ Z33]tIɓ'vZq:JPn(##R)U\Yu}xٿt֍Onq@5S+PѣGԨQ#Zv-ry=m޼ᆪwމ/LFÇ,>|8& IDATى~ ^Yw n޼9]pY`N?~n̴f""j׮ e @W˲mݺUt9T߿mkחٵknZ|W |ǵSgٳgA܍s1c+VL;˖- $;;LJo߾̏?z펯L.@//<Z*yFӄ^z?r\+?^>3~hdHNN2eʨ 4/ggg:t(Ϙ1[h!ߋt2ܿӧOϟv܉' /[L}^~sŋ4xsA{gϞI&qff^2jUjU!lۿ|988[ =-P f͚xh ::Z-Z͛7CiWrr2Khwb:@Wƍx stt4O2Expl"( F/v&Mh0buN>^`,5Ǐ7xz m:6lP|U,]Ԡ#S'''u Rh&˅; ?]Ɲ;wV[?۷opG3sBB0p{)^8תUkԨ666 ٹsN@ze-Z_9uHGuq?.\PyCfdܴiSsssS|yatyf^L= cH{#G'OG||^ܼ,ްapdɒWi&+X; ֹ?S+ΥKՕܹ۷oW;u쓒~~~*f͚+P!_^H.ZhZ1a^z5;%\.ZjiCW *24 c&".R/Z߽{!!!,JU~94iׯ/7dDvvv#G>}^ᗝw~}B;s1ܶouQ%K~aֻwo&, }ꢦjժ+3e}Ndz[|ڀJ=իp@Y蝙_|} ʼd2g%ݛ% iF޺uK=<==իWB/0[׮]CU@s=رcy֭_^m5yd}^/e@ɓ'x ;88Dʦ#[`(m 2_^K.rJ *U(A@fe{X933SvmVVV*$M8Qf͚ܷo_LAə2PyڌU107ol1$z x;wN[ ׯ_k|+WD9VVV{zz L *ltٳ':t(Ϻkr"ETֲƍszzAP^͛ YZh/^й=1JJJ^ 0 sU TB+[R)Ӈ###=zB}u ._a!WFFK㸻+YsWŬY41B mרQCzN8VVV:}z1o۶M<(\l۷/Q SxM8-KWL*U׵kpWUa}6 S~ݻw?sů 
>NNNLN;ooon֬׮][PӐK.+zu҅E>k`.|ʒ|+ʊo޼)1NhB ԫ5g/(ʊ7oެq4_&L~B/0+W PS,ڵks]!*^^SVi a%K5Eadz^ݻwyȑ 2) ۷ogkkkJ77o֧۹uF cccWZŅ;v4A\,ь35koߊrܦM )SF6tz޾}*T:Zp~Zc! Lʕ+'7yd='NL&S&%%9ƍzqTT^|9::rxk : |s9sf -ZPΝ;>ɓ\H*\ڷo) rdeezeY?Ύleeŝ;w_~DcΝE]0`fE8ݠ^ի.zj9;; t>֑#G>S KtVX! XrLMWdJMM`.WNWo߾>˗D[li_`}JDD^^^t *S}Gj@p9^$ˉtRA3gΤ&MUX*WLTzu߅UH$ԴiSx1`@0-Zеk(00PٳgCxx8YYYx͝;f͚t]vv6ߟve10WC@@޽lmm?py{{ɓ'5{:txUPA9P̏T*%///;w,zEDԩS'ڳgʚk:u/ B/0sΑB5e^ѣGŸ_x>IIIxb-Z… ŋݽ{hԣGW8g^` g}F7n{Ul=zN:+nܸADBwܹsiJ!2-+ˑMoߦӱc… 3|=}h*xk׎oNDرt;!^RJ\e )ʁ tf`'''9LbnfϞ-Vyذalggӳ} ۷^`TxƄq_y~}||Lݽ#w=y(md2ݻ7eFP_jj*]u癶988++Kv5=+͛ѣm6ꫯC%F[@2wm۶͘޶mzyyq||ю B/0^ˮ]` zjnٲAˎ;ah0 ^xsJNDdnR(T_r!2W...:}}iu m/uK͚5F@2w2K(U0f D#af&EDD? M:,Yb^!222t TVR*VHO퐐6lVVVqF߿(ǃPӧOo߾t9Сmٲ/vR^4N [boɓ'};vk׎iܸq?|IhҥcxbgLi@CUp-\fΜiPգ!CPvjժLvoVnݢڵke0 Mf8rHEi;++J,pǬi֭ԧOQ ʼEGGS6m(..>L-[$gggJHHzVmUR8@^^^j&j߾=թS7oN;vEjRzB :tprvvpjԨmذ*z𕒒BRٳ'ݻxLz8gϞ3iL-ɻwQF:O#J[n?l;::˔)#v|<3(0DӦMS2M~~~3T}Lu(>+^:[YYo߾ͳyʔ)󓛛ߺuK5MuئMNJJ2i(ze9BCCՕ/^Kxuhvuu3gYw ^ 3g *={ժU*첷篾JgF>|XwٲeF>(0 Uhhhz\xqnݺ1qǎy֬YydܱcG&"P<'Lj?K-[N߾})--~'51cU^ShL/0‡Y(={FDTlYjԨf:uSN2P,X<תUTuy{{Q`` mܸfϞMS5P``N*U ^L򢘘jٲ%>}Z6޼yC{Ǐ&((HeM'ORҥu':WX:|0uԉJ.MO>U233UB9{,SZZ/:z(=z"""(%%ҨhѢ駟P׮]>pXl_ҥK%$$A : Ȁ%bfTJIIUR DժUW^Y駟ÇK~d2SNъ+믿֫ Jcǎ%kkkڲe QժU֭[J.P,ßIC%"-[Pj'++ wxT;!{GAJP@~"x$(Cӣ ( M"R DE@z( ded$3ILk3$3뙽s/_>j@ٺ"h?CpXpñc̋/hrֹ"""L.]̱co߾Fҥ_sL 8p Xb)r%̓O>i-[f\n?mڴTzv$+8 5GMu%44̟?듕QG-N;vj$o73p@J_+Wvy5k֘ Cz\}P2כVZ.TPtl޼٭=W^}hzKhxSy=)y%wf&!!繾;bo1!@ozmܪ5k46lp9Vddd۶i޽;</^4mڴqڮk׮>ޝ QǕ+W@tA;d7UThw_1& :Ԅ`3uT<dmԫ믿2O<۟7on>ӧOOk2ӧMBc M/` 9szj֬Mu_Ǐ7qqqng[nɟ?d:ug Uz\CBB;c_4ޙ3gLJ6G'4 ,0Ν3$?ȑ#ft)9ӧ / W%&&k>s^V5+fLٲe$^p*Zvˣӻ(P|g鎟^3_x4jqR%J^#^ݱ~o̜9ѸtٱcyL*UPTZ5sٿLɒ%Ͱa$%%':oJ2Ok SD kMdd4+W6AAAu .8~Sxq'r̙"_|sIJJ2< /PϪU&O쳱] ?k|ݸWעEK.mZje5jhnwܹsK'ߐ^RJf„ fԨQ)|ӿ z5]n[v- 5{ldiӦMͥK|%KL@TfG|=zԩqaڷooNj~Ws1ᅴ)Sڵk;WJ- Ä 8*,,|)0 IDAT=xy]gϞ)Lrn|vd꣏>&U`3gӿ?OgϞ_ݻk|W?;/Z;wnՖ-[ZG|/^<ŗs_B+p;vΝH2 4pZj"-?Lwr]v)={y衇'Oӯ_?CÄ IRRcٟPߦ.[t]wǏɓE]G?UZzӿ]PP>z.wwQ2;*Qٿ={ qf͚W_O_aaaf͚5>x@hzt]4qcw,c…fӦM~_4Lܹ=+[lI3fʕ+e˖nS^=3asY_}o= ɬZ^jz)+U涇r,ub 7T7/SسgO9uꔹ;-\b"##9++{?qTcPPPG|}w&44HKD;vf͚}QHH>}Okٰa/znw9s[l߾ݱ7GxGzV\-[*..q5`SeMpyeTF Ƿz6nhy{W4c ܽ{~T۵vZ͛7O=Os!{d[lq|ڵti5nX{ŋ>*"""|rrze+::ZdI 6UV~WJr͛PB)nOHHPΝΝ$+V̫#8[Wׯ_O111jԨ.]^1cMm\B2+WUVN A6ʚΟ?SNzѣGzox>}ڣ% թS'͛W۷Wxx׹H3|7&gΜNKX 4XYʱc;c~#GḺt$$$x-[8ѤI>M6x;w>!)),Y4iZ?n6l^yǘ毿rSNի[^2|U^XCvXƌ3+VL\֭͹s<Xb1-ez彸83ydSf4Bxxy_ݺuOJJr۷"EX^0<88ٳǧW_uGÆ }:>&d۷o76{nN;)mcq#<3oK>Ku2]7ovٳ 8]nvn:\| 1oS=X?ܣzSܾg;wN ҪUTD ƽƍ+I*Yqo;wN6mryŋh߾}}Scƌh̛jJ?xKk׮Z~J*e9wΜ9G}T;wtVxq :T111ѣGuEm߾]/rʕK,ѯQ2eʨI&)nߺu_O ,PXXGlʕy/Lx:tݱ]:^:uK.ٖˎozɓ'W'LW^1Ԁ3 _KHH0:tpjݺuGSƚ-[ '|>,\дnڄ{dD…ڵkFdFHJJ2_|T[Gxlذ^yܹsVZ/88ؼ[?Zjiւ?<_~e5m$%%Y}իNuǀB l-\-[֜:uHo߾~ 53fjI{QboLג?4I5rHOLL4Zu|rꫩYvms K.Zi[^yo߾}{==EC GiӦ~m|ݰuVӧOӨQ#SlYd (`*UdZnm>cs H;/^4<˗/YrG?>MRRW^c>&>>q1_~q"""LllqB ld4Ne䃏??ޔ,Y2 {ҷ9&f͚Θ?+ߩ3{lHLL4QQQ.kjb,:w)^x\rYɓ1F5,eCW=z sy5m۶5|Yz_̆ ̒%Kȑ#MTT)RbfΜ~ndƎk1f~k|%&&Zw\b~aWpauVǹpႹ]֝R_~%#%z]vy4q$]Zd$^d^ݱ!C8^H8Yti_nkTHeRĄ o*sZҥKxx\p NsSNNX]~|G&$$ir6zk׮.]8^9s7|Ӝ>}:m_n-Zd6mꘔ NsO?ԩu_'O47h;W޹vѡ 6r)MZ[/9s47'OLsXK/}ٿl@F@6|C[W^u:o;R Æ K$SD 駟:iذdrՇ=VLF{勣Sunݺ^9ttkuPPyG͈#?~s1vZ˧橧"%n>j%,jG5.?ݗ$SNS@sY +$%%g}͛'{W^<^bbi۶m%SO=e&Ol~W~sQb ӷo_> d^dq˖-Kz펕̝;(P\vH.<{5'N4Ǐw>11,_y睎 onń aڴiNGPEEEl *8>^ٽ{wj5k/f+7-[>|1=ܗ'Io߾^eENRRٳ_DzGy?ޫ1\b4h<; dY˗/WTT^*I ȑ#էOe= .t|})4448qmۦnݺtҪP|A5lPe˖?={(88XcǎO<ɑ 0@v\ϕ+.] ژ nz畔nsޤI?~  Ǫ^/_Ʉd+:tbcc%It42_5nԞ'Ojǎ>zc^u92}s~n~/q|^?\C Qxx1[oiܹ :Qhz-[^4c޽:zlLRbbt[ǎguU9rp{P ڱc6mȦ?Ç_~.\gU޼y}2$mV;wT.]<:zPB8q/^,WrUM:UTdI6'Jm۶9~>t/^yF#nXd>cϒz"%DGG+&&F5hۊ+jŊ<d v|oҥNܼy;ݻݑ6mdSP!s ٿ2d^CBBL {g=Y瘀?z. 
z_7Ιl۷ה,Y2ZÇ .-GvF/:8izDVk֬֨Wqu/WP`o (`$%Kx=6nh^x7o^'w&22̜93ӞpGϺgv˖-S6m5jTlVr1'N)IJKÇkҤI)sw(::ZѺtݫC)w*Qʕ+R2!#n5h IJh>|X֭$uE+V4;*TCjС:|bbb_)_|*RjժE#o}k֬i_7 0@o={ŋׯ}%IaaaV^yˤsͮ_zJgVTT-[qKvڪ]&N{*&&FW\Q|TD ժU˯2J1xo…zꩧtuI42R5d5kj˖-6&r֬Y3-[L!!!ڽ{#L/f̘{έZ,-۷ٳU`AܹSԦM{nJLLtL$vmZ|A̙}?VHHO)SFeʔQڵUV-=>} FG(#l= Ă LhhR+G;Vѷo_e!S]J7/اO,>}Ӓ{y'LZ]H:;wcbŊEB }sι8Stifk奁WQ^=k_tDxbir̙cDDݻKaj9Ps뒆7xoӦ?ᗥ{ؾ\r&wnՕ˛cǺݻMHHM\\(4pO3fݱ[?`J2'O;ŋMXX`'&d_|UZ5qFSP!5~t'ӻ.\،1\z5;F޽P'22'c? >q=qfӦMn|z7|5ouϟߜ9sXv2=z0yo1""|WnƗz@ZWtRnMbb̛7'e˖;'Zjŋz뭷$Ӻuk^'  @͛7/Ekرvʖ/^GPPٺu ȶtR bBvرT'nuaKGs)S1b9y>]fڴixܥK|5M05n׮[G*ܐdkӤI䳿Ðl22?>IP3tP/6o>׏?u>Kٳgwm$'c@@?:tׯK4f IDATNpoÇ\r)noٲ/^\X`ڶm+I;v,+PTոqc>}: 0@t\?r6lHʕ+zK 6T"EtI=zT?,Y;vXʗ#GYrȡUViΝϞ= Z^e WwѣG+$$$Ξ=Yfi̘1ڽ{_͛W?Vco߮h}~ռys_իWK>}ϟ+!!!DŽjيrܶn:EFFҥKڵk_W:u?SGʕ+tR=zRիWOժUӕ+Wxb#7o27W_57˗/OEXvZӮ];}qiذaFOy!b޼yСc ;V=z9'|}O?> N=#I7n7;A pwׯիW+44}?^|F+Wx}uV/_m6}{9M6o?ׯ4i 1O?+xy|-)UR7{W5zhK:xy}JJJxUjƍ PW׬YTϱkر?,Y$A^VZ"EI&YjxIMCjϞ=z#G-R^~@ׄ Tn]۸qcYq^R.]hN/^̙3} z9mVfJ|7.\XÆ kƌںu{1ZVXA M/2shxqN;zϞ=` Llݺu`կ_߶A ҥKkҤIQ-O?v^ΨW˙3-Z뱂ԼysmذA~~aoذ{=y UnZd^VXUVVZ>ETT) 7 ;w~ ݻۜ nݺD>ѻwo}ᇎ7nHK؆zxh|_)&&F.\PrTF ^dB^ǏWnlN?~\w}Ο?cBBB4i$uyhʕ+>kw1!$&&F5Jջwo͠TUwY͘1C˖-Ν;ui\rUV-EEEf͚~I/:ZJ5˾Pׂ Ծ}TW9rh̙j߾}&^d2s3}Ժu4 ƒ%K4h IRկ_?2ȪUիW{d|>}dP*xzw:uzѢEmJz5iFfRhhhILLTǎ5k֬ Ld}4$nmxkԩ1'((HSLQ }ŋu_~iQڵSbb$i*]O du4ğ_|Qw}FӧOMBBf͚jժw4J*e˖)^=&d]UVMVroz\\:tu:"nA1FT^=5nDWC6m_($$$$&&SNZ`A9vZhG#&%7j֬YرAqڜ {nh<ȑ#իzHUTQbŔ+W.]pA֭[W_8z7ߨbŊ|Ƙ۷q~!7|࿿gcuQ۶mS2eo>;rDK.U%IaaaڴiWns*duԫgj߾㋯j)kuEǏ?e˖;.hz`iӦK.JJJD+UΝh"N:ZhJ, `BU]vʙ3֭[%K(11Q ʕ+ugPz}c||Tn]ǗƎ_~'c^e_4$qƊT\\kOʕ+ ^dS^pjxM:U;v92̙3̙3>788X={￯0  5jȭ%ho?~}7Seoԫ#))I/ .>׶mԺuk:tHԳgO9GIרWX`ڷoׯ{ݝwީ5k֨TR~J>^d ^̙3:tƏxǫUƍD*|  u6hx*{9scG}TSLQٲe=#!!A'Oo+WHz1c8-M  7xO?U׮]ix5i$M<~z7լY3-HLpE˿Wُ1FСC%Iѣw *d :Tv풔w;ިW/^hz\5Mgyd,1ڲe/_M6i:~PrTV-=j޼J.mcjdULۼy4h8s/^eocƌkx-I˗Wu=@ʟ?N8C)&&F?rB4uThŽlzL>];wN~x~F V%%%… ו?~!;`B\5dȐ/$iѢEСEUhQ?Q{L+WLq; /s,R 7U`A,X225M6Mq /^֭[kɒ%ʛ7o͑#W~[>c^;ѬY ;YՔ)S/y)ZQpƍkݺujӦۗ:2ed`2dw+eZ%^78 ?p>}: /2wVXQjժi֭j۶˖-K zO|׎ixޡM4ydKN 3f)1!xիzuy^~FԳgO\RŋwިQ#S!;^Ǐ׳>+c /Ghz#5:t`s2H {}~^~F?nݪzH9sOd1v M4Iݺu 0!Δ)STZ5^~B_UV)22(WAk׮4\5fΜۜ RbB@^+IhxȔ(W 2O>Dݺus4BCC5gnd2 @^@đ^x@„ @A(Wy1 / z PP scyC{N sUV6'(W 2? 'Ntjxi1! PP ^cȑѣSkΜ94dJL+z Hȑ#;8«e˖6ט(W  Gz ^ 2 @^@ #F `0! PP &7#FP^4oHo: /2 @^@cyC5|5oT2 @^@d1vNÇכoN @fƄ @AiKqqvM q7劈ɓհn X)1%RN)dk4& Le ]5@璴T @6`4p)2%ȶ F @`@^4 Ȗ zq=<<\ , Sb@^4 kbyCɓ5qDc@Ν;8B ʗ/3gСCXc_(pz-gΜ;FTItI%\`*z?-[ܖ{ܒݻwEY>}_||d*ϟܹs۶mB ٘\ٳףTHk+}[J7;!i=q~9ή]t]w9o߾]ժU1~p\߼yjժec"pz S4HzmSpNT!r BCN+]Μ;@ʙS x?$M[%&J $/#Vi`Ś5Сv[b/_T2v?d*2-7WƌΟ; hPFΞ; h@HM/HҔ)!<ew g۶I&%/{Le i,SݓLV>kWGNhz@zfϖ.;OS87O+S}jN[$$HӦI7ڝV'7);S~; W*U M/pDž ر޽v'ŋ'7)3ѴK҄ Ү]v'EHRXIenN ${:LN; T)Lxi(Y N!ii^}M/ܶmҔ)R|I`ETvpҤIҕ+v'-ZHO=ewlXrvU]J ڝwI}fw XեԸ)5^`ܹW_ٝVEGKUڝ…vU}H5j؝"ۢV%&JSJ?dwX-*ew#͘!I`UtT)%^K ]N+InRٝo/K'JfwXQԯ+I^୓'>; T)񕙜:%!>lwXQԷ)^ J#GJg؝V<+vpvpr)u=N_>Dr$eK驧Nߒ:|$Y3gNm_;Nt7;~O; IjM/ Nj԰;E NxC~Sdy4׌fL>Bo_lYS89SZo_\9Sdi4.]J>۝V,ܤ;߮\I>gܯڝV+EGKy؝$ˢrGv'+&7)2ӧ#CN+ʗ|SYM/ÇNٝVԭ+iw gGJÆI'OڝV9s4~Mcefͤg;ݻ DiDzYSd94 #]+͜)cwXѩcvpOҴiRRI`EǎRӦvRhz@FY RZvpܹvU&թcw,d35kNʕ;?*:ZPYM/HWH|"I`E޼M~G 2? 
N; YSz-S8OiYM/VƏbcN+ONl>i8~C U6;iӤNl&i5~A $פڵNliSKzS8ki,S~A nf͒_?;Nli2S>G nȝ;?Iv4yeI`ExxTBv'ԩYM/'7)21cN+JN>+3pA;Vڻ$[լ));& .?nwXQ۝Ұaɿ[@@ \y1Nls.\; hHzS8;p@=Z:w$hz@j:t7;M)SkN+ڵZ;[wI_؝_KfٝV!կow g+WJ_|aw +4 =Iwiw gs$7*W;s%KNXF ܤ(X$~=yMN+BCEڝo ԩ?۝L):.\Ǝ; (Q"w*($xQ?^ڽ$hzȺ|L^:~\6L:v$JL}[ IDAT=yR#QhHzS8;p@3F:w$ᇥnNihYnjNjζl&O^; h&l&}gw-4=;+/;-djNX-Ulw gsJK؝VEG'+3;WZ@hz1ֶ mo$$HSJ?lwX*Q$3F1CZ$@hz7JHnRٝo/JK{؝V-*T,.]&Lv; *^*Uɓ؝V}wߩ#GND |aC饗Ncᆪ(o"Ev.^'`4 0j4E}6ł 7XPUTDu0 3{g]a{ hIHcNJN(4XxZtl""""""""""J@~S(u XHJѧ0h֮E'!RQt Eg۷NA5 Et Ex{NAM/""""""""""yxR'{NAʚ1h@t ENA^DDDDDDDDDDRkkIdf[.NB*V"L&': 6TJ>K__t,5@`$ ss5ed$:ID^wNBĦ˛$2XQ|Ɨ:-BCE'!-Ǧ*k+:Py"*JtRFV€%KׯE'!-Ʀ $:;wuQg_N񢓐bӋ(t,:"__[t RȑΟnd2IH EDDDDDDDDDTT݁ DP?oS&MDPtgE """"""""͕gϞ˗x)}HOOG\\tuuaff055ER`eeXYY|򰵵EEXD L<.:L&eat(: ) "RHHΞ=gBd2 $$wT^ݺuÀбcG HD3~~~I7_bĉ]hD@^@Ts$Y<֬/X4 =VNLLD! Ʀi\7n@ODZ%>>'OqYQAAA oJ*aȐ!6lg/_ .#\ll,~7]]t+#:i#:%:I `?H /7czzӐbӋ\D={#G-֯IO>1|k3f'|Ԑ;}={`ݸqd2H;<Ο?F҇Djdxjhժ;;;ёH[˗|Tt,۶ɛ;NBbv\9It@lzQS+ IIIGgϞEffHE֭[4h4hիWM6#ѣʙTʔ<=卯Di䒒uM E!Ml 64 ^DDDTddd 00 ."*RSSqQ8q222DGh׮+,X#(99'Nđ#GeTXQt$6ի̙dy~VMto+VȯE! µ%XYz50b,_/^dË1ydTT ñcǴ/LM6N:سg8DDy:s 6lcǎBڨE `D)=} ,^ z%: i˗aӋHBÇGzdDFF1h ?)))͛7ݻ7VX!: iO? BÇ5@\$)?VbcE'! DG(6VZ֭[όZFF\]]51>Ct E/7NRzw -Mtlz0h޼9\": Q/_d2QHL 8:NaKouN?]D  nݺGt">>+ұIDE[Nc윩S33ibnbVQdʛg}ӵڸQ\ /7i FR{lzSzzzGӦMѬY34k 4@ɒ% U755wޅ/O?iiiΟ'O`С8~8ttttl"ʝڷoVZyhܸqflZn֭[c֬YǦMn:DDDI???]cǎ-:xP0^6oƎ D)0=]]I԰/B:tHt,=&LAFlz)IWWcSw'''tA%6I&hҤ &Ol߾ׯ-x9y$֯_o$"Eԩ:t耎;^mѕ+Wٳݻwc RsСCϢ%*2nn&ŕ+d9z(WNޔgx5u$YN,,#D'bBWt"""6l-[cݺucGݱvZXlzYd+ʔ)q֭[E6md\2e BCCl<"m-Z`Μ9z*^z={`ܸqW6>!CBҥU:ׯ1|A+wwzu) ?.:)][t Ev׬FRkEDDDY&6my0r A6m_`齎;… 8r&L0ѣG*H*U | >sCc300+sΩle˖ *(GJI@L4r%,,GGi 卯ɓ峾Az:ijRtRsEDDDZ xܻw[nڶmED ;w.BCCqy7N^p3F3?|||p)Amʔ)`˖-xۇCjLCvvv8{,O1i&'SJ&:V<ac~ԻwUǢL/"""EDTڴi7774^;v U6'uV,V#*\]]ZOOG C&I>1m4s%5Ҹ10}:`$Y""ŋ~7Qx__I| ,Z$lmE!5U|~""""*\022`llƍCZD)FpvvƋ/T2ƍ7pK%6f K^;88>>>prr6Qt ~-_ZP]ѭ[7zhӦ '''8:: M6/ּ~:.\X鉈T'66VZ""vijk׮գ"&N>zBt Ek 2CEP!f|MlzI0`U777亱L&C@@<==QJi?:t͛o޼QADDDD/^@jjdT"Y-"nޒjҤ %G$_/ۛ j{w)]l dfNBaӋ0sL4hCFFF322q㉉XbjԨ~z Dff&tu9IdJx7"* S=\N*Y-"SId-F%㴿?nɏիQ1e дZ'#3>8o길'-DPt({T@lz'p|(]k׮&_|,"""""UȀ5{S.3CDp=nZ####޽&&&%*6<=n!̓ _Fas6qwDPs'pTlzٳgVFbӋ ((HҚÆ i(L:M6ŵőҲeД3]HțmAyĉxXt16<<33I6P>ETwc*dɒEFFl<"""""dddয়~%$ID'883f̀-Zǘ9s&;;vv& w}036l@jqއJbbˁ'OD'|`ӋBBB Gjjtqqw7ũl,"""""lذ!Ǜ c000&i/_bÆ ޽;j֬  &`޼y*MT,5oL$:Hݻl=I m_,/56 ݻwَ%$$Oec`ĈWjDDDDDRzfϞ-iM##l vJKKãGc+Vѣq)dfflYfa*OTl} 0|;!!hߎEyNNȑS(zX /:Qq t~Sͦ^ *V(iM"NZZ<(Io(DEE!22AAA Ez/edd+V`E:.Q2l p$ 5=cp0zz#ܗ__GN5`F`xPt^D`lllϜ9׫o6m`aa(.HDDDDTXXb5 0m4Ik0p@1Trؽ{7Zl): 8QޤvMtǺc%̌E)8WW * |Yt,ǏE@rr=7~xڵK%a„ ڵ+Zj1 +)) CEZZujժIZctttlx`g':}?}=^e<=ZDP{7g5)6 N:KMMŐ!C0m4ƚ5kN>H^H x5MLLO?IZc4h???o033x167wG Ai':Jɯ)ssI6qVbӋ5jLCƍq"JEDDDD$ޅ C+9sFDD97n޼ۋCT|U"oRZKL*VEP \ <~,: }{ziElٲAQKݦ}s}f͚ݿ=z@1c :::Ep"""#z 000@2ePZ5Ԯ]%K,,wA|۷oqMDFFݻwjԨQp-<޽9,--ѰaC)SP#44111044D2e`cc"Co޼!C)i]{{{L0AҚDD9iڴ)\]]1x`2"I4iL,Z$:I.YVeLX!J$")%)iixHzgw503gį8Ө0c0$Y^_fs$ ɘ>}_4֭ NY5jܽ{7_?wΝ;ZjaԨQ1bT2233ݻwxi544D6mзo_ :TOpp0Ο? 
ϟǓ'OD(QBqqq߱eܺu 2,[=;;;`ѰWDlٲ;vKrF__-[ķ~CWWfm6ܹΝCzzzCݺu쌾}QC"<<\K,uB /0l04nXt"Խ;|85gʕѤfMԩT ml_*T)jfdfś7OVP?|̹;w0vrl:UhtDE6N%8X\ސ+[VtǦx‡IIIhso߾zΣG0m4̜9{ѣѥK7LrM6axI ___bƌ3f O 2>##wݻw1|o˗/GÆ %FDDD9sĉꫯ>:럈ׯO>={D۶ms=$4xIq$ #:6nu~j 3 [KKZZ©iǃ""p v MΪ)upן|RcڠAfd 7\]{0lzi͛7+|Xf->s*sSSSw^ݻvvv * inݺ#G͛Y[[e˖R J,x}dP;)) 111Ri6sss:u UT)tt?o߾Ir}CD$x{{c~EM"68[9~l\Lѥqc\X '~U+1DDL(6 TKKItItŦW1cƍh׮I:+W˗x +,, fBժU1rHɚ_7oƔ)S5eT>7Osmw^|7y>nݺ8y$^6lXYb> @GqF 85BʕQ^= 0Gݻsl̙3ػw/NwJ.͛7# nnnhѢ*W5kC_Ct5׺W\ӧY={fhXh?q0z͞=7nP:ѿ2220x`ܹsG7ou>&66֭zD$[[C%@*牱ʕ*ɥ͚j1F*o|TDk?+ ^ <|(:VґiؚW׮]ѣGDEE!::Hgf\\\(Ww)))r.]0˖-|icccL<ӦMR5ЪU+*PݻJ]kkŘ4iRk.\񜁁_`۶mZo}ݺuos=_n]<|(]4N86m|nbb"Zl<~)=:9ꫯecy& wPZ5DGGg;׼ys\r%2(00u} LD֭[1bݻwգ/((5jx7$Hƍի%kllW^.C{fffcHFGGO?T H 30s7x0)*gKR)5TBvvY@Ŋ0 7T:N&MpzR~~NSWm>#?s*MiL`̛7Wƞ={ .x"""bU5TbHp/^[n?saÆ4T :4[ 7ixҥKx+P z.-- Xvm7ooPX"_ /(UT!---_>tĉl /Xpa^`jj1cxڵk? o\ /^^ / +*d2ۇcƌHIIHt%IJ~A!^ X`°RW/G֭U:O@;1TcGDP ,_ps:5j֬):Z Akԯ_gAAAAСe SRR ]%ǏÇ6 ^JKK ШQ#.yH$A>}DPlYۤT)3 R8o/'R'nd9MqMZjV*zM6Ç9sJ*MNNƀpԩ|=>11K BWԩS.WXP/ ={~g5kRs=?9rAAAَ4l0}|}}T]"""^.\!y-Z`Œ%"R`` Zj 6v,жNv",5 ? 1n㔿モR'g۷N54ebbkkk1FN|hTT gFpp0N<#Fz}t 4O֭[͛u666_bEz¢355Efr=oggΝ;+~i/%s`zz:NoF^ z~c׮]^ҙԏ.$EٲenL8GVMDZSGt E{GN#F`7QY"88Nh)PV-zJt d(QBt '''899!99'v؁cǎ!99bbb0~x:t(mڴ)s-[,^z~ÇB~_SN\|9s7Vzjժzm} ٳgs}:6oތk׮۷8w~GmV%qA8;;+u3+|yyBU".XxHtltttqd8桵GnMUuYpK"bK pCdjjÇ㈈ҥK tZ*s~~~n:inn^ହ =F^ )L(/Jӧs=sN<o߾Wtt4nݺ///9.DDD1֭$kע&"bddva֬Y8<ñl24iҤH?s ʥ oRWŋgD'ɦtɒ7kJ`/"]^_Sᢓh,6@ ;j'ND`` vؑMǏϵuʕ\'eӫVZ[ =FaLR5Ke?猫S*W7n'"...ϱT'iX>TI 믿VIm"n+V gggڿ?\]]U:V [)^^@q/ +U_J^7*6|ڵƎBQh(t,a4Up4^Ň.Kܻw={^£\W?~8,YP?TD TT) U5w DlHD8;;͛*)S6ч:v#GhSV^͛7t "ҿ?Яn֭#13u+Ws$)L>AS({X+FBD5LQ255EݺuQNQ4£Gϴ͛jٽ133Çѷo_>|8Lkv<444礤H=KK\NjtbF|Ϧm۶ׯJSfff$""/99}.\~-~'&"M&Mp9l߾&L@LLJ7n6m뫤>;Ot,gQ(0#G9=p"ָAKTQ(⯿{bN#^%KDŊW\Y`!!! tuuQF TR+WFʕadd333D7@}- !!HMMEXXr!R~}CݺuQY~mqF,\|*U);vAΘʭٔ "^y}}|)5mۆ*UQ"""v6lΞ=ǪU$"\::::t(ڵk!Cŋ!C'J&ݻdٻWsI|޺5V}x>>>>IyL~Oazzz]4Y!QQo>۷/vD$\Vf IDAT*UWWWYFwE.ym"&^N# l"o|A0|I^wO^)?DN&Tǎh ]n3ϟ//[2 C 6kԨ<0̟?|dKm۶?>BBB0{lY0fIƢ,;=֫W/T\|0yF\*QD猍y7o.>lῳ=T)33#GΝ;UR[nرcgիO?y!$$DDZJ>KKk(Ա#ML$y}Iss5{ӬbLc^5kvUVlcǎիَO0ׯ_G޽U~7!̙ooo7k֭ڵkU:pW@@@>,s+f?pɒʫUB4ʕ|ddd%!"""m%0vXlݺU%۴i(&"RX @ʏ?(y]"fo/oRH`b <\t y6ּFZ_$* X(4V57o.99>|8/_^w2<_±)S܇3N>"1㖖9^P/^(td\UVMq4ǚ^EL&Uv\˖-qq>N^۶mÇ%KڵƎBQh(lY&I<^PD^JZSmjN(<tItbO^ c咍QPnnn C(ʖ-_zӧOVVV9xݺu(Wyܸqc4˗ϳ9~d2b*߰aC;v,ŢEХKIkfdd`…$"}N`:tjSÆ(%,OJZO|嗢S(a+%*mze@Y_N:혋dv)![dxP˖-+sjiӦ9>e˖y֓ȫպuk4Jʳ)xE$%%a""""3fʕ+URv8ydDDJWW[lukΝUD5 Yt E}}gaHIt 6 -Vtid;֭[7+VZ gffbhgz'pȑ";ԩ 0''<7}d^|RJSNڶmwaϞ=EfԨQgϞu5""uUbE_sNIk?݁ DP?o p5u*Ф$mz͛7kܸq vc(fڵ+}(r?E0}ڵkَuc^[[[j*>>>Iq#ڷoy̔IDDD̙3믿vJpiب>}W֭[%GD*V"L&': ׮-i=/@ >ߧ覗 ʖ-HLrؠB WVjjjc/^t'meccCCCc.lpAjՒ$:7ce$]p._>mO YIڟzuSKmC]JHHݻws=M:Cq\RN:3f8֦~<<~i3 b۶m9reN'ka?>OH2e/FD$>3Yk9rDzD6m)4I/A3=-QNz))2-['NAEr۫?$ڵk|.gw$kZ;&N( W⣏>QECСCѣG\i8:u*5jTZO&Bg3gyOdddmjժ3kvލQFiZh T:0 ?ghFӦMOD$'|"kK.ZBݻ% h>4 +gJ%JϞNAZbKKr(?߇Jķ~y+++)?ر#4͛ѶmM%kkk]PK,#?,twoqm۶^:'11NΝ;y>)) ˖-+t]غukB9s&ի5k׮Ev |oz ƍ_lDDDdVX.]4:͛+RH$WWWT\Yz˗/eG uKtֈ@׮Sh:}Xg]VlzgݻNAZ`KKիWqٳg֭[@C'''&* 93gP^=;6rsMt 14ΙbÆ / {Aٲes դ{w76שSww7/ލG}׍ĠA: ;w,p]Z%K`:t( \ ;v쀃Cם;w5ȑ#qͬ{!Ν WWW,Yq[[[ܹeUV/|o>lRDDcǎ͛7eG {(s3SMNi.`6+kӫ-oo?epR<~^~Z֭[s} -/_~@ƍڴs#֒m3'==K.Eѯ_?Z QQQ9Sոw,YmۢaÆ8tPlll}vO܍7ƩSPj碢Ю];;vuΞ==8ުU+5vZL6 uƍz1b-Z `ٲe5jj׮ׯnJJ 6ms4ܹL6 4q/+v,kƱcP||KJJB@@:uKKKTV m۶EfP|yKncc}zI"""2iժBCCeG [ 06ǏtH+^YYH+^(3a&ĉQN <|7o^%j5޽3gɓػw/>7k(y{ԩ׿>={`Ϟ=RJrʰϟ;fӦMvڷ.6Ǖ+WcvSTT:w.] 
͚5Cr吙\r;v5T*xxxyrܽ{y?WիoL/>rƎ[^K.۷c֬YZ]f ֬Y{{{ywq% <gϞ}iiixQ&UZ{AÆ Ϻu0rHE>|gnn۷C&"75jԐ^xxHaҥ=Pg1$@24CUIN/)=IIKJScc pZZneڛ/_(UlmmaffkkkXZZK|111ǣG\M4qI$#'''XZZ}/QFARaݺuӧѣ9Kܽ{@cUX>>>=z4LLOlٲرc///zc#Gȑ#ռys,X 0^J%K`9u+(###1sy벉DDDd6mڄ#F(ڳg>Ck飚5kZ/""Bzy@ =SԤ>]tltO`rBKZCRV U&5SM M/-o߾Oӧ ]Νu>fqRPZ5_uZl#GΝ;عs'?أ+7NNNh߾=^zxTa =zl2>|laa={bM4Kzsilٳgjiidd?>|8zj\v@ﵶ<<<иqcmٱcLkb֭lxQRreYfgR6²_g"el L,X :IpOJΦMɓ__I-*c%J޽EG(VV\ :֭ubDxx8BBB/^~(QL899Ut民>} !!Nµk8 Y&5km۾~4mT"#Ch PB Ə#""sBCC4XYY5j@˖-{FH^"""ҽ]vaСSacӧ쵉 ,,,r-mV!=qԡ'P!$@Ϟ@LX4 (UJaz%[-+>o>P v$6yԩ;&:J6mFQh1Ujz:%uSʕADDDz:t(eeMDdlll"I`ر#_NY7P`K DVRZ°arNBy0~Y'Odv! 0622š5k0tPk 333j)ұ]mD mMȰ%nUgϐ)>ӄ @VSP* b̙cI&pssdtORd 6LDD$11QZE&Ć Sgg)4m>,{ِ'OdWM 7P 6dnݺ VӧO5/^ѣG^Dr2bp$ I>Z Z\*kًZϹ|yYP$vDd`ll۷kדR|}}Q1rCRpB;VDD&)) ճ 6U$.NBBd+ymj@56VL%¦Lq!CtL4 cLO^RܹsGDDѣGLb&<D'!m4o.O"#{"RqBeLhk)_ ooo||Jo:q9t /_T?S*RZtҲ#=p>d IaݺÇN)(XHH(R !^IU;#GNA`Kf&&&pi4o\g:99?3t6&)իѣb ^`r7eGzY` 3Stưa@^Sh:^ӵ. /]M2Wt ^ypylٲE旅L[nCDDDDDu5tԟ9s&OHm""CvyYըQCzGm5aЪlYj5uݺPT,־ hFtM/T*… 믿W_rʲԮS[,uHׯsΈS'fϞHm""CVq)YkUm9":i ^]t M7m.\XY[Wz%7P%%EVЪU+미}6.\/ÇǓ'OrgҥQreTT CƍѮ];N'""""*ݻ?2?'ODD͛xlJ*'''JIVƍE²||OOsi$ii@@tOb&{lz4 DbKT*ׯcĈ9gdd˗DDDDD$ѡCDEE)RS6QqEe*k=Sqq?0s&": V*RbTI=,Z̘`7p-Y#9g,ibEDzvvvlx0AAAر#>}H#F`ٲeܗ(X~5۷o/k=c/Ia_-:Ǐ??|d}NB%FRtM/""""""ѡC<~C m}Xb^DD.k_z炂ŋ/D'!mt 2P….?۲=dYtNQEDDDDD$PXXtHE껹aʕ02_򒕕ٳgZڵ&sի IHC}N%`*i7Dcʕifb[^Drs Dz |PE47n"M6ʕ+lԨʔ)#kM2&:ihFt M7kFW׵Y3^;r:c":@I{ի Ǐ/^ôiD$"""""@CE6m GD8xyy^w޲$y3`ot.: i$oIT^WKjh-||X@(I ZGbƍطoaÆnzaΜ9 ?mقWɺcG]7c!矕ѯ)o+:EŦLn߾ኍadd'Ncǎ9ΥsQl|"""""ѽ{wܼyS[Ea~ `۶m^z!55USLc%`*i/*L[.~7n3]4܀'zZq*98s@G HɎM/<}~!bcclٲ8tƍ IDATZ7f͚x"""""*t/_V~Vp*UJDT|ܹsnnnxqJ>>?iii4iFtEprrM͢S f  _ -ر !C:kر(ee5wwC)6HVcĈ٘&&&Xx1<==s=waӦM:CDDDDDKHH@׮]q%E7j;J.H}"*ѵkWɺZ"L7nUV7ox{{Z1m(5JImh=aLe#KeU ] #FNBqqx':mmqQQt"E饥t[1fM4:j믡Ve:{&<<c(NRaʕh׮(T\l 88}NBŘ- W&: bҍ7r=ާO[WT2e .][ʚJ*bbb xDDDDDDDD%J?>Q8֬_[NC^ EG!R*eK7o۷nݺɞL2Wոw$&&&Xb<<|Xڵkc֭8r^leAx{ˋNMfN"J%:^za'M'N(H$6:55syW^}ƖUTQ""""""""'ǴiPF Q,Y[nᣏ>Drt||76+`RIٲ%[ơs&031I68 Kaߜ$wܱcKKspB,áCЬY3ܻw/ʖ- *(̙3AAAuf͚:uꈎ5je˖Ç3f LJCt4KDGN"K п?mߎm3f^Pret<OHЩ(bb ɍM/-n:1ZQFaҤIHIIQl@ݻwG\\[oҤ TKDDDDDDTիW| ܹcҥ EGSSS 8NիW1zhXZZEm[૯DԤD/Z[cPX:a^MvT|޵kjYw4xX<{&WE"-iHMM8V1o}O>>}'OĹsp5EWQBʕѮ];t{F2eDG"C4j ܱcd;u ppNy/]=[Dϖ-_\7>Dpd$?WdT]4>Ϟq#.GM">|xMKHH別ٙ򂗗DDDDNٲeѷo_[nҥKtnܸ;w AOfMvhݺ5ڶmvڡjժcQq1m \&:I;&ŠAJ-ZhNH@pd$B|8/^˗/äI`oo/:@6662T|Ԭ)5)f$[L 0>0cP4TX..R3GtlqqtONCTh\n:+&O,:Q{Sh gD'!mh!'/$DƦ ֭ŋCR@ڼuڵxc)4ݻ5E{wD$S,GKd(ٳgXt)Zh!:QNY`:@@ϞSh/`j #CtcKF3f?(lƗJ_Ę<hDt Mv[NAښ8QZPl":Q%3///lڴ Kذa&NqJ,oojU)4m;&:ipqBCS^ 2d];d&MŋX&""""""""*lm&$ْˁkD'!mX[K$RS+˗E'!z+6cǎahР"cTT ˖-ŋQNE """"""""|HM }!!6Vտ{s`BIŦׯСC8p T۷իѣGȈDDDDDDDDD´hxzN)"D'!m4i"O"#{Id":@IRеkWt8qΟ?/ CJJJ-_<ѤIj ;wFuQwE')@RPa}! 
^-:I `"`TZlz阥%z=zhGRRRSSK ,,,D$""""""""3 &8p@tl%5MƍL8|tO':I UD!rzvvvcQQxzJs.\$۾}='6Ə_%:I{jpI4p3(4o<4m /GDDDDDDDDDT*ݏ ~l >,:iUKt M[׬F"e6l؀+W`׮]:u*W/ҧ=RS+˗E'!m^^>.-sOcKK8zhs):nhh(^#˚5kаaC?~\񉈈V$>Xx@tFR3U$$AA`Kk5jt5fE KS###ѥKlٲE DDDDDDDDDTM'N)2<Ѩ0m<|}{H06K.B|_/FFYYY>|8g|?Bt MAA위IH:FN)$DE/: plzA۶m" NBr4Υb^cwo)4]H2<_t M/KƥNB%^E,:ڶm@-[VxLL f͚%(i?hJt Mq3ru0iTUڵkcٲe9/]Q@͚Shڼ8p@t Җ7*:[}DM"AA222vZAHԤ$[z:j!SSpt$[Fz5矢P ĦW1ңG6lZ- P' @P${J$˗ߢP æW1RjBCCqi(WӦNcO O{*"Bt*A*Fs=~'!""""""""|u):_E'!mt|BB .Nt*!*FnݺӧO8 Ր!@߾Sh|XHM1h0p]/E'Mbڵkؾ}{ݻ4DDDDDDDDDT _}m+:ÇMD m|=E:a":>HJJB\xycPݻy.o>@zShںؽ[t ҖShڱؾ]t *&ʃ &L˗/iӦhppp D'ɖ] 9#: iTItlj4II`-ԩgb;1H[I/Iz,] ܻ': ilY27$[Rۢcӫͱe|駢ܹsQfM1(֕$* E'!mԪ%-uOyPIȀU@&&&@.]DG)3gDDDDDDDDD$v퀱cE ̟ĈNBhBSX ^`bb-[raΝ={(DDDDDDDDD$77)4ݼ)-K$: iO`P)4ݽ ,Y"-ITHlz6m=YCժUCݱvZc݁Et~#]Et4nVNBF?:7Yf]ŋѲeK={ȀU.5)IL 0>*: iZ5Kt Mqq?": 6dpelwcǎa޼yE/88qqq9ǣG|r """""""""ֲ%0a__ *JtF_$2R>^E޽{#11@CVi̺uڵkhڴis/^@=\1 &:%K/E'!mt .: `" !AtlzٻضMt ք @VShڿ7)H06`ZQ KƦM`nn?0""""""""""T&:#GD myyիNif)H 6c>|sqq;#k =t̛7Oֱ*x{66dKIV^aeeʈN-- .^aKK@f!לUT;w"y&MJʕ+ Yj|铸8 QSϟlziܹsV}ԬYfff{G)Zjh֬Y㉉8t"ci\KDFӧ67Zt M~~үT饥+W8ɓ￑O˗?ѽ{wE3c]qȀt .: `b IH;#FN)8XPE%^Zxmoo~ FFJMLL dF'ODDDDDDDDDd zBӹs@F$C>}Dtjlzi)..N!C`ii)(&M(l"""""""""-Eд?oS<<6mDt y#lzɤu֢# 111joq"""""""""{@Shڼ8p@t Җ7PlT饥7guUTIPl>ܳgt )#:I4 |Yt҆rp$[z:f p$06dmm:))IPlgϞ7$""""""""\U"5)6*VDД,ZNB bKK*Tx}5AI$7n+W<4DDDDDDDDDdP7LBǀ+wտӧ/!: )M/-U\Y둕%$ZɓJGi u !:``Biww)4@\$6TF wŏ?($95:JCDDDDDDDDDkPO)4]Z%E 0@t M׮ITj$$3 Uƍs9s&2220k,T*3cԩX`[ZyIIF )ӘSDSF%KASaH<}[n… Z7ڴiHDDDDDDDDD%TJ0i$Ef\͚Pa99I/OO +KtɫW%RPj.V/Ec޽{ѻwovZXBgVrr2\uF`mm-0QRRRpׯ6l}Qaac樃;9)4YXUff6^Edn{htΘ| gzAѸq/3(B2eйsgJDڵk#EG "zDD21RR )8IM~O5:z=hԩ#c5.EDd8ӫO?,+++L0Ah:aÆׯ rs_!CQ`""l.^ΝH6jBuE.NV-ww5_!bIQD-ҢjPE[ZFդZrί۴k,.:Lcߗ !BBV ?>SK}q]^N~{|ŏ M/.y ѡCIX>}:>⺞DD C&Mxꩧ,<<5'NE .xADHWW#+/7x]_KJƏ.!g+q~k_}=]vxw3,d2Ig=qtL&Z5<`*djՒ0+(V.B:OqIO)(_<֯_eJQIWt+,. 
T 89I؟ҥkɞaÜ5!;'"#u\{uؼՁW~t,\wlX C]S@Nt ,^ڸ|K'شiv튓'Ovlܸ͚5k """"""g@`0 ۴k0ctY^zMd{~s дi@rʸ(_L*s__5S8 0POթ#]CC`,uMխ+]Crسg:t r~|7իHGݻÇKWX ;wKH]Q+,ED%ENر^nݒ.!-ڷ&Lt0{q襳+bCaÆؿ?Sl$"""""""+{5h?rXΖ.!- tlZ֓l/HWX:sXHO.!-^z P9-JjZt >P,!=׿uk6T%f9^=.oHDDDDDDDdKvF̛$'K:cJWXzU]SII%E "#Ր"!Ah8Qj@/]B4O҉e@Vt iNRKeI3*^RUFrjy:=}&]a),L-ɚ*]BЋ0]+H1cS_FuըQT#ٷX8Q !]a)4X ϗ.!pEDDDDDDDdL&۷KWV&H[+HS~FeEidOҶml0mPtYn.r%pt i_5jH's.!L&SҚ5޽մi@z֭SO(AVV~G|ӧ|}}888L2TG$j8a2aQ=N.^.!-VU7KRS^Laa%E@@PtYzΝ.!-*TPהtYFڇ)C/+:y$Fjժ^ϟGRR2~ۄ3''o~>EDDDDDDT tSrOxC6_?` KΩeӥKH>}^dMM.!-zxCRxRRKJt4g+HNNv빹>}:z4lر/#g`: j(kW KWV9Á=+,eCᗑ<\ J(zdѢE?|TRXp!K8=`#ѝ;wsθuLj/#ټ[ jTys K|l"]AZM l)]aiV 8XWM% ^: |o~GTPᾯ=z{Ffff88W%fW_"]BZkKWk{JV&PtuөdL&K҆ ?JWzQJJ f'sغu+ڡC0hQZU )KRSE9d{TQ7p?JLz9ҢbEuMJed}O.!-\]5UtYVlptIWEݵkW]KkW0||?#ץKHFiӤ+,%&g%ާyҢn]]SoWH=ݻ3ghÀM2{t:KWXTCҢ];`D KׯfUmڨ$*J KH-վqFrㆺbbK^EvZJg`ĉѣ}ߺu P ttZlO>kIWXpA-iuAϞС.],RRKH=å+,AA;%vC"XftsARW_ݻUDDDDDDDd(F]HWXڳ0`p{w Kz5PP ]BZ~ʕ@nt ikjHo$G˗%vC/nܸÇKgnݺzjj*6n(PDDDDDDDDƌWWW4|oMԮ-]aVPڻW2u+,[tie2^^6lv풮 M5a~8ȑ#ڐ!CpY\z7nħ~)S`„ Xx1;777ݛy~Q Q,]b,Y"SAoM22ԞqOKԊK̲eˀǥKH gguMU,]b9|Ĭ C/.]tkXj֬Y__߇7&L{SjzRRu?٨Fԧލ$!=.!- e#u ;zUWxTr20>pt iFrn8ڵk6~x 6~Ez'lٲZTT#"""""""֮_fR0=<L$]a)* 9.!-ɓ+,ݸ11%Eԩbbԍ%vC/-~_LL+'ʗ/w$GDDDDDDD6o_W+,],ZJzCJWXt XHI.!-zιsG5JRDz03SqQFF[n5k (/^|5#""""""l{w Kz5=mo={JWX:xX ˓.!-^{ Gґ#j_&=/$]aq4ӛPٞ={ҥKc ٔZu+ytii#]a;+Hm+,qti5aоt˗ l^+Wgdd 88_s/n] Kk!!U@Ptۥ (L&aC K6;vHWV&-]A:K?uYxG!>>_\r1ͩTI )\\K22%KӧKH K̲ (ʕSהu˗89ӦUJN8ҨN:x"BCCEZ6oތ/^"""""""4hn(ɭ[ܹի%Ez2^<=>u`U5jkAtF 4{ڱd :O\;V]S{JziTT) ,x\r ZJPt@LJJB%ٳ."""""""0]nub ^Q@%ɤ%f6)GMOIUFz<&П;']B `رp(}6nݺƻ9 IDATd^ХKxzzv<""""""B,N.!-ʗW7*T.1/Wˈ)WN]S+KeT#=>'' @ "7XXO5spPoЫ|}}M:Ǝ+@DDDDDD ;.!-M̟DDHꆲܾ /Kj{W]S.Iժ#̳%z_|ҥKKg>ƻ8*͛㭷ޒx?Jb""""""2Eaä+,')ܑ.!-uFt 0o,]BZt #]a)"B=휔$]BZt/]A̚5 ͚5ΰ0dK:~U@^t i1dлtÇ~L:Nh`ॗ+,?,b嗥+,:,] dfJJWcKe˖Epp0ݥS 6ļy3` wm+,iti5aоt;+Hc+, [']AZ t"]AC/x{{ᅦh;;T\Y/_ti4l(]ai` * hXƍKWVӦMJWX V zJC/m9͚5oooJf&lpt inU$]b;zT(S0\`J!ҢT)uMU.]b|/%մi@zGA݋:t#GDDDDDDdKsҢNuCHn˗KHZwMݽKKHj5(]b ,^ IUkYC/+Y&v܉%K?EP|y| {ŒHnfbcKH-)S+,ݼ7KHf͌7hǸTTt iѨzڙ Ǯ^غu+^{5VZ?OX=z4.]O>5j0a0e8 DDDDDDDsG 9Rҕ+/=;GKWXPO&%I:IWXfKH~[n^IIIԩ 6󈉉1g4ogΜzMHcqk'''t .ĵk0|ڡDDDDDDd/VR{<Wұcj/=KWX:u XTIHu]ٳ%@zt iѷ/vxPNNz#G<{1p@;wXrvv+W^y ñcpDFF"&&YYYHKK*UWWWԫW^^^hѢZjrYH!%on%f;wa%Ÿq)KvVהўn3zBBKQ#]BZ۵K`Cs>r/D^ʬTR񁏏O&lؠn)]BZL?ptƍ[6MP>ȇr!]#Rq PbX mTjN.)nyÂ,X౿V!""""""BV.!-ʔQ/\tC%ERꚪ^]Ch|૯T#N@PtYAzj ԑ(nuEDFF>)Sz1DDDDDDD@Ppt iQRɝ;@xt iQ\@3SSE0COpZx.O5Jԓ..%%ǩS |+WÈAAυуܼ iL )$69.!-6U7$.NOEEIo@̚2 wM0v7JLL|X"v\?ڶmZjlٲ_>|M#""""""?|?_=EK`h Kܹ@Rt iѱ#0ntH`lٞ~[j ]BZ< 0itEewC&M}ݺuyfT\Yو,fz_dɒ{'CDDDDDDTdǎ˗ /]a)`R 3S_]WFrӥKH}W_tZ!z (=֮] ???$&&"//uRJE>V\+V# `Nʰa%Ÿq)KvVF.!-FVTHtYHt i1bzjn55n ]C5lzڹSD'\I\rТE ./Ct~զM+]AZ=$8; * @g$_ |ti5e }dغؼYz]uk^\1*&&F::rr+ÇKH GG5VM,/O]S%Ԫ%]aVP^mpL&N KkF* __İWݺu{-77_rrrEu"""""""s åKHͿR% ,ZIzȖ'"^LK*ӦXQLLF3YҢ|y>UtI`ЇIp9]äDEE!::Z:Ⱥbc365mn(I\DEIF̞ \&]BZ4h`k*1Q]S%Ezig:zUR^^^v>Cff.2ړ^[lN """"""*@Rt iѱ#0ntH`- +,] %3#]a)*J}#.NhZEVewC/h۶_OMMdBÆ  K!i蕒YfIg'ˁ,Ң` KgK%K/KWX:^-u&]BZ "]aE$kJt i av.^}yףQFhԨFe˖!44IXDD}\v }E'"""""f.`: jsg K!!ڵȑ@n ((.!- S # VKH!C@Q:zr!##/7<<X|*U ʕ+J*sn{="">>>p)o߾k׮!77XGDDDDDƖdܾ}?Ґ;wX| *|UUe i xx}J&ګYVT%Ŕ)jʓ'K̶nU%ŻkQV5ӍxyGwtݱˡ^u,[L1n߾۷ok .h>/ѣ$&&~]v FLL nMJPfMԩS>>>A&M*Ur"+Wj_=&|tRPY;Jא&@2uԐhO7ڸ\DǾi#F11@t֮Jڷ/֨R.Vާ\.+v9wy+V@~~t ٰL\xaaa77n >>YYYsp]F*VRJB ]6WOO{lڴ)P]8MDE3gܹs8{,Ξ=ֲ?xڵk^^^ر#yt^^^DHjqc*,";[FIMU{1yx>>5TX* AgB=6`4gp~v;[anܸGȑ#8upڵBœv~+|||O駟FN_lҝ;wp19rGѣGmOIDD"""j*@ݺuѯ_?+h׮d䊋Ӂ:uk7VC?|0@15YZ6TӥKٳ>Փ!"iuߗ.v;O>۶mCbbt L~~>N>ȑ#qFwݻ5kk׮֭^xT^ػȺo>ݻ{ůڵk3g̙:u`=z44i"F%ի9t V @Pc}VZHOՐjUkֶ;g\(`L5Qj//. 
vN5zj~ʐ.]‚ 0`TV -[ɓuV`ڵx7QvmX~=ӥӈH޽SNEVPF +X`Ο?o?ٳcvG6M/ vYF SKIO{CHWXxQ-ɚjȖt"'gϞOa2Sؾ};mۆK.I'Z^^v؁;vB 8p }% -//۷o޽{g;wD7}iӦ裏п$*IBBԓ^o%]BZ۽_0@Nq6߯3mϰaڹS,4T5~<}~W-HnSJgQ11l0T^;vČ3lrg)))X|95k[,HDr]sř3gJΟ?_~m۶Łs$k`V jy~3NjFٲE"ݶ  "xW"^gaɒ%(St ,//?F5jo߾Xz5nݺ%fغu+ڴi=zIDDh߾=|Mܾ}[:J`j& x) dj5k={$ (Ǝ.ߺuO?IWم3ѣGh۶t ĉxQV-t+V@rrtVڵkZh{l@AA+ Tii-[Ԑbz 0mJG۶M]SK&}.1{2DȦ'E`OUZDDDd0YYY8p VFoѳgOdaoL&SҚ5= H%)ׯ~Y2 +,mtMˎ⋨\tL^^FkJ= )ʕ rKӁ%K>_dXvǥ3OFT#=> @=jYYeJHNNFʕv0͛c?FDDD#Ff͚ڵtlٲ_>֭ڵkzpww2e"%%HLLDBBbccK.!<<Vɓ'{a >.1KLfOՓ?y,]*Q8IIܹ򒮡ª[W L.1}7O]SO>)]CM"WBB>3lٲׯ_zV_K.ui"T*Uʕ+lٲpss  ::QQQst2rrr0p@8p7!BV|}}Ѵi{lРjԨ p|?TpIIIݻ7?77b??=777M6hӦ Zn:u98vZ QPP#F… (sR1G=a~tYXp!0y2Pt X{78 ]x8`lO[%f/kq-[eFzz5* ?:U-CLd'r蕘^z=p<[v믿_]:&9997k~~~ *UN:SNbؼy3֮][O]|XlY ;v?N:E(]t=hݺ5Zn>| f̘ŰHbb">c̝;b4|%f0~"Ttim~!>$l߾'OFV 53''' 4GŦMkѢEyM IWXڶ ؼYuo Urfv`& ղFcati5~<Сt]u+tcwCl^?""B*WcǎYfIDDDWfM;8t"""`兖3rqq|ח_~iyG۷~."RT!C`M6r=3g`ĉpppڹrrr`㓠n] K?,]Qbm޿[??X4]XKm`pM5iJWV&#]a)8XO5 N>]7^#DDDd˗ȑ#h̙3 BllUAT… ѭ[7899IgYUq!-wFnIR0w.Pa&.Xd?zu`6:m`|eҢvmėܹ,Xp_TzH7MI..^.!*JɅ K.믿ij>ڵkh@xWc„ XtV 0WΑ?V;>QIv~Ȯƍc޽V{Zbrrrrl֦ /]a): KJ>!!ye˔Q/ ݸ̚<>d@O=L*]a)&9S][d{|}Hԟ}QQ%Dz>}0zh4nO>$^u޽e~! tGAAaҥ8p_-!""*Ne˖Ÿqp%sVc8,صkW8ܹSA tKԓ))%vNZΝ[c~Eti\"+ S:d{vF̛)Iԩ0vtW9s[K4Wr d!<<k׮Oµk_FIVSfMWZ%:ٗcذaw׮]o?ہM+RȩSXYaFe+; +H+,_/]AZt,]a)$$z=fWZZZz\X^\×Jebɸr OnXC GGGݏ}%8p@Q3<<W^d@&%]aiO*-3g*ozmXU6mR/M&#]a)8X 6}$8غUPnUn^+((… u?,\xQ:j1rHcƌpwwN*v;wɓruYDTrժU ?t?u?&\tYVZqҥn..X;:uM9"]BZ8:!Eժ%fj9V~698?IAz5t cW ?k׮\Fz;w*OAӦMqY,[ 9qƺwݺhر(UJ߿z~%Dݺꆲ$'WHؼcqH[m͝;@P.]BZԨ%f))…MUӦF ,^ t c |hNN^z… hC\c옏3 lٲ1c u?.lGgЫf7n@P*ҳ/²5_ԱJPL 0sũ(xTB0{6pt _Z?|}}ѹsgZ c˗1| """*&}A6mt?}t?&Q^t=^%L9wHؤV!g'',4 Fz""yӄd{:uƎtKH +,EFs AAAz~~>݋{3ϠI&Fj *B \.1336mXYGľ}0yd9} 0@c۷CDD]vxQzk@b"PtuڏiLq8, sHǘ>dqLn(<(]bcy,Y99|hV>8P*ٵ 0ӍxƌQS}~Y]SFIFk知KSԘ1ώ7vb"""")Sus%]GDx'u;^vv6u;ِaC K6֬H?__NcWT#ɤ2``V jTys K[_DdC/?~<ʗ//ADDDdU=zxH^Ddz>Ub+n(W,]b9mTdc""s$ҢC`x KjH ]BZkL(]au5L.!`C/~ ggg""""{蕚 ##Cc)SF!٨A^t8l%]"./?nn;V*td (~{;KO.!-^{MRXZ$t 6lt jI6^Dd )))vpppxd&Lڷk~t͛q{R?*UҩȆ3nti5jz:H֬ KHÁ=+,W"q%bQ^=""""]UPA4u\Sc09F l.]!Rt4ߚ5E:Fz.:ف͛o NUɖ-٦ɓ-+,mݪ#Tb^s'st\2!<<\c=䓺섓0mPtYn.jptI/(YQ%]ߟOuQ~>W/HV@fi{KH+ [WҺuT"!%jBfŋ1f.yHDDD6/33SQvv6"""t;^K}F uCHKKoΞ-1>>^5kTdGRSE9d{TQC ggt׹s%EŊE!|_~%ƍg}+VDFPPLq`ܸqhذ!ʗ/&MUVxgO =6o\,=Dd|z-$$DcUPC/zΝ1c+,EDs窛vo͞"X44 Kȑ"#Ր"!Ah8QYlU6t(`Lb(`$KFVЪU~O^^ݻwcҤI}Ϸ~;Z3բwTTW.BT+VPĮwE &` {Î{DEPDqH&Rso-ֻ8{q'55U9&&&r?6)) J^bŊPb#=zÇx%իWLܸq7nIn:%$HU$/LBÇղ>zDC+\$ÇrONN}wɓ8TMXZۛwO7mߡ~zLN aZA88cjnIr~ [͕̚DEUH:::J.kӫL2jF(]vhݺ5tJ]vaܸq#55U/nnnXjhY\]]dtaL8?|/_^dׯw.R:B:(Y^XLB۷o㉒{[nݘ"n * |w~~f 1[*U aHct%Lz|F^Ν$ǵk90q"; )aÄ>N$͛I.#$6W(JWW˗/'I5Fe˖}8p N>;w`ڵȵv%' ---iSBSj_~"Bp(M\1 ?_KK `h`0\\y :;QԬY@ƼS:rDx"itaC)9~\Z3eF֪iڴ)*TPQ `ڴiƓ'O7600@ƍaccSSS܎;"++ @``{T+WZBrT' 7ƤIG{>DVV,\/^f͚z{ӧOEHgϞ!%%Y5j0E!w!b @yR8ڀpP*{de;w 3)lmyabs8vR5v F%0x'dg ˙NC ̘{;I{1eg;$ 8<?ǏARj*SSՅ!uuaT `Z*. 
+33T(]&&k6?}oވS_ ;w杄h8jzizQK"7o"::woB2001qD*GѣǷ?~V¦M iԨ/_{{{ޜEԲeKl۟CCCkILW\uf;k^ ;B4\fdS:u#]4i7ZCKK SNeR1K\PF cnԪ;R>}7*UƌaHLMs.(wv$% {ƙuNC dɜ1M=́ x&":^ (a_0 +ssԫRMQLFFӁx뱐 l",ڤ 4DtҼ#<`ԩx)b)ccc\znnn 7rSF _Ǐ/FݻwǍ7ЩS'lxB صkFsfffbʔ)6VZm_CBBrՄ>(RA`B#+ݻǬ^^Р_t#JSG,%?߸:y|U)S`bl(Ԩ!H`J[I"VR^ q=BefeƳgpݶ Əe>oڅn1kx@ZFB>}±7p.?Vu<"cc?A{֬y'!^>>>~&N;KKK:uJeYV^-3c]]ZZZb޽(Vhyx5kε\%lԨZjsGo1GQ>}M%7---k׎Y=BHѵo>{`ѢE"M`$)d}+4)^DU%˗qujto?i(j8:N!{` FO3BCww,+DXc~<4৯_s ',CKGGw/YRI`L!&#$DCQK4nӴiSޑH.Z|6lyƏɓ'k 37 NNNS*W}" !Eff&zk׆%zĉ1b% Z!yߟw YO%%NR(QqqpZ^%u_gO` )dx!,; QDn/N!k`:,' |QffqK߱5F@Iu$ j.ȑTD` v- D3PK ZZZ011GWJ#ݻwԩJƓ> |۰accc$ȑ#ҒX9sϞ=cZsR&8̜97ofZWWW?.!pq|wз/$:q_Ta\nIY˜wGқH3}֝;8vLS11c~9z{ˠA(idN>uK5!q065a= //o?W\AGG>DEi&n~/{yիWs=-C) U-[2I|8r֭˼;Znͼ.!77ʊw=@"61X]^zz쬒6TTw Y{/NAT;{sx /!9˽a=|8V9f⹺֪ 'ӧy ^&88wRWQϥ,۹f͚*NBQW݃ӚCռ Odgg̙3hٲ%~'2?ѣ1{lu T,:ƍ,M۰"#1oԮXQ" )UJSŋN#9Y3#I"&f<[h2q"@CCaLF[JkV#Qk yD_"***c˗WqB:ƴiӘ/!D߿amm D>}i&jլ)\H`J[Iv(9ã^*=`DTTTz5; QDT)֬{y-Z8:b-y*_^zSu@P$DPK!%%w BH>򛍩, !ׯ_gZwިVӚgbhԨ*Us犺 A֭GG)d{X!4$"1%VDvv5 |3BCww "w&M3yxx>NB##3˽S6Um Sye&DN)hٷowTR*LBQGɘ+5 ! HIIA\\|/_ << ‹/š6mݡMgEWD*^:6MX⌳6ǏJ՘ڧgc(u&̰ڽw_63fHk<".]FΝ ֮fJ䝆G@` ZLC CÆ?NS[>\^OO`i-H 54D@@:;!oY$}@$`ɒ%xӚ?#6mʴ&!D.\@Nx!֮]QFBQ&1u*`fLp\Gt5' IDATR5*.#F . .(9;IaLMЌZ3t:u$9nmLx!5>΅ɘأ8PU./¾qS5DiB 1z#'X|9R[˝R4o"0kШvT]BYkalh(1h֌w Yǎ>>SE99-[N!))H2231sl|Vs@QKb̙W޼y;!D#BPRR6"VP>}ЦM5 !D^FFFXbnܸڵkCHWWB)d\n۶'١.ҫ+P ֬FR85N杂OooL۰!@Z Tij$j^j&-- DNPfMxxxEtB{%(bܹxӚ`ZB䡧qט6mthy-"5Jnn@⼓HNGTz[/^`ѣJ0+Q+&L`HM VjσE cJJ{ ޽; #G }\lܺ; Q3R_ܹsQ|y.\P҄6xG cڵΞ=Uv1!DaҤIx6mڄrDHުU.(KITz5>,c2rD.UQ*5VL_1E&kAA.L${`YnQ+'8aL~; Q#/JMMűc???jr"#88!!!D||D (N7$9rT>xhlm6E кn|gm&hbm AC׮aۙ3rŒP`߾˜Ui|=|l,.Vw"a˗رclق(qQ{aĈxYǍaaaDFF"#/\o޼֭['Y.pB͛7}%-7AHo>lذy*U`ŊB?|ӧc]6u놾}y4ۃ3&ݻ8v 07 bZ{@xլ ~bHC99 EIaNBS)Sשy'q:5|8:hW>zn^_3x)el1aq7eh(ƭ\}5a \,rpASGNB$Lw(%%DNPvm,_^Hٿ?ڶm+*Q&Lg"11x glٲVVVԩSqEUAi }0## cǎe^W[[;wD%&ܼxhٲ%*U}w,B T;}癕{?W69;CG.9BӼSE5kN!nW a~{TTijvgeK8Wc QIGNA$fzϱk.l޼_~9sٳhӦ 4hҥKT 111Ell,"""p-ݻQѻwo$%%1=k,iӆy]BGhh(-[?]vŌ3о}{ޱ)cc@B4`w%Ah)9`vhTR5sTL 44aI:ssY3iHa 3Rl.-E?!uꄱ]ajO U,-mLL c<<08XsN߸͛jk cjt -;T*+K坆H5DǏK%Hݻwcݼ0; <ϊ+lx8Fa@@.]uVZUVywpp<Xd :v(V4Bǣk׮M4E%‰'p nK,Avx"*KVV"\m(^~V,VURQ0oj9*ilIrkT?4)".NS@ {ˆΝQHsZkkY.\ aQZq^~N鎩2"^ M4AӦMhޱl߾kiiDž?y߿!7ggg;vy]---l߾UHVQUA4ށPvmlٲwBS ֭bc lYɩJ~\bHraSp0ft^$ӡ ^JyFx]hkv.٘;v6mE;B޾VrDhyC}!A*za``-Ze˖UahhTTÇxΞ=0Fs~ |!6_nܸqG!Z k֬[cegg@JDGGcر8y$v؁"LCQQu ر<Е#R45r]\WG7C)=`VQ/שÇy'!y3w(K㲇:ϝ;/_rׯ#(<˕?$)Qί'O//Y+yRP\\87 5~x,]'33!9KBpt.(Ki3g}s6gDaիpASRݨJܲe52k~ر˜xv7~<$DUHڻw/y!M67a/͛zkkkhiizǺYj,dgB|}}1rHdee1]re8p $D4ocR+>>HMMEdd$m% 39Waz 8uQhUWW`ta)8ؿ_C1nJ$$'+un-Z6mA1$9TE gx'q0zD42©%Kld(c~=Æ<..B㫐[f"^r7֯_/ׯ|rܿB˫"Hڅ 0ddff2maaӧOܜymB_%J@&MTrt~o˗q^%(ѣSy QHӥQZm0Y\IP4%F''j\ 3OMFtТ4s23;1պ54jǬD Y:IJ_I:u @u*< %m; Hw) aeeSË%ڵ /FFPBm[nů;!̭[ЧO<(QΜ9CKB5 v»wS*{IHH@Ϟ=q T \P`Z (Ca_eVO|XPErS˜SBB[B t VŦiD}ҥdn.NIiD`Fij$*'W9iʕ+QN4m^^^_ưvڰ`޼y>޿+W`ԨQЖ.BǏѵkW$$$0mhhǏqkB`…x]>}9.)) z+)-EH^5fBVx8]cWDGZթqݺ)U^=I!%>E3S$"Bx D- CV}YSz H` [I't:֗/_Ɛ!C`eeӧŋcFXh^|ϟŋR X&==dž 0rH$%%D!.] ::ymٳmi BxBKUƗ/_гgOʼnzB5w YAA5ߖ^~,N߹TI==l>ZZ,t;7oի_y'!h8w Yo+WJg9O5fǡW{`ր#+{`*F"􊎎ׯ:`߾},5Ċ ƍk׮˗X`j֬ @X~;m&J4Hvv6h"ڢdɒhҤ &M;v ##wDB[hh(:uO"l2͛7'ڬ5qU4h@~cƌ>!L ;{['ظQr AJ#rg͛I"~IWR0&BX4 Ay^W`왰ԡWq#Wvv60`XZZbx)Xy244qqDDD`ӦMV.wSuԉCBBŋ1vXXYYiӦXp!;!(ӧOûwD鉑#GRBզMܹs3f F Q GGw)9s]]䲆u+Wl.L(Б }x ;VI(%/{N{FI##5 03FfPիΝ@v6$DL+""˗/G5`oo+mmmn6m§O~tzzz>v*JH[QQQXbjժ;;;lٲ?~v;vļyo>ܾ}䘘B.] 
P;-[)SRB K__8y$J,)9̙0Qjœ׉DpD=slBqqBpDQ..qRrp0j&0{œ9Rrp D4镕sΡo߾PΝ ޱTNx=_qơDr?r6d,]+Wƌ3oʔ)ggg\xŋ1h 4oBHbccǏR3g( !DG ׎3󺄈B__lj; `/DZwoƆA%]]Z4$923I\]rxȑ %!ػ7t^/t Ɣg0 Zt)W.]#x f͚+++jrl"cǎvpssC¿([,֬Y\:t.EHQ3qqqܹ3޽+J}''',ZHڄ‚ _.Cʕ+" ++͍w LYK#!J)SF¶R /y'!07Ɣ>$9=D55E ּHӫT)ὯxqY, $D$NDD;.\mXU !ѧOݻ7?~)SАSBBQNbb"z;wRԨQXr( !+(-ŒjR0㋓Yt(D3UFD(&fM%%ʕH{#k3 (ɩbj\r0޾坄L#^R[PWW;vɓs=^zufqQӋHYFF뇹s"++KѣqasJG!KJJBqU8~l޼ZZZ'֪W___3/]Dz;V姍0A;G˖ *fڶ&oU(I"Z_wB"2wѧukh36[ea"4pwDcid+88w’BCC燥Kf͚1;ݻgj,GGG~{{{{lܸ.BZrr2w˗/R~¶mۘ 'U3֮]˼&!' @5JJ5Wয়}y1$%NBѫ0p =:LLD-(KUAԽ;0t(0,z%,; Fns1 c~ }-[e˖eVVΞ=7~{---xzzҾ]}ҥKҥ G5o<!.<<|'B6,Gs]|ADt 0ǎ~Æ qIRBxpuueZ͛+ӚD2e A#F($˜$GB缓E*%4) y'ɑ$ $V7|aS))}MIlY'kzEGG#::y]o^x9šyH%)xDdg+8FH>gff֭[A!ddd`4g IDAT8z(4h???RBx5z8Owuec#\Pf%_ְaŊڧ8DjeL)%2XxwkkI!%_+WoN"Yf%JD%$'3*U7~V޼ᝄ0qM@jԨݬ.OXx}M-9?/_xzzzͫy-{ܹSR_O !D2331b:tHjٳgajj*J}BIKK #F`Z֭[LR&N!CVBak L;w&Ed$$D-[NNS {Mi(+ssfSR 5cۚ  W֤I;v Shf>zqΎ\DV:|0.] }'HMMŁ |7\kTRyx"222[Qzz: l?zQQQf$%%Ȩu-eбcGBĕ#Gb޽ԯQ.^2eʈRBk׮psscV/ Y-BgaH7)eԩ,ޑݡ/Vy.U kՓѫ02Ϟ KN(pн;;IW 3\D70`V+95Yo~Qxҍ큁uYth\KGG=z@=c_{Abb51c hBĤ(t/_^BBB憿K,prrkײeгgOUTXhh(֯_S[#%%ƕ+W.ϣzGӧѷoBG`` ߿qOOO :NA'I&aݢԯ^:.]_'_>ŤÇmF"DeƏ.(_; >8? ?LUdZ;G /\C cIq+-`v`dD4ՠ~ظq#ñi&/ԟ:u -[D&MU!T9pwwСC!Ǐk׮شi\CvsB-צM|Ϙ1K,ZBBQnoMÇysqq͛R4DNA4ɜ9@ÆS~z1h҄w Y N!9Ɔjed =#Y=΀&8!ЊDD7nƍׯGAFX߿c֬Y8p Qn]&͛7G\\q}8rw^8ph޼9V%KbrC>>hݺ\GQR%̝;"'%D34jǏG~6qy4nݺزeKc6!$/...Xz(˔)sΡjժ')c"V-2!X[KoL}\)BD~UHoL} xzoN" ,^ S lg)TP$L200@~Я_? {w6 |˗/;;;;hF7m8}t :T,rl۱cG|>>>8r߿oǵQR%ԫWvvvݻ7*VoaÆVZXx1.^߮)ڴicǢ[n=?ٳ ]nmLMMq=&,HQyaٲeԶj׮-J}B%K2W7BԎ-0e v-$DSl 89"ХP?_MAKf̜+x',XqJ%0YX1fո10k_9'33'N'PvmL4 K_ω'sssM [###9#Gh7v7oǏ~!=BWWMh3!jk577lllDO!ؘid,EKz ah݁H`>Ir| _̜ 0~o * N;I@`:yR4\dggC^й3%,-(AA59@R9 caas"88GEN 8::|0a=z$s<22ٌ7/???\pϿ"DLLLd211)E!߰xbQj*U gΜA=ڻR1GM/Fx dHK)d;vYYE ;\ݺlʬ*^0hгjY{[Q+:::իΝ;/^`ԩr-ÑM6aÆŁ\Ϫ{{{tۏڄBHQ_U%KĹsh(!@R1U_!D6䝂h3}&oLJw (ggE )d8QdgʾiLhZϙ3S9PKN5kիlذA;1h o߾>&&&I#BHZ 3g?f͚RBMBBzg"9@SMT;}??)\]jxup,*z r2LQϹ <;)5 &LǏqezzz>/11_|ؓ'Od!OOOL6Mŋɓ'ѦMQB:bbG!cb"\P64䝄h~Ɣ^?//CI"r2-@@$*uS-˴ )SS>Mztۼ|PK ڵ޾} /{y"""A!5[l( q kN>}Ĵ"EpAVTޘXxwJ&@P$*üeiɴ^XY 3S$6Xc+#5(W-Zw;++ ]v'>p{)\B!(ڶmƏlǡCСC !D1DM/RdjL;$͛ӧN!`|QƍYxxxB##myԯ/Oԇ\PӋ!=== 0W^ŋ/0uT|}NNN([,6m ˗:'M!I;vرcż>|}}ѵkW !D23ʊi=B$G``)&qp BV` n; QDQS Tl,$|i===Ԑž;cN!`jzVZXz5>|+WZeee! 
-BڵQ~},XyoƵkXD'B47ƌ#JKOODݙ&Mj_T^i=B$oHK)&[7)dݺddNB1hг'nx' WckkgZSa}N!}a߸TIȿJ*ggg899ܹsXnN<)E'Oɓ'Xx1*V޽{W^hӦ ?ƀDYB4СCɼvލRrG!ʛTJ*3g_T=D++RX^Jl!_ׇ.h&&j1 DF 8~03 ᝄ(QS7oNiaL ; sBCqk5[֮ʹ&M49aLImvcFM/B.]ХK`ƍغu+{xzzƨX"222$ʝB!ݢ:::صk 6!hW2fy"1xۯ7_ xחY Փ,WWa7ox'ɱ?`nNՕ07c-)nTka^Si{缓+ !UTnݺ5s~^B9AƎ;0 ދ^H+YR'~* xyNBad$%y'ɑ*,p0Q!aLIiVvz:};p6$|ĮוL/TҼcqwjzqe``~wƍCCCޱ!si 4kkiiaÆ :t(ڄiôf-#DT*\_ի5[ȯR%鍩` (w++aƗ01ӦreX3L2lgm 7Wx')$4*&M`ӦMǪUPreޑ!pYOHacY---[ƍc^B4ôf#D-5o.,&%>@D$DfN!+, Xwsy#.-5g@vdn]%%{_h($E5$TRprrBpp0Ο?ݻCKKw,B!D-?{FJJ ZZZĉ&M?iM333Hq Bxpp BLxI":wFBV` v0K;;`)dyxzѼ($+;==)7ڶe^ xZDENRdQKaooǏիW3gL-!"q׮]C>}Dix)SRB4ю;35۷om)-kCo"4MaU%*0x0Уnx'!0Ӈw Y Ɖ:ؖۇ/_2[JԮXy]Q3з/?LJ❤Hoj˖-ûwaԖBk׮HLL%K0gQjB&… ׵g^7}ܡ?x{NA5u*ЪN睂(j$M)d;;Es)Jm/m_:N!ES^j&LӧOq!TZw$B!Drnܸ$$$R~6&|>J^B5gy rsjB"Sw Y>>B^ CYkϗGNQPK ikkӧO1vXq!ɸu~Gċo̙31|QjBڷoünϞ=annμ.!H\$$9RS%x'!00.(KitaLݹ; Q0,,x'ɑ!,z$z.s"Z=GvK)[gi;Il`.UIjz1CCCxyy~Bڵh iӦ᯿6!h7obܸq1b(u * $:X杄(|y鍩X`Z 0wKKI%Ã7_L y#N uIoa!L$GBq#9$E541l01!n>|{{{DGGR +V6!D3#U 7?Ç e˖E.]%$EJQ&̙S <<y'!h;w Y?"n]%l MD:*Y uvϟE;GvP\9DZr)V޿睤HXz5LLLx BTǰׯ_E?f\Rڄ kkkxyy!33wѹsgƊR6!K`H)d sDz "ƌB֛70 zh&]j XBB1%r4:̜"żERcêA@4+h%c5ֱ^{0&$73KL,b&F{FEQ"Hc%9٣&(>x+/p8;ך,]a_%^Dr0lADD.^Ν;#>>>_裏wQ6DFFbxk.X`q{CͧW\ya~6t(гtѱcK%C S.P @S59zduSVt 3/G!C0wÅ yѠZ51@_S۹X1Ν.)t8""""p k]vŖ-[xl޼'Zn_8~8233{_s=QFѣÿ{=Bqcݓ̟ܺҭ` ?|Ԙ` *Jtԭy  8wW˝;ԍZ(+ ?ݸak4<>qo+U ;z \\ EԮf,\ugLDDDvڵkuyT IDATp7ʔ)#ALxBBBpqqAӦMѢE h͚5CժUP?G_{ 6lg QAyum*WŋsmHӮZS}$]bቛt T6Iꮽ|`pz)jAmuN'}=8E==Q %]\0=iiILDt|@|?Qti899YYYHKKjݻw8TDo݁QKHDŽ jM}tjM+]B:ƎUCK잃Mɽ{K5Jkuw\5 D$&&Jg䙿xץ3  0szY!EϞ%bQC 3mmZS/$]B:]S[V_^SN1ٳΧNIp\+"@DDDDDD[>>>x3΀!]b ] 9"]B:9Z^^%6YYuj MOK-'GG,<eUJW9#]`8"""""""SpttIJeA:奆frt)pt Tk*gNd 0Pt.-]`˖ä^ŞlY5*YR&#CqEDDDDDD+͛7cҤI)DϮ ?3KHG[SDEoo_ _|Q:żj0)HTݺuc)DԾ=0~tQx8$$H6m@988`R^> *T1ٖT(pEDDDDDDb'NE<0 VRSKHG>AOtQ]2?X6e 9;K؏n݀å+(pEDDDDDDbŊ/~z49 DLk']a l,]AƎ:tx_H b8r%7n,cFP/kzQqqq.^9T\. ]@FAAgIW.__Q# O> V cuÅիֈ($dfTЋ(jժwww S+RF˗/G2e99"]A:,LdeK.??+lV`oKT6Eoo888H+^(&+c̙<+/Y,@C/"""""\۷/nݺ}aN2bŊ^éSn:T\Y: ZZt)pt Tt@r2|9pt psSkX1` yk z5 ۶Eg4U<<7t(m؀%'tRS3tiQprrBǎ"::ĬYP^=4+Vo7n`͚5xHRL 0>%]B:WC 3sGHQLbc /77C~=>.Ŋa@۶;q#5 ^nnY[j=E¡QsttDv . 
<<˖-CQct2doߎ̛7wMx8$$H6m+""Ԑ".Nt20etjFvFŊ'#htF-v놭b?Ag˖p,KiS`l !ݛNDDDDDT8U^&M¤ISNСCow!))I:Q+zs(^tYh(r%0mbだ gնtӧ%JHPNꥆ%6?Ϝ9(UJr{w8ǣ h Q}P 2d*S5*VDڵѡqcҤ jV$EХo& 7o͛cFHH>'ODJJpUX[FVкuk4iEcʉ`3FtH</޽%qFV.;{jDArj{j_p?\p M1q[ҨX<+_ WG5РZ509wd4tZS.]B^Kg`0dHnS֧GHЋL>>>DDF>} ߻w_ܼy񈏏GBBC$''###Vwޅ+dɒ(Z(QT)B 򂇇<==QbExyy,._YYur5O]TySDZOOm[TdtƍTX2_ԵkZ?5M7mRΝYX,Y@Xt H2eаaC4lP:(w?T֕rs]PNOQWժa$$5.]B:*WVC 3{XrEtTd?Qzح[ ǏܹsdN$""""""7nٟ-9f 11%iSuƗܼ%IG(:ZhѰ!_&$@pfϞYYYO}\%зo_L6 /RV3g%KJPNڸQ%`r5<1vy=]qqu%6aa9sek(:vTkjjWŋ@L[0Ro#g^P\xBTT>| $''۷oGƍ|>|Mk`ƍظq#zTZ5oC@ĉt ȑ=%6!!jMM8:JPN .(tѣ<ɓ>d_Vv.9yR)SEk(Wkjvg^-Z :u iiiے?O;wXf DDDDDDDc:~@1kR?.]bkZSCHi#GKl PC9?'SK٣~O)]B:&LP[4:$]^Ξ=~!G߳n:/߿`ٲeyDDDDDDDd6HW.^]hf`> Ԭ)]a-]A|}:u+l1]3~~@4h"Ϟ˘"""""""SHIVΞ.!J ʥJI<|Zt (QB]P.W.G?-tW#e_Ѣj@.]b Yc)QkBg ^ŋg˗ׯ|||~=+/XV3W\ɳ$"""""",\DDH?3IHpQJה5R~u.t)rR%wO˗KHZSNlٲǍ7L2bg>)SѬ:LnTkmѤ #]aTtt h|`| *JtԯvW.hѢO~q%̚5 %Jxc/^ 5))]4u놑#GSN(Vcٳg~-DDDDDDT\,_ܿ/]B:vF0 -KHGǎ1FWK;o']aLMH.!m'JW<8ʅ={ ==;::bѢEضm*~III k:::Ý;wW_aڵػw/_nݺ=BHn%]B: z0:rXVCg`W/ 'իY_d0 U[>|(]B:U^뱯988`Ŋ6m͛ __P>S4im۶>FDDDDDDΝ@LJ&2mТt_[HW)SV++Hĉ3 6m ]:HWjzitN>x =:[ϱrȑ#1bĈ~x !""""""6IW.YS(0ؽ[tY,@:F[*'??A mۀ> ]@F^Ǿֺuk̛7/[?…%K:w 777ٓ!""""""+V<>(.(+']b|1pt (ZT ݥKl23֙GJ"EԚcl ܣGjߐ T$]Q(qرc`8::fԩSQt?|#7onZhhh^Ȯ$$@xt RE]P6wK+WKHGJ[SIIjMIOO5(bŋ%]*ZT1J˙3g /AfͲ9z*U z_V-wet͛?pt h3G(:?_OÆjHa&oSQQ%AugĨS%N 4>իW655r9ͿlٲcbbDDDDDDDv+, X wOtt ds dOqFԄѶ-0qtQDp!?׫NjL"]Qp̽{"555ۏݺuQ\py͚5sDDDDDDDv-- X 8yRt-.(ϛEed3.!NNjZtͣGjMH.TIj֯.!]f+q襩J*lܺuڵk>|g/9 wŋ+WKHs~~FIIҥ@6L&S0atQr2|9pt pwWEKl<>8^t/.]`8Ժuk{{~g"""in/m׮*Tk>vW/KDDDDDDTP?4j2۷ *Jtx{K<.&X.!u ׯK84i{hѢy!333ۯ1x53|3w*WK;t']aN5`" >^tjL,]at|J(45n~݋M6eK,~e2d"!>>>pvvJ'N K}+NRƥIPaq b"]B:zrxC;^mut QK#FeZFFzEܹs'՚7n\]]H/} _Z*N$""""""*v+Hĉ#n^ or&z%+Hט1@ǎF96Md8ʅ3gTRaƌpwwG:uPJHJJ:;;cƌ4EFpȑǾ'Ndu򜛛t֭_JW.__((صKt4l(]a'JW.I ;ۥ+ ^PBO^ZZ._(CM'L?OԮ]q){geeg_(ϼ_5ѣ%I <=Kl=Rk*$DtY,@J6V+aCT"]aaptQ+N>}UV c_駟ZBZ[oƍ|{EtttHҀtQRt)&]B:*TPVr2|9pt pw|}EKlYL.HrԚrq.IIQgƝ=+]BLo6mB޽s\KƶmPlYGcܹst6XVV>6"""""""mfr6ܼ)]B:40ߚ"#KHGݺ[S%[ $.X.!7z|s+WKO8|mڴ֯_@--; A<<" O:ZI~+ΜQ[H޽+ΟW[>x ]B:z 0tImɚ,]B8cq!|5kիyzzwزe N>&M[Mn@DDDDDDf1i7滽{M+H@F7JW1c+6lrp ȑ@׮F!!ڵ@Vt Qr(ڶmmŝ;wPD =UBԨQ㱯)SbŊ|~TT 5jԅ IDAT@͚5DDDDDDDb,`,mgOajΞ.vw#eZS%6۷滻g,MٹS)H z'''TX+V˗q ''59sGF֬ʗg% _T۶5SQQ%665e){Ԛ.ٴINK7$""""""WR88Hܿ|:쏇,]bb2]}j"]bΌ3]}K5UtÇl.!z?I]3`| 2Rtԭk5 ,X\.]B:ͷEue_[S @xt QqGۘ?>KܻwO:ȼڵ&L0vM]P.!['KW]Tlt h6M 573͚m$* 7D8ʥ˗/{R 9s7ٳѣGxyyҩDDDDDDDԯпtљ3ʕ@jt &550iҤ,#""""""3ǫd~`& 5f +F~ _X%c(K `Z +Kt t.]atf )]BC/M5o̜)]aoK_̑0VKC/M9z|-жm|!""""""* 0:w#=ٟ=C+.^>@mI[7` 0`2=ѹ30ztѕ+jM%%I!4UP![srr€! cW^0[`zj.!F]HWkYY%cp{w #GԚ̔.!C!;^ K.45lߛ}+NRƥI+Oxxxoȑ#޾}PfM=gϞͷՊ{/ߞtQۈɎ@Pt9Smwh&l*]ANU۲@`t4 hFh^`f "z];w{iiiXf 5joooL>}ѣ\vzz:M\?]XU+6m ] PtQ` ][hV5"dKW!=0'{??~uؾ};Oʕ+@@@XbX"ʕ+WWW\rpr5qqq8|0\""""""z-.͜ P:uk 58;;ÛGF?#]B:6fϖ0y?uKt4nlA*֒Z[d'52{*2RQzijРtVF/˖w5J(,Lݝc)g^y;V(<\ݙ(]B:ڵ&L0vM)lI47ADDDDDDDt&]a꜓L1t(гtѱcyp 4G)`*u~ٟ~ԇ+Vs. ;(_<|_ ݃oމKD4-::{8gdLuֱcO瀇0dt :SL+|w7RLKݫ֔n0A7%65ePK^DDDD/_756I(wd^5\oش (_Yt¤KleHc!ŋ%6AAjMFE ϝ. 
RF*&ooo+VL:T)5(]Z&-Mmvt pqQ]]%#X\w5R9;~~.PT۱~t ppPKlV`z!zFp DDDDDDD4ժ f,Y\*]B:*WV$) X\wQyy5%uWܿ|pt P9;K$'}dРA3ӭ&n:\R:RՑ#GdɒB5DDFEF;Qa/Ξx9Mنߺy1C P!+QQ6nmTQxqߨ\QyrM///s;wٳ]bc ksC\^Ov Hge[hhtѯ:H80TޙCyC) WyqMQ^KKM no z>t=3xW.pd͛7ǔ)S3*117n!C]Ȧ#DDyvx~} #G۷+Ud+Փ0dyQlY'8~\[7kIsmujtԽt ' ơW.ԬYŊCZZSSrei?<ʕ+<<2==111ؿ?..]K.DDOuEkܹhԨ`o^DTxq ѣ95f^U:ODfp*Х гtܼy08p jժ%XVZSI\ t t 30W/]b.7rKz傓ԩ3g<=777,_G"jŲe0m4deek5bQk ㏁ͥk(??bckL`ZhRrUk*&FF֭֭݁kH/k,wZ jMk']C^ԠAdž^%J L<ׯ_& +ZQtҀ #''Uה3P3.=##"~UQ//{*WV9sKl-ԖOd_l=`RjזTfRCL3HN/WC njlo TJ GjMqC#^.=\3fl5k,,\=*&"""6p 0rtK`" Ly kH_fz_Fx Ĥѣ#5j,ʆƍ.Vk7@KÆ{I?*W__ߖ.sǶ^rvmugoH 5Ut 31b@ 3i.!}JW=KI.!zCHW>39Ytt .]at2lpt  jűcǰh"khӦ jժ*U ^^^miӀ-+زEtM j%]ag(]A&Nڶ0 6m ]:HW<l ]AF:w0;`zj.!#FݺIWzsڣG%d8Gʕ+E1c֭[ÇիBbb"bbbpl?ӧ /`ǎXODDDDT|}5+ݻ+Hԩ#]ae}40 >LtIWm|t3xE ;mۤ+H̙@Fv?^ ..'ND:uyxxqJJ 3gΠo߾hݺ5.\gODDDDTJP˕.IOW[?.]B:U/wwL`QZS*Hde!!%x9 ظ8tHtUHWm8 ]A|}ե+6m +QFg̚50 K/>V2LDDDDJuAL.\.!*oMݻΣ .!jM99I$'ss.^.!nnrѢ%6~?/]B:ʕS%KlRSՙqOKRԟ}KKؤV'OJq蕇֭[nݺ֭[YYY{c_OMM믿!C $=j0W$Ӱl&o@Tt _|k*&F.!uo@ ,XDDH5ͷ぀5QIb"d pt ^yd=zt"E`xQmݺݺu󭁈(tc7+W_3:IW %m[`D `B .NtjL"]at|J-ӥ+"#Հ>&Ft4m ̞-]atZSoK q9^{ YYYZQxǾ7_oDDDDDfW/ 'իY_d0 V>.!}ue&gϪ-RRKHG^?3'%{w`p KԖ|}9R(,LmG$]B&áW.bذa(}WGGǾ3fhQ2EL+Hĉ3 V}7NIh&7JWѣΝ+֭x}1U($XxHt !]at(@_'s+-ZBXbҥKuV*""""<`NdV/+HРtѶm]Wg&||t3Gqi&;vUdf50ڵ  ]Ӧ-[JW}et^? G[o͘1+ """"P|Kddw'=*]B:QC OOG5k;>Y,@J6V+~CT"]aq#ptXե+6m ] -]ae {t^&8mڴyn;#PDDDDD*URC 3IJRgI ԅ"&pr:ETѢ%6Ο.!ʩ;SK.IMUgƝ>-]B:JRJ.IKSt pqQkU&=]msxt M_^:%K)]B:Q$6X.!5koM\SZ5TBZS%re󭩻w՛Ӯ\.!aziAx 3'%%a^&""""{֡0ntQxP ]B:ڶ&N0-KHGV)Fׯ5+]B:ZPgIdzGLt hLe&7o5ut h0Vk*:ZqoţG~1ԩ&L[0`Ӷ={|׷oߞ/GDDDDT`0 U[I}պ23g+az0:^mut 6L%%%kW`H 0uwNRt 3FU`b 1Q8Iثcǎ={_VZ?~ڵm[lį9r$O_?{wU{! IDAT`n(\TMKrռ.2d{ݺR.i*Z{(\Pr Qm:̙~xs>z^gf<߇DV:$ln|y`p$ȑj́IWsNBzS{J'1;tHͩ1cGl@7tSvI'1 VsjX׽ R*wNbvڏi8H4dy*(H:YhSkM\Sc63g{m/h޼9ƎkL+W~㉉f\""""r>>@)mۤS^&%Bk&`哑l p5ejMg$۶?O'y^?6H ƍS~d. 
@: `K؇?I&͚5u$/^FDDDDUbE$f+W;>L@ժ)rrի,c2իK7֪FԪ%Bk:`)B+ عS:e2uJZX)_S:;;?K.mHH~Bۛ`EJKu!}IMU{<#SFjsڋY$G2/Pt4`R)$*RH'1wX8qB: Q:O)#,#C;zT: Q<*,+Kݜ }K/wެY3ԨQC(ؿ}DDDDH4Pj$!=NBzԭ.(IbS4Bvxsu``SZꂲܼ ̛p[ TTRp!%Zxsm5""P>aKt/駟7'nVKDDDDm[`hZ.s i0XUxى-9F̚ d7W{|jNK'!=7V{ɕ+'//ԜtI: tzpW~巌 lxf%RVNuj!&x5ZgΨVwH'!=w Nujɚ":T:Vd$hZQAC`ZQQjٟv퀑#ShEG襓=ϟ?/D˗CFR_d~`Z׈:|-#:T*$8Xͩl$ǠAj$!!j,$G@Ϟ)VP{}[:VXlړ^:i~?{,;&e׮]9sSz|JCDDDD$dRTd&`V哑l l,2hT:VP(8Nc~t kxukZv)H1cTKr#ٻNAyE/<<<zlƌY6n܈}O|#""""S&<(rrի O>>7<[8p@: Ԭ)BسG:e2);S^&Pt UA쓯/Рt mۤSPaK&M<ؾ}0|pdÒ۔L:o&<;ʔ)癈D"E"IRS^LgJ'!=ʖUYTZ3)$GRjN=e{ˁ'%J F| ptңhQuruNbZ>,(THͩJݿڱK'^.]NBz4hVIB:O=p^:9;;W^O|Nhh(|}}Ѻuk.]...Dݺu1}ts]oDʕϢL2h֬???>}ZW֮]zz5Zg_ӑ4H:Vx8xSO.!)"#En%;ѩ0|t (`ϫ#GJЊS+Tm #B+&Fݜvt0j(DGG#""… FBBRRRlM6UM"""""+#FFzO@.ڔ  t,B+8Xs> <M:VH$+K: 1`гt cǀ+^_dzN,[$$ӻWFrꔚS9!eM⥗^DG@DDDD$g Q#Z7OSF79( NAzMx{KڱX^:5~<кt ]ט1j ['9hN:ڵ)X_|'''Tvm3Z՜(Lt 5kS^@͚)={S^&)BkzS^&Pt ᅲNAzK mNAzLt M-[SXRV0h}駟H"1ʮ-T/.,- XTR!SS..I/W-(.()#,=]ͩP$GѢjN+',3S<|X: *VNbvSIH/ ZU:YNjE~t^6`գG1vmH_UgUK]P67yh$TR0>%VML5d`B 2R: Q'YYIH=Sh;| !K:j߸t$G@߾)NT{I'!=^{ _:֙3;I,z٘7BBB흯}_|1_KDDDDdN6Nl(4 Ocat kxU+Z;w)Hc^N{7/5 hN:ڵ)H#Sh:|-;m򀧧'~g|xgXE;#ԩS'OEDDDDP|}5ShI ||Z?J |}}߰{ԯ/B+0ضM:e2^^)6mlNAzM֭)(X#Θ>}:"""0yd1p@:u _|+f񉈈^RHQtwU[0$Gɒ_IUP$3Ϩ9UtL:a$S+J'1_ͩ`$T*,'XF!d2իKZ8p@:^yRJŋ_s(Z5jO>QQQ."""""kxx/Fr&0o-pw7ޜJJժ_F ,ZDFJ'!=*URB, ,Y;'puU7YqRS^LgJ'!=ʔQsxq$fiiSSͱ.]?k.ܺu 1g7ݻwGfаaCxxxM6K/!CGPPp ]?1hL,B%`, >^: Ѥ 0mt +WٳW?.իfT+gpҋV/#WIVtcGZG+VIH7zNu*ĥK'!=Nuj!&x5ZgΨ;IH݁ASh;ZJ'G`ы&OVdv`ׄ @V)vS^c/$Bkn_:5jЮt uS^#F:H:tXɑNBz  t,B+8X ΖNB`ыL&C:V@)H/__NZ6/2ShmܨdL&K:V` ut ktQ#Z7OSMJ R*2#9%?#yڇJ'!=yFͩr夓ef֙GH'!=bE$fYYjNt\Y:YNk!$IZ8p@: Ԭ)BسG:^i5j@b gggxzz^ /񸻫 5F,X?/VM]P6d`B 2R: QSNNIRRŋ^'d*TPNbb:{V: Q%%ǏG #88qqqHcC#::۶mÄ PF <GDDDDTp4iڨˀptңQ#` ZWfEaCy*.N: QTb"0{6+]xsu`\ &F: Q͛@tt^eddØ?>z-4m+VDQV-4l͚5CVбcGtCr 6m ";effbڵhذ!}dee?:w NV$'K'!=ڷ~[:VtPstWѣSh].(?_26mqShƪ9(h8Q:ŋ@tңys`dZ.ؙM^:-[ĉn:?~V޼y:txbQI233GsHJJҝ0p Уt #G+ $ǛozI:~\ٙ>}}Sh< ,[ܽ+x5u2ӧVt>SIxjɚ*2D:Vd$hptE/իgrrr0|pV~t )))6HFDDDDD VSh l ;x%ZwI FRd>_:juz5#6L7``* ]`uN\ #tڵů)Z(J,+WDPP-b=#lSG:V@*~}QI` }t dR|I` ut k lެ~>M 4m*B+(H>Mx{Kڱ7 bK_~ sAL2 [FF># 7|cq gQEeef6GH'!=Q eeE: _+K'1Q+sNBzLt 5kS^@͚)֭SS>)NQ SB0gΜ'>p5j"##gjժ=g͚5͓.ُv?#IN.T{\Y)''$f))js礓* EH'1KMU{1=+([VNb,]"S*R<#w՜ NBz,TI~I gyR ۇ 믿KƴiӰsNbǎ:t( ڵk-Q#H^f._NBz4lhbjB0{6ptң^=ͩk׀9sk FrS11IH㝧nƛSII@TtE/+?yJ#44/_GZtF!<<ХK4iݺuêUyf.\Xٳg[4>BoKЊ,nݒNBz 0zt huM$G6ر)bbT"1Q: Ѫ0at ??UT%Ӣ0yt KԍDIH&MiӤSh]SWH')0X޽{XtCתU {A*U:Ʋe,:fcT~Lo_ƍd oVO?W99IHaÀΝSh9-<M:VHj%0@` #C:cK}=،3ЬY\H+_jԨ hۻwoCDDDDD29Fl.2>_Fl*1CGh$[7K N6Nu+qt k$Ho6lNAzMڲΝ@@t ,^8qB{r0eʔ\x3,( OXEDDD@ZZ._W"..vp-ܺu IIIAJJ rʔ)pqqAQbETT UTAŊQZ5ԩS'W+ĉ|}՞F٧&+Kݝ\;Zt YKܑNܽ!,_xJTsj>J^^eJORdkV-yg穛7(ʕ󔷷tĢN&l׮]_{- dɒ߯_l*nDDTp%''ѣ8vN<ӧO#<<믿_g}mڴA۶m^^^ (6T?L:YBM{%pvvf l'K'!GѢZE1{tK| IDATTJӐ4M ?s$fWfTji +!:-[V%6mڔ>WYv5o< *T@ƍ1qD]Νף?5j+W"/m`N=ݽkoĔʣPkzo)Bi`2՞O]ekW``Zj%EJtңS'`0Hڷ~[:Vt -$E/,zvߏ˗/ù>N.],Wh=Rb=MNNN> O>X"^xL4 }nWw0|p?Dbbt$"zѣW^NonE/ׂɋb8Y#6L]T&!CTH~Uc2"=H/IaK IN6mdѝ]u|X4?ADDdTgFJ &`˖-qt|>pD||>_F|]l<D1C1͛ Lo)ȑL )H T[V"[;V6ݻ-9E/^x{yk؂>+VD֭-kk֬Ecӧ/RRRz߮89BTH{dg~ {%sׯP;3o\&ʽVS=^Ӧ7˗oNAz5j$ WLMMN`X„ zl—_~7nDorQeNOO: 99YjժA_]"-ʈW/_?ZNKt&.^)iSDTu &B+&F:Y}{S#y`hdC,zY7@z<22w~aֆs={9/^<ޥIDDT۷mڴAllt"#W_Nuf ?{H!K t$a۷99)zNA7DDҧ!j*/^ܦΙ3Ώ:''GĉQZ5 8-.\ȂgΜa;'^:͛1㏑"}6 hD:@`t "2 T[V"[;`k=Z"Ǣׯu=HLj#гgOc?3F777x{{cHLLx/^DhhMt:vk׮IG![9yg/Pt??Lt -U""MkKxXptdՓNAdR|]c^u|(SUt .|ٳgcҥr劮qCBBEDDD!22=z@*/l; ૯p$GrK3H'AKCJVKNbv3ߤ/ʖNw#)H"EԍDI'!GS+K'!+e#:uBHHZh{͛QX1'Ξ=k1 2eʠvhڴ)6m:h~ZlM...qr;V:JBI'!=QE AiF`SC.ɍySZQfTj*p!)\Y# T1H$mznݺ ֭[駟O}Mjbȑ(\p|rKDDhQvmXL 0g*,պ50~<`MDLy4 ;W:Y\0k{@JiHZFK'1zUH{@ժiRFlI^=usGI'!X1'''{F\\~={ q-eˢAhժڶmBlp¨^:<<#""rdKFӦMѢE 
_>իE8Ŋ'<==ѥK?}6b۶mضmmzܧ;v,ZjOO|=.ST IڌrgOu _uS+֬Nb,YL*%@3Gi'ٗD` $fӧ# lrFx祗cŋXCիWǨQgF…>… t(_?*TE;"""{Vn]+xѢE ԩS'On<ɭg}ݺuCnݰd۷/eXJJ a׮]y~,'6IWEݻpFYoJbULݹS:Yp:O Q*`7mQ<$,4X0n S7M#ҫat^vW^쎇:v숗_~ T" *;cǎᅬk";;;O?bԩSͪ}IHiſ\Io0ySG}mۦZ ?95xtc8uA知ޭ԰aIQۧoK'!=~[}ڷO: DDDD6TX1tsŹspy|W0` ^Y&[;Ϗ7mڴVNBzի!NmA}6 "5 UK:j" X2&?HۥS#ٸ;׌U%n/_?7n`׮]8q"ԩ#j^^^駟9 s+$G2o͖0ӊ1kZ(9uAHhaIQdf6FZHWzPA:YVj"Ev6ZHUNAE>6A;vĴiӤ# TR׿w^cʕٳ'JbI0Lعs'fK,ɳIHb"0{6#TE< J/ʕ,9c*嗚5e#u XNB") XNBzT.(jNEDH'!G ,YK'!=ʕSѹߟ9lɓػw?DDDD6駟b…h߾}2:_~AʕdCϓI''یs"0w*&M'1f*JCiS`Z/fIQ\ԕ+IH/:T:VdZI,j5!ٟvQShEG7oJ'!GÛYV ) Lڵk8t """"ݼ~z*dp[ldrrCBp̊ח }N8ӠA@)V1eZ_}ӥ}}H -ݓNB)5Ҥ=zH(0E/Xft""""tS=ŋq9Ku+qt kys/?#+#|l\n$fDeKZ?_/"@kn){6z4+)S#9pj5|8Щt zUZh3ٹO>6w6 dZ`~Ԫe˲ _0Q`Kf2kKZعS:U Pt ;S^>>@)モNAdf`&״i@&)uu7nܐBDDD[ѢE|}|L2wU S..HQE/ g+ El+F&qŋ eH'1PmNBz89󔫫t,`J $D: QS*I'1Vmݕ/rrj^&.@_EfͰzjddX3z 6`}IGt:0g $fW{" //uϤ%$̙tTjN}d6t횚Sァn Vb"0{:Oլ),nN{]$Z;w`5(W5j&Mm۶h߾=J(! `РA6-zegg#22^^^6 9u)#E>e V R_Aͩ+?,XAOv_K'1S%˕NCz%`(bb z$uUt70i0wt777o1k,˗G>}phDDD$5m|`[UIHAfX9pLkI:vLŭ S߾@)I'!=zNujK&→n5ILn) <̽{ehѢ~WHDDD$]v6͛6 n6`Fĉ@˖=< @V';6nкt ]ט1@۶)S^#G*B`H_`j 'G: 1dеtE/;vQncǎEzzt"""РA; `e\f2uJZرC: モNAz?/Bkf`&Hnild/``ɒ%hӦ \"Y:ul:^J5C.ݻ,_8!(QB]P.SWLrxȎ-ji+$GBjNU$,;[,UNa] :$Q`׮NAzL@Z) $gѥKԩS(^H,֭[HII۷qҥKرch׮<*Uqb"""25jt<*n{NC^0w-+yWDjUufd$fokK!KUԔ)ijߜzӐʕS+&O6ξw_}V7R,5mw=ʗ^xA: Yy7YT0p@899Iyׯصklق>7"":uBHHJ,)HM3g#C/fWiBnV+7CK]Pǫ̙tTjN}tOUԭ.(tD`w5Ӑ OW us̙\5djT+Sg̐NRlfޫR ~'[vqQ:uٳ 4k?}4Ǝ HRRl:w2Hm$d%K%ѶCW#ShEG7oJ'!=ڶƌNׯK'!=ZƏN ̞ `dIShũ}BtңiS`Ml25ڦTP!G+>yWFPPP>#""" 6D6PH;(`%%~}Q+PR-B+, XLIHwo5)B(-2={Hw$Gn)ΝS-Y&>u *(PkڵknݺIǰcǎ1{.L:FiLDDDy{pU2&$g`z CBOVaP6qQV޽t kHH֮NAz t$B!o$ǐ!@.)UԾcd wNQ `UTvm:tUV}oQQQXb@*"""Om܆Zj6X@st zT7Ϫ1+@)6nmNAzd&`״i@)nU*OS͛Kڶ  NAzMl)W#\jհi&*y!w9WF Gv,# 4T: =ŋqي=h X%"V*RT(}Վ58X: e2WLNzZCdWNv-t djՒN-2ڵS8S*[^iժFΝnZDD*Qc['S̟DEI'CΗcڵ &MBdddF ,$"ul=z d6lPnݤ83/y+V0vΞQ** e2 5NI'1۴IzNBzL.(8!lV5xm>/F IDATMѣI̶mSsj$ĉ/$D:?*ou#θqjNK'qްx] 9Ei,^lY;..ΝV,xݘ3gMYu9L`JuG)_Ϟ"+lרv颊+(K=3ڜVV+t.ZR^Eի4jnUC57֪FԪ%Bk:`w\Eg`<"ْԭ+!pqss.@\nP"@izjcGAXX5j0p@#66ϟ1zꅚ5k"11NB {lA%$$/xh߾ƣ 9YPPt~~ȶag?899*/Փ'Fiߜ8)_WO: Y5 c$fw\]xYY8㑑r\? \IO$\ @ypP㏟xƃiij8WW+F⢊'s=`r57NCOq?;/xT"/_V籜wr\|F+ uuus<:nv͛[;巢Eg1c?Xr0...=vU$h֬Y۷o?ҥKx>>(T BY̙t6s*~Uc|8d}Gam8nU+7-UiS7}$$rT/͛#FHк|Yͩx$^Xxڧ당[࿧NYUʍ;,0 jk}8 ˒\?T$`>)NNNxwj[o "ʭcԩ6?)b19~\IONp20/u1 ƊSQ1M>=~8yRc[3gTC K0ib=~Y]> lsTKVķOmJ'xXd$h;RS1j\lWZZHGs(usZRR#2.Z=F[-zߗ-[OIVN.lu'zt|"XnlӵkW6 ݻj)}]5 hΪڵ)-[J':t[]P kBh`)jNeoD!!ʕpј) r-4Z8v XBET`Ο?ݻIQ\rAAAO|Çqĉ|JUpnn)֬NA`AHسVHB ;S }azUP%* f̘1ϟpAHLb|%CFFF[]w#&_SA8xwЫW/KdIuA>ҧNH/6l@hdUc 4EjNiTٳIHeU1xq$fiijϸSAAz@S{[ u?!>(SF:YF7\D _+9LιG+??}o\YmRIl,L gN"Q+W`4pg,ǁF 0猋3!"\ 6}*>fφӧu'n,)x_;S9sUG6]fϞ}ߋ+… ̞ B!,,)SШQ#T´iӸqSԩ-) M47ԝڙ30}:\ۯ-^2[9x0r a2YI;Gݺ0lΟW/N"LĂ*uX 0y2\dx.O? 
GNa-*Jݧt'nfP o\S.>.e6""" a&}+F63n!441p@ZlIrxBᖢٽ{73gΤ[n+W+2j(Թիv>ުG\(V?wy{WXG}DΜ9]U!0c 9;+ƚ5khԨ(UƍS ǎN!$ vt'1t>M}د% JcfN_6)vQdΨQjA!I2|vQb gM_~tȜCϾ}t'ɰmO;pC[Q= 74nY1%K(Zj̙3ڧz'Yd I!ױcG>,^Yj^$,Xv%˘i?8^reA %J8gl{XaS6C2SX[ Vw aR;P 6X\mR%)] 7N!>p/TR^,X@w DGGLժU&~i'bp»Q 4zM~9ƢEM!ըQ;vB``8B BLΗÉ٘-Y,9l*?ZwHJR=՝D#wnuMN-X\stQd:A_$RSŨu'n"_ᙿp玺tQdM/aÆl21lre֮]K=(S ~!l`O:uPx[rԩ~PIжm9?#vJ>!OrJ~W^xqn+e3iYILb0`tnsFUgP|ԕ+0}:DFN"QZP681""t'&uh aL8}SzL`l ӝD?ɓB a, 55}pB:vH 7񤦦ùsx"'OѣX,lԨ9!9s{|ڵ^Kݙ6mCYt)3fpJF!4hah߾=ٳgGIÆ0dZ1HINeqKm_~qhJ%K2U}օR fqL&@B[ծ #Fԩdp&Ow1WX7V@ G ʗy!/~~w_w#);wHLNr|<^\vԴ4k?oLDEeܧ-ȓO ?ԝ$åK>5a{y)ZAy޽C{]o޺EJj*))s>6Wx SS5 6vbb dIل0Bˁ8pNצM g۶mwܷo_r׾Z|9~!9s4<BduJk׮ӇJf5 ̥m[ujZI2; ,9!U޾=yyx\Y >sݜs$̝ F\Ȝ-5|$`=ѝ-xyyQhQjV@ X|bV Hsp<'HsǠ 2@i5 ƌ|L8GӦ>eV;V!(U=K{JX8}"ᄆaniz_g)3g2N+XP[.!e7^{517oe}mʕ[C7ٳS !DVӰaC^z%ZlI5rVy^]-ܩ;I~@0?'7W.^mԈW5b|w/Kmcv@&P5oغUy S{NavuM;[KkJfMivX l\y=p+wߩk7\Bc#pˋQF!g}F\\?<ҥ s~DZc 'Y˗2d+WZj{?~\w4.a {)\JB#sxoSfƞVT͸qPV6שF ʛڵcϴiD]ˢ#h] 5/v8B;~8V>, .$99Yw,afeI2$%^Y!3g]]cdULEʥT^.oaN2~>>ڨ_>׬aƠA4VnP: pp,T9> tN5VBt'p玺!v! @r1cnJ*VT_fr L8Ńs3}@y81""t'(]Z-I|<̜ ONTE `L΄/_Cz1cԮ\gda3!!́0'%NUOi#81̓?НDrVv^|kMgt_?`  2`_HL .LtwF3f63gwޙ~o_ޑxB!2ҥK > *duGfTjxn&)p$Nsuޘ6q&@ax.pIG[xᰚ5ah)EEԩ;fxέZ_x`HZqx*_ީs%M>u Z|ĨSNryrd?Iڵ42 dʕGGӧCXlwgXaWNٞk)յkWl{c/ZȮlB!s^u5j$=ܑW^g?/k~g8yUOzssCc4Q͛(  Sԍ{4o}Na-, fVt#$3/;}`r6LhO?rNcшxevST(>'ii{t6䧟`HIѝD W?N~îF{p# :NaRZMmK}ǹ qi/UÛ78ƑM/˺u뤬IZZ .g}ZjU'Z`ƾ~:QQQ>}'NoqQӝ6ӰaCmF5\:0yB͈)Y,O=;ݾ eɶm=;KF^;.s*I<;_},SSU̠ ][wa^Hco€ 1-M]S B.0nO];b Bƺ{…}M$~v?ۓ&NzþN3өF!#^n(00sw^ʖ-;pSs"̤I(R(P Q^%fϞ=k !ȟ??/2ӧOٳ۷iٲ%NKڵՓfr>ٌ_[țcb}VTlbcaTsWC)o3 <鄙plzҥKf"kq+Vxoݺŵk2Ojjj^ϊ!DE={6ϟgĉ-Zԩs^t_|8в%;S`s7nth ///~ Sst'xxu)ED׮N"lTlYFth|AK5lCNaYuM9gThR. o>p”7G>wNm_مp욘X@@jf׮]3n8sJ>S_mBc̘1>}w}0ӧѣׄSm~IZ$u;5~SW7{y~IJIѝDأsghNw k q­ӣePС^GEϥp?m۪{; tn o :aNT%Y0˲ݐk׮MX"r.???C)^8^^^# cX7oײgNF(S 6ȹs8piYz*ׯ[n6#YIΜ9yݻ7}[Flٲ>f+%1B=]OdشI56g-[94F`޼LߠD⡶nUWvQdΠAҞ=dؾOI rNcC]/vTהN7yuuڹSw ?@O>37X}9~6)#W SԠA UɄIdM/ooo.\H*]vq˗gTT׿Ŝ9s ʦBdR2eعs'3gd6ͬ &ТE z)n,8Xm~jysI觓'~2y H$kjPcӍ"sƍSNԝ$CH^~YwanM5)"PсBBCf;(2gXI;I}CI=FRY}y>S~%ȾJ]S]8rۯ3f̐ /ݿ!5c C6@={6iϞ=4/Bar^^^ 6ܹs>~ZZ$==˕K-(;ᚳ۪|}_e 8KkӹIcBKMUetQd6"-MceGl>ތtA#X`rؽۈфPV'U.0~<)/qT3>3שFeM5jo!6ϟg=M .L- gȑbaņ+y|qB߉h"nLK\fAx$Vr%"##wΜ>ܘ@"`l ӝDأHl2]7n';Af1tL/?Nb$}ʉzm >_έNeOkSe>)3jYV߿?٤ϟos}dϞy֭KʕO>[n>Bx5ksNoIf~<ʨŋ0e DGN&8<·QL'Vhcǝ4iթ`!z50ۦO?ҥ/&^!z{dH<[Vuw$:Kh*$'‹>k`YcTä$G3'ה;Zb=M'dT|ɕ#齃b/cFɄ'a|yb`k"-[+t'p*=z4\.O8_@߸aX`lxmUS'M/!<ԁx <<*U,Kzxbz-J(a؉ͮP;|O:nqܩDtȜ_W;wNa.(X|{nz٣~ h"szT)3߿-S'f#Q\w q#]R^6{pO#FrfiNʊL1zbhիQ+%y&&MA\r塯/iԨ[ly))GƲ`d ݻw[fj#fذa|͛oɞ={25?~#uF||]s!jժexgϞ5t<||`8YНDxtSsj?ם"Ţʙ}$ƵNZeS6PVoѝBX:`kךT(r.Uw?fiժTX@=z4<Ξ=˷~Kxx8'Oȑ#9sŶgAÞ={ȕ+գqtޝeoq.^ݻoIOOx1114nܘuҴiS/NPP5H"VMMM믿ŋ9s/f^n۶mUV<*T"EТE Bᨲe˒={vN?ސqDRZ1S3UbEi'HLyN7+P >ec IJ 5U4H20 Uϸ@QCG\}jHHHНF}[ ;֌%ݺeX k`AnnO6'OdOq%''sn߾]^7ofvex7ocvA Bg" )o(BA?ׯ6b НZDZRQ"#atY!Oz0tΝS!6Vw,-3yz$~]1 .2vj*736 3N9s`hȝ[wa_T-ӝ$CX:ې74!n0l,"")1c ~g&(QK"CjYe5klH!7Չݻu'ɰcGѯ$SڥQtȜ}ՂdسGݧ//i11؛Q7jd @kj~&I2,] C};|fL6B!u'x);Vw k2a;w_z8 7'^B!"&&˛7ac qWЩ֎U%Ēt'aRьrvQdNp:sFw VAм$e-|߿~|B!WN7>u$BB (|=ܺݻ3 i׭SW^ѝDB!o6lz XIIѝFySm,U>%Krɒ.f> Ϗf5k3X2KdU`AQCwaYiݾʇ,<;G/4|zϸ(5U9 4Fx>>0~bcuQ`2uMtX,Lt©M<敞~)n# B!<ƍILL4l\w kÔ);3Ϩtfr"L .Nntǖ}i3y)^|}Ĩ}.NR^q_[?]S/k9IɦB! o5t{x6mkW)?J޼;GVнC=l2y6-Z@^SX S')t'q[o/ZD:eTfЯs;$<f4OEa矇tN;;3L^g>n瞃F` vHdz8IIɦB!׿Ezzc6iȔ}ޭX,{6*d>sGwaݡuk)<1Nv?-[ LԪT)s/ڶ5CU=SUlz !BÇb~~~ԕB1cT'3ٰ֭ӝBkHUKw k7nBÆ~NnN(b]GjԤ ^^^Ð!а9}\S7qc)N؜ʒoߦK6[׭K92д8d.XBw aB%B0`᧼Zlc a`(YRw k+WwN! 
eNaҴ…B)Y[NRrI}_gax80WYZw ajU)_N1M¯aaN /O:e>_;0B!pS&LlJD֓/ZəSw ɪ,ѣ{Mܹu'p** IDATo{ſ|t'ɐ$n0`t~8r)(W7T`앖˖{d˦…u'ɐFݫ;{orV唱T)Zթoŋ;e^X,N"LD6B!Ж-[4iK/>6+_^-(ɕ+0mj-O2滦` НDأdIl&0{6>;)Y,̚ŲoqctŊ~]]SNN"Qu'ɐɓIko'ӹ3V@uM9m~%%^ǏN"LB6B!́ԩe ^{5|}} Wԭz0y2N"Qe&/k%I=~Z"4(uMEEY}y70zB.k Wڝ;1m6mru*WI R6bb`pAwa*UwM]mz۶ԩDfa/gNdPKVhbc4qlz !´ܹC~8o/C楗^͛-[6x ![CSX{hf+NR$$N"Ѭ;p5K[nz`\Bەb'u #:*_ڴ]NKLԝDh$^B!0.\HJxwb,RSSywر#I6BY>>>{N[a󳺑#Ui:3ٸ֭ӝBk0xXv]֬ѝBk仙lW?[L *?ƌaӁX,t۷x1 㬓?;z{3{P>p 4nl옎ڱVҝBثx\:W۵ ~C.:u ݺѾaCc+_|ի4 ?#>&zDD!W^e͚5YlٲQfM{9֭K:u(i IIIرM6i&b`A *Ă \>h\Q", Q} ѝFتY3ضt$NYԆJ0iTݧ-ҝP 7ovO ŊQD *,I%XO+Fqh89oO[x8lݚ-Zvƍa@u,"#a4Ӡ ~֘Źsjk r7oÇ@%RrRwcE p4畄EFoO˩S8{ѿZթûF?Y ӧ;#ΟW&TpB!GKOO_~UX1ԩCթT+VbŊq"͛7 ѣi !Bx8___V\ɘ1ctG¹jքѣuS'ͷuL .N"QZPn!{lۣ&L 6UZP6X6 "#u'(_|ի0cNhՊ&Q8~=ʔ1穸8uMEDN"@6B!ooajR=E ѥKɦB!?Ahh(M6Eڴѝ/2*)){tNa-4T]SoN"Ѿ)kЀ gtGɼ6mK)?J޼;Gн.^ԝm<6>7%CЫTIdK!ȑ'm64tzLmիu85ҝpON!Q`AM7F5ӝ`N"ѫڨn#@ ɓ\8׽P5aRHM՝DD6B!ŋSXKHPt'(\X-4mb"̛'ON")__NB@~fnծ/gAYQS 9sN!) .Gu'PT@F ~7=5,=%Jd9g|t'ɐ$AY B}|g\|/ݻL5[.} 8ooyQzIPc&.Ґ]UjԘ:ΟםDأR%ܧr 4>^ټhRF&jf DQ36Z w L{-65 ,ΧKBLlߞ}\9rhN|*Ub֐!D]1cPH+YהxpӺȦB!B9h۶-+V 66~cO29s[nT;Ν2sf=M-ZT.?n`7lV ˔D\*lR%tv{mtoB_=NoL0>/穏>N̜cǢmӦh۴) Dzͱ쎏˗WBӢn*uUu$mjl$ӧKA3(ծ]O>$|I@||<֭[7_~ݻ;ŋGӦMѮ];t ]tA"0MQyz$V+ÂE-65X ^]h0P^V;Ú5ҦzJ; zNB&^xAV nFE, GSRp$) RRp"5Gqlz:^8 ,BBBPLX\9 CԊTAԎDuQҡmv/}Qof$˗E?rŊo-7ވ;;C'NDj*8~4RΝ/-U,W!*-+zDjV*V*Q0-g8\))~ *,zQWT)n[}ѣػw/<#G->>O?\jժjժVШQ#4n 6dHCh)FNN#9sNC&1cgΕοӐh9O=a^;ÂҦxr?O}?nviS.i'!X""""""d MS-^ ,]LhFieKv_~)* NF[k[XH;zYSح\ ,\L=4оv UxqZcыBBd!յ8XY^; jNa7ovv 25~Z;7Nax1lv rE/"""""\ҡ!=x`^$d""B:KN␑!kڥL//m\9$.fɔO|ʔp$/KڶM; (YRTJI23e͛PyZU;Cv5X""""""ZFGu IN&OMSIDzҡHRSSC:u]F~Q`PvI:sF; h>\;2i$\2^vsL t额nF` 'G; 0U;ݦMҦ~ݵS +ID>@Ϟ)~U֍|Y; [;Νf*/NSǢ;-Zh3Qp3hR;ݗ_Kh SF[k+OSgS;݊…)3i[ X@;6 Q;ݚ5)<<jNa7o)Tt4Pv  NA(v ~L4hONAbbƍS-^ ,[L͚iSϵSi,zy!%Kj'qȐvNB&åM-Egv$dlY NpLmv2QJ8df~lެLJZU;CvLǺqv2Ԩ뵓Y,zyA='IDTtT`T!$dNkSgӦh'!5k{_Z0s&LT&b-}<}ID*r!QXvtw{Itv!""""" pw .!88uJ; h =Z;ѣ ɓIcj;~4 8qB; h\:ɉr:vL; h$TR|JLNB&5 >$E^DDDDDD _;]l\ML<0pv 8୷w >;if̐Q:|2D;ݡC2$dC`pvSOk'!m#Fh;rDfeHNNREDDDDDA.]Sm̝+P0@_e휬,$d)-[ٳ+WGzNa믲nID^@)v-32=}S#^`ыĘ12X YXX;=Z; $_}|v 25rLHV.NAyhN;ݪU)԰a@ǎ)֬>X;<I;ݺuy8EDDDDDd*&SG;݂k S11@Tv oNAƏ4Na'_k S11@),"=hY3vD^xhB;gɍ^DDDDDDålY$/ʴ<۷k'!eJ NpLImv2QJ8dfԙ[h'!RN␕%m꧟iSիk'q,6zz$d*&UK;ݼyڵ) ="*J$i ;:uM9L)ISIDFצI#GMSI -A<]hۿ9S~ӥ 0hv 8L{/0dv Cdd3ID)6uv2Ѷ-0bv #GHLi.!A Ni')X""""""NaϲIVv2ѿ?Эv -[d+WGzNaY~^޽SmsIG9Wݻwߕ(t&IlLɚaыȟFW_|L yv +ئ3i[ cdj0HX@;2DFu뀹s9B4HF9s$ ^DDDDDDDEi[P:)8EG˺dB)~QpsR#|ŋKSqd=@gr4f вv /s ^DDDDD$$D;Cٲҡ\v˗em۴ҥC9"B;CfLsev2DFj'qʒXI;  TI,KF_Lji7djxn]v,S/X""""""uՁ`tA$dV- $ii#IDҦ邋e}Hy N␞.k1j'!r*]Z;CFݻⴲe8\GHyƢQ~iRQ $ǎ''Nh'!-ZtO SML=L4j$ʁ)yKHNB&6L k'!QQޗ L*QEDDDDDtI\0sҡs!)P^t !KMNB&ڵyF;]|)ئӝw#GjKHbSIDV)6nNX""""""oݺiۼYFdfj'!> 衝 ޽^Sm.=]L)@{7pv 2խ| $2%kzv2)NNX""""""*#Gmh`"djm[T . h SC@evʋAd} ٸ;,$dbkWG,zY",\XL9f:d*&F $K_|L'˘1k[XX;5J;BE/""""`tv2Qt(GDh'qf*, ?Nbliv 2"յ8}v2 Ԭ Nv T N11@Tv Ѷm)DZGu IKfNB&W$IDdP$o9|*U%\Sxe8\(n߮L-+m*<\; ^DDDDDy`D V-Z/q`OfR$IIr:zT; h8 NIJHNB&4f >^; Tj*0mpv2QN)^DDDDD*.NFRi'!:k;xP:jΜNB&:v Naw0utRixv@rv2qȑ)N&Mb VZGkKL}Rv2qn-ȺL|zNam0kpv2ѻ7Ыv e=GNa{7@Fv2ѭпv 3ӵ NaL}v2ѥ 0hv 2ĢQ`"djm[vVL t蠝٦ِ!=h[7O;4Y;݆ ܹei'!J+3@Vv2ѿ)EDDDDT,ZXLi[X\;4Nax1)Tt4мv >>T;;V $Kʹ2a +OS#eZV *,zW4[j'!aaҡ\v,:y&$dX1)V!'#ず5S8X$d*&SG;݂2:SL nBEOixA g35&;'mj~$djUiSIeݜX$dR%iS%Kj'q}W.mlY$/ʚq;vh'!eʅDI._RmNB&J6ĢQar80aKY+|%%'Gj'!^:uJTBv2Ѡ\HRRɓx$d"*J:ԩIDݺצΜONB&j S^DDDDDi5aôS:$IDv3hLb;}V;]B0iLn .1Q.$JJNB&ZƌNaw\qv2Ѣ0nv ^DDDDDѶmו+ID@^)o.]NB&zNak{2=݁~SLLt n~`Ly$dK` vqq2utZv2q!) 
+el NÇk[X@;:vki SO> tn`\NaYYID@n)l23GzNAnEDDDDT-Z,_L4inb/Shysv}|v 25v,p).,NAFZNaWlSlHMv|#)8m\`ы0ʒ7mNB&EUN␓|I@͚), ?X^; Na7>fv 2 ԫn`*d*&__;…)TL Шv rE/""""9Yb~$djU:4T;Cz:;@lv2Qt(,iS{h'!ҦʔNpcv2Q/e`,Y7OҦ""8dfgt|BH$t 'GƍC9$%IJLNB&6 6 Lk'!7$ʁi`Ta$dn]P$gӧj'!jy*-M.NNB&W6¢QQqtj'!O?.><HINB&xYv RJNNB&ZFNaL z >-[ch;vL.8yR; h7N;ҦNB&6 8ہ.]NB&~G;ݮ]2XFv2ѽ;Яv ~[Ѥӵ+0`v M?Lt ..1CFP^``vӦhB >;Æi?EDDDDTԬ^-Sp:vkk SO> tn`\Naq#0gLu( 衝n6Y7e$dwoW/%Ke˴Shysv~ |v 25v,pm).sC;ݲe)smh`"djm[vVL t蠝cы(>P| ԨddITL Pv 5kS^=v H2~}v +Vh S11@F)//NAbbd/RâQQ.kj'!U%Jh'qpx`$dbE`xL$/ʚq;vh'!y|y$/g4b|J6!3S9ܺU;  "#8dei6i'!!!ҦUNRdEDDDDT%%'IDÆHIx$d⦛M> L6֕bj IMONB&jՒ@r,0cpv2QF൩sM߯LT*BCI,zu@Jv2qjKHL֭QS%&&HO˖W 9vLɓIm/qiSǏk'!͚E=L4nxm`ы]22݁}S#S^Lt D;1hV;ʕE)@)V,NAG;E9X0wСԮn|IH)&WO;}LEGk[XB;6Nah|v 25~<Фv ŋ/NA͵S ,z݅ Ӟ=IDŊQSveZ]r夣\9$.fL)#mbE$WȔt[j'!%JHJ$YY6i'!ŊIVM;CNLI@͚) =z#IM7Ir II6Lԫ'$5:8tH; ];ٳIDצΝ6v2QP$rqZlv2Q%j,zsGH"9Y; hF $ @Rv2qW 9vLɓImZqiSǏk'!͚^+) 88zT; h8pTbv2ѠA]VȰEDDDDD#W^L< )~8^; ~'SH4$dS'``vӧ/ >;Æi;tHFj'!O?.>^.NKINB&xY^DDDDD~ei'!O{?nvY%$dᇁ>}SLt망P I!! [TuꤝLt$${j'qKKYrOkvҹpUwHk,_(<)z*mNr%PTO;=$-[l.ҦxByAR矵8]+mœQ8Pw#S4]DDT[nw؁͛+&""opw""""""r[;E@DDDDDDDDDDDDX""""""""""""ǢPDDD-44իWTZ'"r.$D;QPbыo'NЎADei' """""" Jސ^DDDDDDDDDDDDX""""""""""""5 7-[j "ΝSKRB$IJR67NoHDDDDDDDDDDDDAE/"""""""""""" z,zQcы^DDDDDDDDDDDDX""""""""""""Ǣ^7o)>K,K;  _nۖ-!+gΨ+RSg幱,re)ŕ+=Ϟul "BQٶͿiiunq"#|Y_v @T?k+W+u++^#) XXؽطھV2@˖@vw:xWKO5 ѯ?Qlsp"p̕+u/ɑ8Ko67J{` 23QH/"""""MHv%[nڷ:u:wJNEqHqh0kyR+? 4mz99ԩJ( ,\h(Wr aaw]}uj'*8? ,]jX"?EDDDDD ׵*TF4NC Y``tO;&M#ٷ+_8}Z:"^W g`X3?tcY"?EDDDDDG9y[!X1U+vmvp0p:x5o IDATY? ݻh`N !ǰiWΔ, 4kvLCZ<HIcceJ <42k>ٰh??]>*S8!]];[䟗_D}{X"?EDDDDDykx10c}AqOT/Zv]f,^mX:AתYS^cxbSѣc+~]P6ަ}{`{w <>/_u> GZcܱ,_›JdVW֝;v.Y-S0֮^yE.pE/"^DDDDD'O=$x㣏}?ŋ^\hb鄧).h,z…siS̏o H7z,1?&;Zx}xncl*⡡Cߖi l` `<۱ED`ы))I9}vŋKtic][oy6,Lցjx^y?]Ϣ˒TZ!!qK/I~x1瑩__:Kl,pW^8/J˛QO> ̚Udnp>n.<T vy)x@ٲ̙z̔<(x$rrd7p^*UJ}utv;AQ#GpljyXR.͏L*TDD^`ы˛Nnz xM۝<)W`STFaYRz}(!kp?ǏiIǎu]No_d "աwըcwra'11jQ^DDDDD_tJGy>)p +2m+|t钿BB=jDDժy]dת0a231]QƑE/""""" ~!!)9O/LZrrG 5k _|_ . ] 﨣75ڵM;.Qcы =od ڵe-ğJUOHԙoN-LŊs渞ȑCDx ~}sg`24G{Go" ¢y1`ϢeZ̞z 8w'e\\+Ӧ9/!`wOM bEg!" ,zQq}ޭysgѲzv Vs+Wukws:vG|".pjT"иRS?Q`ы  *v;w G?ŋ@Lm &;/pN.D$-.]NQ5hvǎo" .5jxަNx뵘ܙ:}i:@nǕΝ[o/5H… …AD$X"""""¥re$%v Fof@Xt!cؿ !*ʎZR!,̻ʔDDA">Q*mr`>Μq#Lo-k:YiӀ,Yn!s p,dfv F̘~Zf &7J:u,DEՉ]\PRRy?GA9yx,M&W]cы] ̙hHMPj D!?Qzm€{q.]> XP:*ϟin[ÆI?\$?myΝGVI+w$+%JNSAu4_+5S/Xz6+K1{G M@TTgrt8_IO8r.۾\ܖ-rۺU:rG*z9#=+T5FNuodek>7˖-As^JJ JZ*?Ęw_̝+M<)[nG9_;+Kn?&#Cs/K\?eɱ}ֻ K [F}<4\"9s:;W\tiM) bN`t/{ 4l(ˀ;//y1ŋK9/תLGӦKz^."B>O ~|oj7}j>劏f̐zGx޾V-,Яй}Je x!/""""""|5iZ:S'oӽtZHO^xG'P駁G:Wqm7gؿ_%xWm&#~ܭg<1w.0bw&LmsٹSK<)R^m2``yyZN0{LG'^xAFe)~7k4^"&HqWw|!Рdns}3s옌8{,[A[xۥ潘 yeK#}y5˒"ʸqލvL)sW(QkgJNƏs­+M ^DDDDD_ZE}dOW(%KQd;iBj'dR:p@?k^_xK:-ҀÇ_~qbŀ[Fuڑke4D^DUKggVANޭ6n4+R=[: MRjVF awm/ =k浝3gYѕ+tLH 7:A)Δ//={w7n, oք_)n~d輸8amwuL`,ٗՓ%nv:߬,ߝ[-,kڻײRS-%JL%K,kWϙg9'mzW^of<*]Fs\?vF˪]u-eYVޮWc,;ֲBB{+]ڲ~-dI?[6I%'= '"W{%>52ɓ}>㏖U/^ܲzʲv1YYzeOaY[z>f?#߹ӲV b,k,~w._ߗ28IӊLǸxѲOwzRŲ?~ͲjԸ!!5`eΖ`v3-kYk=_y?eI1hв>Pk;fY/hYJ׿ZVNY9;ҤerۻײRR8zԲ>̲zsmʲ\=ו+u]}Y֫ZVFefZ\g?WU5ye9sRS-k4\YZR2珩Yxa}з-[ukIU/qPd6~ݻ}˒k{]}V=lMk3ҥiI]u#""""BGz}ez>ܹ{diTCWs}fv} Ov\ j_{4+;[F\zZвbc-)JOxZJF+}`L=zh'7QZ}|+9YF-gYrpսee*Vt g9{VTɻ,'OZ։^եeedF42,|W-#W9ڴIzVFxx]27(Zs,kx uݏ??ƫmۑ^չe *Wߞx²yIJuA5kzR<Сd;2RF b@?K&y!#rr,ǝ}ƍېs}Ϝ9{7@?ܷL%yy__>~פIgDDDDDTT++˲|S:p=ϗBL}\9ٖլ۷Ѳ ;n׷"UFNߟ׸qdjt=xPڬvV%W_yy iۿ쏢eYV^cRc-Q|]w_uؤ,qqBɓyuӦ{sVʽU:ǹsuM?&,̲vkYRwVȽM>Ms;} y=X[r`AOvL|mbŤM9f맞p:[1nY#ITQQ_Sڦ,G^1c=Vdӕ_ΞmÇNYrն-`y&3G﫯d:WS? 
- numpy - pandas >=1 - biom-format >=2.1.5,<2.2.0 - ijson - h5py - hdf5 {{ hdf5 }} - qiime2 {{ qiime2_epoch }}.* test: requires: - pytest - qiime2 >={{ qiime2 }} imports: - q2_types - qiime2.plugins.types about: home: https://qiime2.org license: BSD-3-Clause license_family: BSD q2-types-2021.8.0/q2_types/ q2-types-2021.8.0/q2_types/__init__.py # ---------------------------------------------------------------------------- # Copyright (c) 2016-2021, QIIME 2 development team. # # Distributed under the terms of the Modified BSD License. # # The full license is in the file LICENSE, distributed with this software.
# ---------------------------------------------------------------------------- import importlib from ._version import get_versions __version__ = get_versions()['version'] del get_versions # feature_data needs to be imported before feature_table to avoid circular # import. importlib.import_module('q2_types.feature_data') importlib.import_module('q2_types.feature_table') importlib.import_module('q2_types.distance_matrix') importlib.import_module('q2_types.tree') importlib.import_module('q2_types.ordination') importlib.import_module('q2_types.sample_data') importlib.import_module('q2_types.per_sample_sequences') importlib.import_module('q2_types.bowtie2') q2-types-2021.8.0/q2_types/_version.py000066400000000000000000000441051412142116700173770ustar00rootroot00000000000000 # This file helps to compute a version number in source trees obtained from # git-archive tarball (such as those provided by githubs download-from-tag # feature). Distribution tarballs (built by setup.py sdist) and build # directories (produced by setup.py build) will contain a much shorter file # that just contains the computed version number. # This file is released into the public domain. Generated by # versioneer-0.18 (https://github.com/warner/python-versioneer) """Git implementation of _version.py.""" import errno import os import re import subprocess import sys def get_keywords(): """Get the keywords needed to look up the version information.""" # these strings will be replaced by git during git-archive. # setup.py/versioneer.py will grep for the variable names, so they must # each be defined on a line of their own. _version.py will just call # get_keywords(). git_refnames = " (tag: 2021.8.0)" git_full = "6937d19a125fba82387c3ae3265dae827120d01d" git_date = "2021-09-09 18:35:29 +0000" keywords = {"refnames": git_refnames, "full": git_full, "date": git_date} return keywords class VersioneerConfig: """Container for Versioneer configuration parameters.""" def get_config(): """Create, populate and return the VersioneerConfig() object.""" # these strings are filled in when 'setup.py versioneer' creates # _version.py cfg = VersioneerConfig() cfg.VCS = "git" cfg.style = "pep440" cfg.tag_prefix = "" cfg.parentdir_prefix = "q2-types-" cfg.versionfile_source = "q2_types/_version.py" cfg.verbose = False return cfg class NotThisMethod(Exception): """Exception raised if a method is not valid for the current scenario.""" LONG_VERSION_PY = {} HANDLERS = {} def register_vcs_handler(vcs, method): # decorator """Decorator to mark a method as the handler for a particular VCS.""" def decorate(f): """Store f in HANDLERS[vcs][method].""" if vcs not in HANDLERS: HANDLERS[vcs] = {} HANDLERS[vcs][method] = f return f return decorate def run_command(commands, args, cwd=None, verbose=False, hide_stderr=False, env=None): """Call the given command(s).""" assert isinstance(commands, list) p = None for c in commands: try: dispcmd = str([c] + args) # remember shell=False, so use git.cmd on windows, not just git p = subprocess.Popen([c] + args, cwd=cwd, env=env, stdout=subprocess.PIPE, stderr=(subprocess.PIPE if hide_stderr else None)) break except EnvironmentError: e = sys.exc_info()[1] if e.errno == errno.ENOENT: continue if verbose: print("unable to run %s" % dispcmd) print(e) return None, None else: if verbose: print("unable to find command, tried %s" % (commands,)) return None, None stdout = p.communicate()[0].strip() if sys.version_info[0] >= 3: stdout = stdout.decode() if p.returncode != 0: if verbose: print("unable to run %s (error)" % 
dispcmd) print("stdout was %s" % stdout) return None, p.returncode return stdout, p.returncode def versions_from_parentdir(parentdir_prefix, root, verbose): """Try to determine the version from the parent directory name. Source tarballs conventionally unpack into a directory that includes both the project name and a version string. We will also support searching up two directory levels for an appropriately named parent directory """ rootdirs = [] for i in range(3): dirname = os.path.basename(root) if dirname.startswith(parentdir_prefix): return {"version": dirname[len(parentdir_prefix):], "full-revisionid": None, "dirty": False, "error": None, "date": None} else: rootdirs.append(root) root = os.path.dirname(root) # up a level if verbose: print("Tried directories %s but none started with prefix %s" % (str(rootdirs), parentdir_prefix)) raise NotThisMethod("rootdir doesn't start with parentdir_prefix") @register_vcs_handler("git", "get_keywords") def git_get_keywords(versionfile_abs): """Extract version information from the given file.""" # the code embedded in _version.py can just fetch the value of these # keywords. When used from setup.py, we don't want to import _version.py, # so we do it with a regexp instead. This function is not used from # _version.py. keywords = {} try: f = open(versionfile_abs, "r") for line in f.readlines(): if line.strip().startswith("git_refnames ="): mo = re.search(r'=\s*"(.*)"', line) if mo: keywords["refnames"] = mo.group(1) if line.strip().startswith("git_full ="): mo = re.search(r'=\s*"(.*)"', line) if mo: keywords["full"] = mo.group(1) if line.strip().startswith("git_date ="): mo = re.search(r'=\s*"(.*)"', line) if mo: keywords["date"] = mo.group(1) f.close() except EnvironmentError: pass return keywords @register_vcs_handler("git", "keywords") def git_versions_from_keywords(keywords, tag_prefix, verbose): """Get version information from git keywords.""" if not keywords: raise NotThisMethod("no keywords at all, weird") date = keywords.get("date") if date is not None: # git-2.2.0 added "%cI", which expands to an ISO-8601 -compliant # datestamp. However we prefer "%ci" (which expands to an "ISO-8601 # -like" string, which we must then edit to make compliant), because # it's been around since git-1.5.3, and it's too difficult to # discover which version we're using, or to work around using an # older one. date = date.strip().replace(" ", "T", 1).replace(" ", "", 1) refnames = keywords["refnames"].strip() if refnames.startswith("$Format"): if verbose: print("keywords are unexpanded, not using") raise NotThisMethod("unexpanded keywords, not a git-archive tarball") refs = set([r.strip() for r in refnames.strip("()").split(",")]) # starting in git-1.8.3, tags are listed as "tag: foo-1.0" instead of # just "foo-1.0". If we see a "tag: " prefix, prefer those. TAG = "tag: " tags = set([r[len(TAG):] for r in refs if r.startswith(TAG)]) if not tags: # Either we're using git < 1.8.3, or there really are no tags. We use # a heuristic: assume all version tags have a digit. The old git %d # expansion behaves like git log --decorate=short and strips out the # refs/heads/ and refs/tags/ prefixes that would let us distinguish # between branches and tags. By ignoring refnames without digits, we # filter out many common branch names like "release" and # "stabilization", as well as "HEAD" and "master". 
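# For example, given refs like {'HEAD', 'master', 'release', '2021.8.0'}, only # '2021.8.0' contains a digit and so survives this fallback filter.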
tags = set([r for r in refs if re.search(r'\d', r)]) if verbose: print("discarding '%s', no digits" % ",".join(refs - tags)) if verbose: print("likely tags: %s" % ",".join(sorted(tags))) for ref in sorted(tags): # sorting will prefer e.g. "2.0" over "2.0rc1" if ref.startswith(tag_prefix): r = ref[len(tag_prefix):] if verbose: print("picking %s" % r) return {"version": r, "full-revisionid": keywords["full"].strip(), "dirty": False, "error": None, "date": date} # no suitable tags, so version is "0+unknown", but full hex is still there if verbose: print("no suitable tags, using unknown + full revision id") return {"version": "0+unknown", "full-revisionid": keywords["full"].strip(), "dirty": False, "error": "no suitable tags", "date": None} @register_vcs_handler("git", "pieces_from_vcs") def git_pieces_from_vcs(tag_prefix, root, verbose, run_command=run_command): """Get version from 'git describe' in the root of the source tree. This only gets called if the git-archive 'subst' keywords were *not* expanded, and _version.py hasn't already been rewritten with a short version string, meaning we're inside a checked out source tree. """ GITS = ["git"] if sys.platform == "win32": GITS = ["git.cmd", "git.exe"] out, rc = run_command(GITS, ["rev-parse", "--git-dir"], cwd=root, hide_stderr=True) if rc != 0: if verbose: print("Directory %s not under git control" % root) raise NotThisMethod("'git rev-parse --git-dir' returned error") # if there is a tag matching tag_prefix, this yields TAG-NUM-gHEX[-dirty] # if there isn't one, this yields HEX[-dirty] (no NUM) describe_out, rc = run_command(GITS, ["describe", "--tags", "--dirty", "--always", "--long", "--match", "%s*" % tag_prefix], cwd=root) # --long was added in git-1.5.5 if describe_out is None: raise NotThisMethod("'git describe' failed") describe_out = describe_out.strip() full_out, rc = run_command(GITS, ["rev-parse", "HEAD"], cwd=root) if full_out is None: raise NotThisMethod("'git rev-parse' failed") full_out = full_out.strip() pieces = {} pieces["long"] = full_out pieces["short"] = full_out[:7] # maybe improved later pieces["error"] = None # parse describe_out. It will be like TAG-NUM-gHEX[-dirty] or HEX[-dirty] # TAG might have hyphens. git_describe = describe_out # look for -dirty suffix dirty = git_describe.endswith("-dirty") pieces["dirty"] = dirty if dirty: git_describe = git_describe[:git_describe.rindex("-dirty")] # now we have TAG-NUM-gHEX or HEX if "-" in git_describe: # TAG-NUM-gHEX mo = re.search(r'^(.+)-(\d+)-g([0-9a-f]+)$', git_describe) if not mo: # unparseable. Maybe git-describe is misbehaving? 
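# (reached when describe_out contains '-' but does not match the expected # TAG-NUM-gHEX pattern, so no tag, distance, or hash can be recovered from it)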
pieces["error"] = ("unable to parse git-describe output: '%s'" % describe_out) return pieces # tag full_tag = mo.group(1) if not full_tag.startswith(tag_prefix): if verbose: fmt = "tag '%s' doesn't start with prefix '%s'" print(fmt % (full_tag, tag_prefix)) pieces["error"] = ("tag '%s' doesn't start with prefix '%s'" % (full_tag, tag_prefix)) return pieces pieces["closest-tag"] = full_tag[len(tag_prefix):] # distance: number of commits since tag pieces["distance"] = int(mo.group(2)) # commit: short hex revision ID pieces["short"] = mo.group(3) else: # HEX: no tags pieces["closest-tag"] = None count_out, rc = run_command(GITS, ["rev-list", "HEAD", "--count"], cwd=root) pieces["distance"] = int(count_out) # total number of commits # commit date: see ISO-8601 comment in git_versions_from_keywords() date = run_command(GITS, ["show", "-s", "--format=%ci", "HEAD"], cwd=root)[0].strip() pieces["date"] = date.strip().replace(" ", "T", 1).replace(" ", "", 1) return pieces def plus_or_dot(pieces): """Return a + if we don't already have one, else return a .""" if "+" in pieces.get("closest-tag", ""): return "." return "+" def render_pep440(pieces): """Build up version string, with post-release "local version identifier". Our goal: TAG[+DISTANCE.gHEX[.dirty]] . Note that if you get a tagged build and then dirty it, you'll get TAG+0.gHEX.dirty Exceptions: 1: no tags. git_describe was just HEX. 0+untagged.DISTANCE.gHEX[.dirty] """ if pieces["closest-tag"]: rendered = pieces["closest-tag"] if pieces["distance"] or pieces["dirty"]: rendered += plus_or_dot(pieces) rendered += "%d.g%s" % (pieces["distance"], pieces["short"]) if pieces["dirty"]: rendered += ".dirty" else: # exception #1 rendered = "0+untagged.%d.g%s" % (pieces["distance"], pieces["short"]) if pieces["dirty"]: rendered += ".dirty" return rendered def render_pep440_pre(pieces): """TAG[.post.devDISTANCE] -- No -dirty. Exceptions: 1: no tags. 0.post.devDISTANCE """ if pieces["closest-tag"]: rendered = pieces["closest-tag"] if pieces["distance"]: rendered += ".post.dev%d" % pieces["distance"] else: # exception #1 rendered = "0.post.dev%d" % pieces["distance"] return rendered def render_pep440_post(pieces): """TAG[.postDISTANCE[.dev0]+gHEX] . The ".dev0" means dirty. Note that .dev0 sorts backwards (a dirty tree will appear "older" than the corresponding clean one), but you shouldn't be releasing software with -dirty anyways. Exceptions: 1: no tags. 0.postDISTANCE[.dev0] """ if pieces["closest-tag"]: rendered = pieces["closest-tag"] if pieces["distance"] or pieces["dirty"]: rendered += ".post%d" % pieces["distance"] if pieces["dirty"]: rendered += ".dev0" rendered += plus_or_dot(pieces) rendered += "g%s" % pieces["short"] else: # exception #1 rendered = "0.post%d" % pieces["distance"] if pieces["dirty"]: rendered += ".dev0" rendered += "+g%s" % pieces["short"] return rendered def render_pep440_old(pieces): """TAG[.postDISTANCE[.dev0]] . The ".dev0" means dirty. Eexceptions: 1: no tags. 0.postDISTANCE[.dev0] """ if pieces["closest-tag"]: rendered = pieces["closest-tag"] if pieces["distance"] or pieces["dirty"]: rendered += ".post%d" % pieces["distance"] if pieces["dirty"]: rendered += ".dev0" else: # exception #1 rendered = "0.post%d" % pieces["distance"] if pieces["dirty"]: rendered += ".dev0" return rendered def render_git_describe(pieces): """TAG[-DISTANCE-gHEX][-dirty]. Like 'git describe --tags --dirty --always'. Exceptions: 1: no tags. 
HEX[-dirty] (note: no 'g' prefix) """ if pieces["closest-tag"]: rendered = pieces["closest-tag"] if pieces["distance"]: rendered += "-%d-g%s" % (pieces["distance"], pieces["short"]) else: # exception #1 rendered = pieces["short"] if pieces["dirty"]: rendered += "-dirty" return rendered def render_git_describe_long(pieces): """TAG-DISTANCE-gHEX[-dirty]. Like 'git describe --tags --dirty --always -long'. The distance/hash is unconditional. Exceptions: 1: no tags. HEX[-dirty] (note: no 'g' prefix) """ if pieces["closest-tag"]: rendered = pieces["closest-tag"] rendered += "-%d-g%s" % (pieces["distance"], pieces["short"]) else: # exception #1 rendered = pieces["short"] if pieces["dirty"]: rendered += "-dirty" return rendered def render(pieces, style): """Render the given version pieces into the requested style.""" if pieces["error"]: return {"version": "unknown", "full-revisionid": pieces.get("long"), "dirty": None, "error": pieces["error"], "date": None} if not style or style == "default": style = "pep440" # the default if style == "pep440": rendered = render_pep440(pieces) elif style == "pep440-pre": rendered = render_pep440_pre(pieces) elif style == "pep440-post": rendered = render_pep440_post(pieces) elif style == "pep440-old": rendered = render_pep440_old(pieces) elif style == "git-describe": rendered = render_git_describe(pieces) elif style == "git-describe-long": rendered = render_git_describe_long(pieces) else: raise ValueError("unknown style '%s'" % style) return {"version": rendered, "full-revisionid": pieces["long"], "dirty": pieces["dirty"], "error": None, "date": pieces.get("date")} def get_versions(): """Get version information or return default if unable to do so.""" # I am in _version.py, which lives at ROOT/VERSIONFILE_SOURCE. If we have # __file__, we can work backwards from there to the root. Some # py2exe/bbfreeze/non-CPython implementations don't do __file__, in which # case we can only use expanded keywords. cfg = get_config() verbose = cfg.verbose try: return git_versions_from_keywords(get_keywords(), cfg.tag_prefix, verbose) except NotThisMethod: pass try: root = os.path.realpath(__file__) # versionfile_source is the relative path from the top of the source # tree (where the .git directory might live) to this file. Invert # this to find the root from __file__. for i in cfg.versionfile_source.split('/'): root = os.path.dirname(root) except NameError: return {"version": "0+unknown", "full-revisionid": None, "dirty": None, "error": "unable to find root of source tree", "date": None} try: pieces = git_pieces_from_vcs(cfg.tag_prefix, root, verbose) return render(pieces, cfg.style) except NotThisMethod: pass try: if cfg.parentdir_prefix: return versions_from_parentdir(cfg.parentdir_prefix, root, verbose) except NotThisMethod: pass return {"version": "0+unknown", "full-revisionid": None, "dirty": None, "error": "unable to compute version", "date": None} q2-types-2021.8.0/q2_types/bowtie2/000077500000000000000000000000001412142116700165505ustar00rootroot00000000000000q2-types-2021.8.0/q2_types/bowtie2/__init__.py000066400000000000000000000010171412142116700206600ustar00rootroot00000000000000# ---------------------------------------------------------------------------- # Copyright (c) 2016-2021, QIIME 2 development team. # # Distributed under the terms of the Modified BSD License. # # The full license is in the file LICENSE, distributed with this software. 
# ---------------------------------------------------------------------------- from ._formats import (Bowtie2IndexFileFormat, Bowtie2IndexDirFmt) from ._types import Bowtie2Index __all__ = ['Bowtie2IndexFileFormat', 'Bowtie2IndexDirFmt', 'Bowtie2Index'] q2-types-2021.8.0/q2_types/bowtie2/_formats.py000066400000000000000000000036001412142116700207330ustar00rootroot00000000000000# ---------------------------------------------------------------------------- # Copyright (c) 2016-2021, QIIME 2 development team. # # Distributed under the terms of the Modified BSD License. # # The full license is in the file LICENSE, distributed with this software. # ---------------------------------------------------------------------------- import itertools from qiime2.plugin import model from ..plugin_setup import plugin, citations class Bowtie2IndexFileFormat(model.BinaryFileFormat): def _validate_(self, level): # It's not clear if there is any way to tell if a Bowtie2 index is # correct or not. # bowtie2 does have an inspect method — this inspects at the dir level # not on the file level. # may also want to validate that all files have the same basename pass class Bowtie2IndexDirFmt(model.DirectoryFormat): idx1 = model.File(r'.+(? LSMatFormat: ff = LSMatFormat() with ff.open() as fh: data.write(fh, format='lsmat') return ff @plugin.register_transformer def _2(ff: LSMatFormat) -> skbio.DistanceMatrix: return skbio.DistanceMatrix.read(str(ff), format='lsmat', verify=False) q2-types-2021.8.0/q2_types/distance_matrix/_type.py000066400000000000000000000012471412142116700220510ustar00rootroot00000000000000# ---------------------------------------------------------------------------- # Copyright (c) 2016-2021, QIIME 2 development team. # # Distributed under the terms of the Modified BSD License. # # The full license is in the file LICENSE, distributed with this software. # ---------------------------------------------------------------------------- from qiime2.plugin import SemanticType from ..plugin_setup import plugin from . import DistanceMatrixDirectoryFormat DistanceMatrix = SemanticType('DistanceMatrix') plugin.register_semantic_types(DistanceMatrix) plugin.register_semantic_type_to_format( DistanceMatrix, artifact_format=DistanceMatrixDirectoryFormat ) q2-types-2021.8.0/q2_types/distance_matrix/tests/000077500000000000000000000000001412142116700215155ustar00rootroot00000000000000q2-types-2021.8.0/q2_types/distance_matrix/tests/__init__.py000066400000000000000000000005351412142116700236310ustar00rootroot00000000000000# ---------------------------------------------------------------------------- # Copyright (c) 2016-2021, QIIME 2 development team. # # Distributed under the terms of the Modified BSD License. # # The full license is in the file LICENSE, distributed with this software. 
# ---------------------------------------------------------------------------- q2-types-2021.8.0/q2_types/distance_matrix/tests/data/000077500000000000000000000000001412142116700224265ustar00rootroot00000000000000q2-types-2021.8.0/q2_types/distance_matrix/tests/data/distance-matrix-1x1.tsv000066400000000000000000000000131412142116700266610ustar00rootroot00000000000000 s1 s1 0.0 q2-types-2021.8.0/q2_types/distance_matrix/tests/data/distance-matrix-2x2.tsv000066400000000000000000000000371412142116700266710ustar00rootroot00000000000000 s1 s2 s1 0.0 42.0 s2 42.0 0.0 q2-types-2021.8.0/q2_types/distance_matrix/tests/data/distance-matrix-NxN.tsv000066400000000000000000000020751412142116700267650ustar00rootroot00000000000000 f2 f1 f3 f4 p2 p1 t1 t2 f2 0.0 0.2600956552695528 0.2600956552695528 0.5255545064776262 0.6381417932050844 0.6492777579705917 0.8799013499926959 0.8799013499926959 f1 0.2600956552695528 0.0 0.0 0.3587745538982909 0.7486973341403722 0.8308268239717369 0.8596322074582944 0.8596322074582944 f3 0.2600956552695528 0.0 0.0 0.3587745538982909 0.7486973341403722 0.8308268239717369 0.8596322074582944 0.8596322074582944 f4 0.5255545064776262 0.3587745538982909 0.3587745538982909 0.0 0.7847285623990449 0.7923509949143664 0.9285079089827815 0.9285079089827815 p2 0.6381417932050844 0.7486973341403722 0.7486973341403722 0.7847285623990449 0.0 0.5757110752159563 0.5383102745979352 0.5383102745979352 p1 0.6492777579705917 0.8308268239717369 0.8308268239717369 0.7923509949143664 0.5757110752159563 0.0 0.7223049343704425 0.7223049343704425 t1 0.8799013499926959 0.8596322074582944 0.8596322074582944 0.9285079089827815 0.5383102745979352 0.7223049343704425 0.0 0.0 t2 0.8799013499926959 0.8596322074582944 0.8596322074582944 0.9285079089827815 0.5383102745979352 0.7223049343704425 0.0 0.0 q2-types-2021.8.0/q2_types/distance_matrix/tests/data/not-lsmat000066400000000000000000000000501412142116700242620ustar00rootroot00000000000000# Hello, World! I am not an lsmat file. q2-types-2021.8.0/q2_types/distance_matrix/tests/test_format.py000066400000000000000000000035371412142116700244260ustar00rootroot00000000000000# ---------------------------------------------------------------------------- # Copyright (c) 2016-2021, QIIME 2 development team. # # Distributed under the terms of the Modified BSD License. # # The full license is in the file LICENSE, distributed with this software. # ---------------------------------------------------------------------------- import os.path import shutil import unittest from q2_types.distance_matrix import LSMatFormat, DistanceMatrixDirectoryFormat from qiime2.plugin.testing import TestPluginBase from qiime2.plugin import ValidationError class TestFormats(TestPluginBase): package = 'q2_types.distance_matrix.tests' def test_lsmat_format_validate_positive(self): filenames = ('distance-matrix-1x1.tsv', 'distance-matrix-2x2.tsv', 'distance-matrix-NxN.tsv') for filename in filenames: filepath = self.get_data_path(filename) format = LSMatFormat(filepath, mode='r') # Should not error. format.validate() def test_lsmat_format_validate_negative(self): filepath = self.get_data_path('not-lsmat') format = LSMatFormat(filepath, mode='r') with self.assertRaisesRegex(ValidationError, 'LSMat'): format.validate() def test_distance_matrix_directory_format(self): # This test exists mainly to assert that the single-file directory # format is defined and functional. More extensive testing is performed # on its underlying format (LSMatFormat). 
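# Copy the NxN LSMat file into a temporary directory under the file name the # single-file directory format uses ('distance-matrix.tsv'); validation of the # resulting directory should then succeed.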
filepath = self.get_data_path('distance-matrix-NxN.tsv') shutil.copy(filepath, os.path.join(self.temp_dir.name, 'distance-matrix.tsv')) format = DistanceMatrixDirectoryFormat(self.temp_dir.name, mode='r') # Should not error. format.validate() if __name__ == "__main__": unittest.main() q2-types-2021.8.0/q2_types/distance_matrix/tests/test_transformer.py000066400000000000000000000030471412142116700254740ustar00rootroot00000000000000# ---------------------------------------------------------------------------- # Copyright (c) 2016-2021, QIIME 2 development team. # # Distributed under the terms of the Modified BSD License. # # The full license is in the file LICENSE, distributed with this software. # ---------------------------------------------------------------------------- import unittest import skbio from q2_types.distance_matrix import LSMatFormat from qiime2.plugin.testing import TestPluginBase class TestTransformers(TestPluginBase): package = 'q2_types.distance_matrix.tests' def test_skbio_distance_matrix_to_lsmat_format(self): transformer = self.get_transformer(skbio.DistanceMatrix, LSMatFormat) filenames = ('distance-matrix-1x1.tsv', 'distance-matrix-2x2.tsv', 'distance-matrix-NxN.tsv') for filename in filenames: input = skbio.DistanceMatrix.read(self.get_data_path(filename)) obs = transformer(input) obs = skbio.DistanceMatrix.read(str(obs)) exp = input self.assertEqual(obs, exp) def test_lsmat_format_to_skbio_distance_matrix(self): filenames = ('distance-matrix-1x1.tsv', 'distance-matrix-2x2.tsv', 'distance-matrix-NxN.tsv') for filename in filenames: input, obs = self.transform_format( LSMatFormat, skbio.DistanceMatrix, filename=filename) exp = skbio.DistanceMatrix.read(str(input)) self.assertEqual(obs, exp) if __name__ == "__main__": unittest.main() q2-types-2021.8.0/q2_types/distance_matrix/tests/test_type.py000066400000000000000000000017201412142116700241070ustar00rootroot00000000000000# ---------------------------------------------------------------------------- # Copyright (c) 2016-2021, QIIME 2 development team. # # Distributed under the terms of the Modified BSD License. # # The full license is in the file LICENSE, distributed with this software. # ---------------------------------------------------------------------------- import unittest from q2_types.distance_matrix import (DistanceMatrix, DistanceMatrixDirectoryFormat) from qiime2.plugin.testing import TestPluginBase class TestTypes(TestPluginBase): package = 'q2_types.distance_matrix.tests' def test_distance_matrix_semantic_type_registration(self): self.assertRegisteredSemanticType(DistanceMatrix) def test_distance_matrix_semantic_type_to_format_registration(self): self.assertSemanticTypeRegisteredToFormat( DistanceMatrix, DistanceMatrixDirectoryFormat) if __name__ == "__main__": unittest.main() q2-types-2021.8.0/q2_types/feature_data/000077500000000000000000000000001412142116700176215ustar00rootroot00000000000000q2-types-2021.8.0/q2_types/feature_data/__init__.py000066400000000000000000000055011412142116700217330ustar00rootroot00000000000000# ---------------------------------------------------------------------------- # Copyright (c) 2016-2021, QIIME 2 development team. # # Distributed under the terms of the Modified BSD License. # # The full license is in the file LICENSE, distributed with this software. 
# ---------------------------------------------------------------------------- import importlib from ._format import ( TaxonomyFormat, TaxonomyDirectoryFormat, HeaderlessTSVTaxonomyFormat, HeaderlessTSVTaxonomyDirectoryFormat, TSVTaxonomyFormat, TSVTaxonomyDirectoryFormat, DNAFASTAFormat, DNASequencesDirectoryFormat, PairedDNASequencesDirectoryFormat, AlignedDNAFASTAFormat, AlignedDNASequencesDirectoryFormat, DifferentialFormat, DifferentialDirectoryFormat, FASTAFormat, AlignedFASTAFormatMixin, AlignedProteinSequencesDirectoryFormat, ProteinSequencesDirectoryFormat, ProteinFASTAFormat, AlignedProteinFASTAFormat, RNASequencesDirectoryFormat, RNAFASTAFormat, AlignedRNAFASTAFormat, AlignedRNASequencesDirectoryFormat, PairedRNASequencesDirectoryFormat, BLAST6Format, BLAST6DirectoryFormat) from ._type import ( FeatureData, Taxonomy, Sequence, PairedEndSequence, AlignedSequence, Differential, ProteinSequence, AlignedProteinSequence, RNASequence, AlignedRNASequence, PairedEndRNASequence, BLAST6) # TODO remove these imports when tests are rewritten. Remove from __all__ too from ._transformer import ( NucleicAcidIterator, DNAIterator, PairedDNAIterator, AlignedDNAIterator, ProteinIterator, AlignedProteinIterator, RNAIterator, AlignedRNAIterator, PairedRNAIterator) __all__ = [ 'TaxonomyFormat', 'TaxonomyDirectoryFormat', 'HeaderlessTSVTaxonomyFormat', 'HeaderlessTSVTaxonomyDirectoryFormat', 'TSVTaxonomyFormat', 'TSVTaxonomyDirectoryFormat', 'DNAFASTAFormat', 'DifferentialFormat', 'DNASequencesDirectoryFormat', 'PairedDNASequencesDirectoryFormat', 'AlignedDNAFASTAFormat', 'AlignedDNASequencesDirectoryFormat', 'FeatureData', 'Taxonomy', 'Sequence', 'PairedEndSequence', 'AlignedSequence', 'NucleicAcidIterator', 'DNAIterator', 'PairedDNAIterator', 'FASTAFormat', 'AlignedDNAIterator', 'Differential', 'DifferentialDirectoryFormat', 'AlignedFASTAFormatMixin', 'ProteinFASTAFormat', 'ProteinSequence', 'AlignedProteinFASTAFormat', 'ProteinSequencesDirectoryFormat', 'AlignedProteinSequence', 'AlignedProteinSequencesDirectoryFormat', 'ProteinIterator', 'AlignedProteinIterator', 'RNAIterator', 'AlignedRNAIterator', 'RNAFASTAFormat', 'AlignedRNAFASTAFormat', 'RNASequencesDirectoryFormat', 'AlignedRNASequencesDirectoryFormat', 'RNASequence', 'AlignedRNASequence', 'PairedRNAIterator', 'PairedRNASequencesDirectoryFormat', 'PairedEndRNASequence', 'BLAST6Format', 'BLAST6DirectoryFormat', 'BLAST6'] importlib.import_module('q2_types.feature_data._transformer') q2-types-2021.8.0/q2_types/feature_data/_format.py000066400000000000000000000351321412142116700216260ustar00rootroot00000000000000# ---------------------------------------------------------------------------- # Copyright (c) 2016-2021, QIIME 2 development team. # # Distributed under the terms of the Modified BSD License. # # The full license is in the file LICENSE, distributed with this software. # ---------------------------------------------------------------------------- import re import pandas as pd import skbio import qiime2.plugin.model as model from qiime2.plugin import ValidationError import qiime2 from ..plugin_setup import plugin class TaxonomyFormat(model.TextFileFormat): """Legacy format for any 2+ column TSV file, with or without a header. This format has been superseded by taxonomy file formats explicitly with and without headers, `TSVTaxonomyFormat` and `HeaderlessTSVTaxonomyFormat`, respectively. This format remains in place for backwards-compatibility. 
Transformers are intentionally not hooked up to transform this format into the canonical .qza format (`TSVTaxonomyFormat`) to prevent users from importing data in this format. Transformers will remain in place to transform this format into in-memory Python objects (e.g. `pd.Series`) so that existing .qza files can still be loaded and processed. The only header recognized by this format is: Feature IDTaxon Optionally followed by other arbitrary columns. If this header isn't present, the format is assumed to be headerless. This format supports comment lines starting with #, and blank lines. """ def sniff(self): with self.open() as fh: count = 0 while count < 10: line = fh.readline() if line == '': # EOF break elif line.lstrip(' ') == '\n': # Blank line continue else: cells = line.split('\t') if len(cells) < 2: return False count += 1 return False if count == 0 else True TaxonomyDirectoryFormat = model.SingleFileDirectoryFormat( 'TaxonomyDirectoryFormat', 'taxonomy.tsv', TaxonomyFormat) class HeaderlessTSVTaxonomyFormat(TaxonomyFormat): """Format for a 2+ column TSV file without a header. This format supports comment lines starting with #, and blank lines. """ pass HeaderlessTSVTaxonomyDirectoryFormat = model.SingleFileDirectoryFormat( 'HeaderlessTSVTaxonomyDirectoryFormat', 'taxonomy.tsv', HeaderlessTSVTaxonomyFormat) class TSVTaxonomyFormat(model.TextFileFormat): """Format for a 2+ column TSV file with an expected minimal header. The only header recognized by this format is: Feature IDTaxon Optionally followed by other arbitrary columns. This format supports blank lines. The expected header must be the first non-blank line. In addition to the header, there must be at least one line of data. """ HEADER = ['Feature ID', 'Taxon'] def _check_n_records(self, n=None): with self.open() as fh: data_line_count = 0 header = None file_ = enumerate(fh) if n is None else zip(range(n), fh) for i, line in file_: # Tracks line number for error reporting i = i + 1 if line.lstrip(' ') == '\n': # Blank line continue cells = line.strip('\n').split('\t') if header is None: if cells[:2] != self.HEADER: raise ValidationError( '%s must be the first two header values. The ' 'first two header values provided are: %s (on ' 'line %s).' % (self.HEADER, cells[:2], i)) header = cells else: if len(cells) != len(header): raise ValidationError( 'Number of values on line %s are not the same as ' 'number of header values. Found %s values ' '(%s), expected %s.' 
% (i, len(cells), cells, len(self.HEADER))) data_line_count += 1 if data_line_count == 0: raise ValidationError('No taxonomy records found, only blank ' 'lines and/or a header row.') def _validate_(self, level): self._check_n_records(n={'min': 10, 'max': None}[level]) TSVTaxonomyDirectoryFormat = model.SingleFileDirectoryFormat( 'TSVTaxonomyDirectoryFormat', 'taxonomy.tsv', TSVTaxonomyFormat) class FASTAFormat(model.TextFileFormat): def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) self.aligned = False self.alphabet = None def _validate_(self, level): FASTAValidator, ValidationSet = _construct_validator_from_alphabet( self.alphabet) self._validate_FASTA(level, FASTAValidator, ValidationSet) def _validate_FASTA(self, level, FASTAValidator=None, ValidationSet=None): last_line_was_ID = False ids = {} seq_len = 0 prev_seq_len = 0 prev_seq_start_line = 0 level_map = {'min': 100, 'max': float('inf')} max_lines = level_map[level] with self.path.open('rb') as fh: try: first = fh.read(6) if first[:3] == b'\xEF\xBB\xBF': first = first[3:] # Empty files should validate if first.strip() == b'': return if first[0] != ord(b'>'): raise ValidationError("First line of file is not a valid " "description. Descriptions must " "start with '>'") fh.seek(0) for line_number, line in enumerate(fh, 1): line = line.strip() if line_number >= max_lines: return line = line.decode('utf-8-sig') if line.startswith('>'): if FASTAValidator and ValidationSet: if seq_len == 0: seq_len = prev_seq_len if self.aligned: self._validate_line_lengths( seq_len, prev_seq_len, prev_seq_start_line) prev_seq_len = 0 prev_seq_start_line = 0 if last_line_was_ID: raise ValidationError('Multiple consecutive ' 'descriptions starting on ' f'line {line_number-1!r}') line = line.split() if line[0] == '>': if len(line) == 1: raise ValidationError( f'Description on line {line_number} is ' 'missing an ID.') else: raise ValidationError( f'ID on line {line_number} starts with a ' 'space. IDs may not start with spaces') if line[0] in ids: raise ValidationError( f'ID on line {line_number} is a duplicate of ' f'another ID on line {ids[line[0]]}.') ids[line[0]] = line_number last_line_was_ID = True elif FASTAValidator and ValidationSet: if re.fullmatch(FASTAValidator, line): if prev_seq_start_line == 0: prev_seq_start_line = line_number prev_seq_len += len(line) last_line_was_ID = False else: for position, character in enumerate(line): if character not in ValidationSet: raise ValidationError( f"Invalid character '{character}' at " f"position {position} on line " f"{line_number} (does not match IUPAC " "characters for this sequence type). " "Allowed characters are " f"{self.alphabet}.") else: last_line_was_ID = False except UnicodeDecodeError as e: raise ValidationError(f'utf-8 cannot decode byte on line ' f'{line_number}') from e if self.aligned: self._validate_line_lengths( seq_len, prev_seq_len, prev_seq_start_line) class AlignedFASTAFormatMixin: def _turn_into_alignment(self): self.aligned = True self.alphabet = self.alphabet + ".-" def _validate_line_lengths( self, seq_len, prev_seq_len, prev_seq_start_line): if prev_seq_len != seq_len: raise ValidationError('The sequence starting on line ' f'{prev_seq_start_line} was length ' f'{prev_seq_len}. All previous sequences ' f'were length {seq_len}. 
All sequences must ' 'be the same length for AlignedFASTAFormat.') class DNAFASTAFormat(FASTAFormat): def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) self.alphabet = "ACGTRYKMSWBDHVN" DNASequencesDirectoryFormat = model.SingleFileDirectoryFormat( 'DNASequencesDirectoryFormat', 'dna-sequences.fasta', DNAFASTAFormat) class RNAFASTAFormat(FASTAFormat): def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) self.alphabet = "ACGURYKMSWBDHVN" RNASequencesDirectoryFormat = model.SingleFileDirectoryFormat( 'RNASequencesDirectoryFormat', 'rna-sequences.fasta', RNAFASTAFormat) class PairedDNASequencesDirectoryFormat(model.DirectoryFormat): left_dna_sequences = model.File('left-dna-sequences.fasta', format=DNAFASTAFormat) right_dna_sequences = model.File('right-dna-sequences.fasta', format=DNAFASTAFormat) class PairedRNASequencesDirectoryFormat(model.DirectoryFormat): left_rna_sequences = model.File('left-rna-sequences.fasta', format=RNAFASTAFormat) right_rna_sequences = model.File('right-rna-sequences.fasta', format=RNAFASTAFormat) class AlignedDNAFASTAFormat(AlignedFASTAFormatMixin, DNAFASTAFormat): def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) super()._turn_into_alignment() AlignedDNASequencesDirectoryFormat = model.SingleFileDirectoryFormat( 'AlignedDNASequencesDirectoryFormat', 'aligned-dna-sequences.fasta', AlignedDNAFASTAFormat) class AlignedRNAFASTAFormat(AlignedFASTAFormatMixin, RNAFASTAFormat): def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) super()._turn_into_alignment() AlignedRNASequencesDirectoryFormat = model.SingleFileDirectoryFormat( 'AlignedRNASequencesDirectoryFormat', 'aligned-rna-sequences.fasta', AlignedRNAFASTAFormat) def _construct_validator_from_alphabet(alphabet_str): if alphabet_str: Validator = re.compile(fr'[{alphabet_str}]+\r?\n?') ValidationSet = frozenset(alphabet_str) else: Validator, ValidationSet = None, None return Validator, ValidationSet class DifferentialFormat(model.TextFileFormat): def validate(self, *args): try: md = qiime2.Metadata.load(str(self)) except qiime2.metadata.MetadataFileError as md_exc: raise ValidationError(md_exc) from md_exc if md.column_count == 0: raise ValidationError('Format must contain at least 1 column') filtered_md = md.filter_columns(column_type='numeric') if filtered_md.column_count != md.column_count: raise ValidationError('Must only contain numeric values.') DifferentialDirectoryFormat = model.SingleFileDirectoryFormat( 'DifferentialDirectoryFormat', 'differentials.tsv', DifferentialFormat) class ProteinFASTAFormat(FASTAFormat): def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) self.alphabet = "ABCDEFGHIKLMNPQRSTVWXYZ" ProteinSequencesDirectoryFormat = model.SingleFileDirectoryFormat( 'ProteinSequencesDirectoryFormat', 'protein-sequences.fasta', ProteinFASTAFormat) class AlignedProteinFASTAFormat(AlignedFASTAFormatMixin, ProteinFASTAFormat): def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) super()._turn_into_alignment() AlignedProteinSequencesDirectoryFormat = model.SingleFileDirectoryFormat( 'AlignedProteinSequencesDirectoryFormat', 'aligned-protein-sequences.fasta', AlignedProteinFASTAFormat) class BLAST6Format(model.TextFileFormat): def validate(self, *args): try: _ = skbio.read(str(self), format='blast+6', into=pd.DataFrame, default_columns=True) except pd.errors.EmptyDataError: raise ValidationError('BLAST6 file is empty.') except ValueError: raise ValidationError('Invalid BLAST6 format.') 
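# ---------------------------------------------------------------------------
# Illustrative usage sketch -- not part of the original q2-types module. It
# shows how the file formats defined above are typically exercised: open an
# existing file with mode='r' and call .validate(), which raises
# qiime2.plugin.ValidationError on malformed input. The temporary paths and
# file contents below are assumptions made for this example only.
# ---------------------------------------------------------------------------
if __name__ == '__main__':
    import os
    import tempfile

    tmp = tempfile.mkdtemp()

    # Minimal TSV taxonomy file: a 'Feature ID<tab>Taxon' header followed by
    # at least one data row, as TSVTaxonomyFormat requires.
    tax_fp = os.path.join(tmp, 'taxonomy.tsv')
    with open(tax_fp, 'w') as fh:
        fh.write('Feature ID\tTaxon\n')
        fh.write('feat1\tk__Bacteria; p__Firmicutes\n')
    TSVTaxonomyFormat(tax_fp, mode='r').validate()  # should not raise

    # Minimal DNA FASTA file; any character outside the IUPAC DNA alphabet
    # (ACGTRYKMSWBDHVN) would trigger a ValidationError.
    fasta_fp = os.path.join(tmp, 'dna-sequences.fasta')
    with open(fasta_fp, 'w') as fh:
        fh.write('>seq1\nACGTACGT\n')
    DNAFASTAFormat(fasta_fp, mode='r').validate()  # should not raise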
BLAST6DirectoryFormat = model.SingleFileDirectoryFormat( 'BLAST6DirectoryFormat', 'blast6.tsv', BLAST6Format) plugin.register_formats( TSVTaxonomyFormat, TSVTaxonomyDirectoryFormat, HeaderlessTSVTaxonomyFormat, HeaderlessTSVTaxonomyDirectoryFormat, TaxonomyFormat, TaxonomyDirectoryFormat, DNAFASTAFormat, DNASequencesDirectoryFormat, PairedDNASequencesDirectoryFormat, AlignedDNAFASTAFormat, AlignedDNASequencesDirectoryFormat, DifferentialFormat, DifferentialDirectoryFormat, ProteinFASTAFormat, AlignedProteinFASTAFormat, ProteinSequencesDirectoryFormat, AlignedProteinSequencesDirectoryFormat, RNAFASTAFormat, RNASequencesDirectoryFormat, AlignedRNAFASTAFormat, AlignedRNASequencesDirectoryFormat, PairedRNASequencesDirectoryFormat, BLAST6Format, BLAST6DirectoryFormat ) q2-types-2021.8.0/q2_types/feature_data/_transformer.py000066400000000000000000000502111412142116700226730ustar00rootroot00000000000000# ---------------------------------------------------------------------------- # Copyright (c) 2016-2021, QIIME 2 development team. # # Distributed under the terms of the Modified BSD License. # # The full license is in the file LICENSE, distributed with this software. # ---------------------------------------------------------------------------- import collections.abc from itertools import zip_longest import pandas as pd import biom import skbio import qiime2 from ..plugin_setup import plugin from ..feature_table import BIOMV210Format from . import (TaxonomyFormat, HeaderlessTSVTaxonomyFormat, TSVTaxonomyFormat, DNAFASTAFormat, PairedDNASequencesDirectoryFormat, AlignedDNAFASTAFormat, DifferentialFormat, ProteinFASTAFormat, AlignedProteinFASTAFormat, RNAFASTAFormat, AlignedRNAFASTAFormat, PairedRNASequencesDirectoryFormat, BLAST6Format) # Taxonomy format transformers def _taxonomy_formats_to_dataframe(filepath, has_header=None): """Read any of the three taxonomy formats into a dataframe. Parameters ---------- filepath : str The taxonomy-formatted file to be read. has_header : bool, optional If `None`, autodetect the header: only `Feature IDTaxon` is recognized, optionally followed by other columns. If `True`, the file must have the expected header described above otherwise an error is raised. If `False`, the file is read without assuming a header. Returns ------- pd.DataFrame Dataframe containing parsed contents of the taxonomy file. The dataframe will have its index name set to `Feature ID` and its first column will be `Taxon`, followed by any other columns in the input file. """ # Using `dtype=object` and `set_index()` to avoid type casting/inference of # any columns or the index. df = pd.read_csv(filepath, sep='\t', skip_blank_lines=True, header=None, dtype=object) if len(df.columns) < 2: raise ValueError( "Taxonomy format requires at least two columns, found %d." 
% len(df.columns)) if has_header and not _has_expected_header(df): raise ValueError( "Taxonomy format requires a header with `Feature ID` and `Taxon` " "as the first two columns.") if has_header or (has_header is None and _has_expected_header(df)): # Make first row the header: # https://stackoverflow.com/a/26147330/3776794 df.columns = df.iloc[0] df.columns.name = None df = df.reindex(df.index.drop(0)) else: # No header unnamed_columns = ['Unnamed Column %d' % (i + 1) for i in range(len(df.columns[2:]))] df.columns = TSVTaxonomyFormat.HEADER + unnamed_columns df.set_index(df.columns[0], drop=True, append=False, inplace=True) if len(df.index) < 1: raise ValueError("Taxonomy format requires at least one row of data.") if df.index.has_duplicates: raise ValueError( "Taxonomy format feature IDs must be unique. The following IDs " "are duplicated: %s" % ', '.join(df.index[df.index.duplicated()].unique())) if df.columns.has_duplicates: raise ValueError( "Taxonomy format column names must be unique. The following " "column names are duplicated: %s" % ', '.join(df.columns[df.columns.duplicated()].unique())) df['Taxon'] = df['Taxon'].str.strip() return df def _has_expected_header(df): return df.iloc[0].tolist()[:2] == TSVTaxonomyFormat.HEADER def _dataframe_to_tsv_taxonomy_format(df): if len(df.index) < 1: raise ValueError("Taxonomy format requires at least one row of data.") if len(df.columns) < 1: raise ValueError( "Taxonomy format requires at least one column of data.") if df.index.name != 'Feature ID': raise ValueError( "Taxonomy format requires the dataframe index name to be " "`Feature ID`, found %r" % df.index.name) if df.columns[0] != 'Taxon': raise ValueError( "Taxonomy format requires the first column name to be `Taxon`, " "found %r" % df.columns[0]) if df.index.has_duplicates: raise ValueError( "Taxonomy format feature IDs must be unique. The following IDs " "are duplicated: %s" % ', '.join(df.index[df.index.duplicated()].unique())) if df.columns.has_duplicates: raise ValueError( "Taxonomy format column names must be unique. The following " "column names are duplicated: %s" % ', '.join(df.columns[df.columns.duplicated()].unique())) ff = TSVTaxonomyFormat() df.to_csv(str(ff), sep='\t', header=True, index=True) return ff def _biom_to_tsv_taxonomy_format(table): metadata = table.metadata(axis='observation') ids = table.ids(axis='observation') if metadata is None: raise TypeError('Table must have observation metadata.') taxonomy = [] for oid, m in zip(ids, metadata): if 'taxonomy' not in m: raise ValueError('Observation %s does not contain `taxonomy` ' 'metadata.' % oid) try: taxonomy.append('; '.join(m['taxonomy'])) except Exception as e: raise TypeError('There was a problem preparing the taxonomy ' 'data for Observation %s. Metadata should be ' 'formatted as a list of strings; received %r.' 
% (oid, type(m['taxonomy']))) from e series = pd.Series(taxonomy, index=ids, name='Taxon') series.index.name = 'Feature ID' return _dataframe_to_tsv_taxonomy_format(series.to_frame()) @plugin.register_transformer def _4(ff: TaxonomyFormat) -> pd.DataFrame: return _taxonomy_formats_to_dataframe(str(ff), has_header=None) @plugin.register_transformer def _6(ff: TaxonomyFormat) -> pd.Series: df = _taxonomy_formats_to_dataframe(str(ff), has_header=None) return df.iloc[:, 0] @plugin.register_transformer def _28(ff: TaxonomyFormat) -> qiime2.Metadata: df = _taxonomy_formats_to_dataframe(str(ff), has_header=None) return qiime2.Metadata(df) @plugin.register_transformer def _20(ff: HeaderlessTSVTaxonomyFormat) -> TSVTaxonomyFormat: return _dataframe_to_tsv_taxonomy_format( _taxonomy_formats_to_dataframe(str(ff), has_header=False)) @plugin.register_transformer def _22(ff: TSVTaxonomyFormat) -> pd.DataFrame: return _taxonomy_formats_to_dataframe(str(ff), has_header=True) @plugin.register_transformer def _23(ff: TSVTaxonomyFormat) -> pd.Series: df = _taxonomy_formats_to_dataframe(str(ff), has_header=True) return df.iloc[:, 0] @plugin.register_transformer def _29(ff: TSVTaxonomyFormat) -> qiime2.Metadata: df = _taxonomy_formats_to_dataframe(str(ff), has_header=True) return qiime2.Metadata(df) @plugin.register_transformer def _24(df: pd.DataFrame) -> TSVTaxonomyFormat: return _dataframe_to_tsv_taxonomy_format(df) @plugin.register_transformer def _25(series: pd.Series) -> TSVTaxonomyFormat: return _dataframe_to_tsv_taxonomy_format(series.to_frame()) @plugin.register_transformer def _26(data: biom.Table) -> TSVTaxonomyFormat: return _biom_to_tsv_taxonomy_format(data) @plugin.register_transformer def _27(ff: BIOMV210Format) -> TSVTaxonomyFormat: # not using q2_types.feature_table._transformer._parse_biom_table_v210 # because it strips out metadata with ff.open() as fh: table = biom.Table.from_hdf5(fh) return _biom_to_tsv_taxonomy_format(table) # common to all FASTA transformers def _read_from_fasta(path, constructor=skbio.DNA): return skbio.read(path, format='fasta', constructor=constructor) def _fastaformats_to_series(ff, constructor=skbio.DNA): data = {} for sequence in _read_from_fasta(str(ff), constructor): id_ = sequence.metadata['id'] if id_ in data: raise ValueError("FASTA format sequence IDs must be unique. The " "following ID was found more than once: %s." 
% id_) data[id_] = sequence return pd.Series(data) def _fastaformats_to_metadata(ff, constructor=skbio.DNA): df = _fastaformats_to_series(ff, constructor).to_frame() df = df.astype(str) df.index.name, df.columns = 'Feature ID', ['Sequence'] return qiime2.Metadata(df) def _series_to_fasta_format(ff, data, sequence_type="DNA"): with ff.open() as f: for id_, seq in data.iteritems(): if sequence_type == "protein": sequence = skbio.Protein(seq, metadata={'id': id_}) elif sequence_type == "DNA": sequence = skbio.DNA(seq, metadata={'id': id_}) elif sequence_type == "RNA": sequence = skbio.RNA(seq, metadata={'id': id_}) else: raise NotImplementedError( "pd.Series can only be converted to DNA or " "protein FASTA format.") skbio.io.write(sequence, format='fasta', into=f) # DNA FASTA transformers class NucleicAcidIterator(collections.abc.Iterable): def __init__(self, generator): self.generator = generator def __iter__(self): yield from self.generator class DNAIterator(NucleicAcidIterator): pass class PairedDNAIterator(NucleicAcidIterator): pass class AlignedDNAIterator(NucleicAcidIterator): pass class RNAIterator(NucleicAcidIterator): pass class PairedRNAIterator(NucleicAcidIterator): pass class AlignedRNAIterator(NucleicAcidIterator): pass # DNA Transformers @plugin.register_transformer def _9(ff: DNAFASTAFormat) -> DNAIterator: generator = _read_from_fasta(str(ff), skbio.DNA) return DNAIterator(generator) @plugin.register_transformer def _10(data: DNAIterator) -> DNAFASTAFormat: ff = DNAFASTAFormat() skbio.io.write(iter(data), format='fasta', into=str(ff)) return ff @plugin.register_transformer def _11(df: PairedDNASequencesDirectoryFormat) -> PairedDNAIterator: left = df.left_dna_sequences.view(DNAIterator) right = df.right_dna_sequences.view(DNAIterator) def read_seqs(): for lseq, rseq in zip_longest(left, right): if rseq is None: raise ValueError('more left sequences than right sequences') if lseq is None: raise ValueError('more right sequences than left sequences') if rseq.metadata['id'] != lseq.metadata['id']: raise ValueError(lseq.metadata['id'] + ' and ' + rseq.metadata['id'] + ' differ') yield lseq, rseq return PairedDNAIterator(read_seqs()) @plugin.register_transformer def _12(data: PairedDNAIterator) -> PairedDNASequencesDirectoryFormat: df = PairedDNASequencesDirectoryFormat() ff_left = DNAFASTAFormat() ff_right = DNAFASTAFormat() with ff_left.open() as lfile, ff_right.open() as rfile: for lseq, rseq in data: if rseq.metadata['id'] != lseq.metadata['id']: raise ValueError(lseq.metadata['id'] + ' and ' + rseq.metadata['id'] + ' differ') skbio.io.write(lseq, format='fasta', into=lfile) skbio.io.write(rseq, format='fasta', into=rfile) df.left_dna_sequences.write_data(ff_left, DNAFASTAFormat) df.right_dna_sequences.write_data(ff_right, DNAFASTAFormat) return df @plugin.register_transformer def _13(ff: AlignedDNAFASTAFormat) -> skbio.TabularMSA: return skbio.TabularMSA.read(str(ff), constructor=skbio.DNA, format='fasta') @plugin.register_transformer def _14(data: skbio.TabularMSA) -> AlignedDNAFASTAFormat: ff = AlignedDNAFASTAFormat() data.write(str(ff), format='fasta') return ff @plugin.register_transformer def _15(ff: DNAFASTAFormat) -> pd.Series: return _fastaformats_to_series(ff, skbio.DNA) @plugin.register_transformer def _31(ff: DNAFASTAFormat) -> qiime2.Metadata: return _fastaformats_to_metadata(ff, skbio.DNA) @plugin.register_transformer def _16(data: pd.Series) -> DNAFASTAFormat: ff = DNAFASTAFormat() _series_to_fasta_format(ff, data) return ff @plugin.register_transformer def 
_18(ff: AlignedDNAFASTAFormat) -> AlignedDNAIterator: generator = _read_from_fasta(str(ff), skbio.DNA) return AlignedDNAIterator(generator) @plugin.register_transformer def _19(data: AlignedDNAIterator) -> AlignedDNAFASTAFormat: ff = AlignedDNAFASTAFormat() skbio.io.write(iter(data), format='fasta', into=str(ff)) return ff @plugin.register_transformer def _33(ff: AlignedDNAFASTAFormat) -> qiime2.Metadata: return _fastaformats_to_metadata(ff, skbio.DNA) @plugin.register_transformer def _34(ff: AlignedDNAFASTAFormat) -> pd.Series: return _fastaformats_to_series(ff, skbio.DNA) @plugin.register_transformer def _35(data: pd.Series) -> AlignedDNAFASTAFormat: ff = AlignedDNAFASTAFormat() _series_to_fasta_format(ff, data) return ff @plugin.register_transformer def _36(fmt: AlignedDNAFASTAFormat) -> DNAIterator: generator = _read_from_fasta(str(fmt), skbio.DNA) return DNAIterator(generator) # Protein FASTA transformers class ProteinIterator(collections.abc.Iterable): def __init__(self, generator): self.generator = generator def __iter__(self): yield from self.generator class AlignedProteinIterator(ProteinIterator): pass @plugin.register_transformer def _37(ff: ProteinFASTAFormat) -> ProteinIterator: generator = _read_from_fasta(str(ff), skbio.Protein) return ProteinIterator(generator) @plugin.register_transformer def _38(data: ProteinIterator) -> ProteinFASTAFormat: ff = ProteinFASTAFormat() skbio.io.write(iter(data), format='fasta', into=str(ff)) return ff @plugin.register_transformer def _39(ff: AlignedProteinFASTAFormat) -> skbio.TabularMSA: return skbio.TabularMSA.read(str(ff), constructor=skbio.Protein, format='fasta') @plugin.register_transformer def _40(data: skbio.TabularMSA) -> AlignedProteinFASTAFormat: ff = AlignedProteinFASTAFormat() data.write(str(ff), format='fasta') return ff @plugin.register_transformer def _41(ff: ProteinFASTAFormat) -> pd.Series: return _fastaformats_to_series(ff, skbio.Protein) @plugin.register_transformer def _42(ff: ProteinFASTAFormat) -> qiime2.Metadata: return _fastaformats_to_metadata(ff, skbio.Protein) @plugin.register_transformer def _43(data: pd.Series) -> ProteinFASTAFormat: ff = ProteinFASTAFormat() _series_to_fasta_format(ff, data, "protein") return ff @plugin.register_transformer def _44(ff: AlignedProteinFASTAFormat) -> AlignedProteinIterator: generator = _read_from_fasta(str(ff), skbio.Protein) return AlignedProteinIterator(generator) @plugin.register_transformer def _45(data: AlignedProteinIterator) -> AlignedProteinFASTAFormat: ff = AlignedProteinFASTAFormat() skbio.io.write(iter(data), format='fasta', into=str(ff)) return ff @plugin.register_transformer def _46(ff: AlignedProteinFASTAFormat) -> qiime2.Metadata: return _fastaformats_to_metadata(ff, skbio.Protein) @plugin.register_transformer def _47(ff: AlignedProteinFASTAFormat) -> pd.Series: return _fastaformats_to_series(ff, skbio.Protein) @plugin.register_transformer def _48(data: pd.Series) -> AlignedProteinFASTAFormat: ff = AlignedProteinFASTAFormat() _series_to_fasta_format(ff, data, "protein") return ff @plugin.register_transformer def _49(fmt: AlignedProteinFASTAFormat) -> ProteinIterator: generator = _read_from_fasta(str(fmt), skbio.Protein) return ProteinIterator(generator) # RNA Transformers @plugin.register_transformer def _50(ff: RNAFASTAFormat) -> RNAIterator: generator = _read_from_fasta(str(ff), constructor=skbio.RNA) return RNAIterator(generator) @plugin.register_transformer def _51(data: RNAIterator) -> RNAFASTAFormat: ff = RNAFASTAFormat() skbio.io.write(iter(data), 
format='fasta', into=str(ff)) return ff @plugin.register_transformer def _52(ff: AlignedRNAFASTAFormat) -> skbio.TabularMSA: return skbio.TabularMSA.read(str(ff), constructor=skbio.RNA, format='fasta') @plugin.register_transformer def _53(data: skbio.TabularMSA) -> AlignedRNAFASTAFormat: ff = AlignedRNAFASTAFormat() data.write(str(ff), format='fasta') return ff @plugin.register_transformer def _54(ff: RNAFASTAFormat) -> pd.Series: return _fastaformats_to_series(ff, constructor=skbio.RNA) @plugin.register_transformer def _55(ff: RNAFASTAFormat) -> qiime2.Metadata: return _fastaformats_to_metadata(ff, constructor=skbio.RNA) @plugin.register_transformer def _56(data: pd.Series) -> RNAFASTAFormat: ff = RNAFASTAFormat() _series_to_fasta_format(ff, data, sequence_type="RNA") return ff @plugin.register_transformer def _57(ff: AlignedRNAFASTAFormat) -> AlignedRNAIterator: generator = _read_from_fasta(str(ff), constructor=skbio.RNA) return AlignedRNAIterator(generator) @plugin.register_transformer def _58(data: AlignedRNAIterator) -> AlignedRNAFASTAFormat: ff = AlignedRNAFASTAFormat() skbio.io.write(iter(data), format='fasta', into=str(ff)) return ff @plugin.register_transformer def _59(ff: AlignedRNAFASTAFormat) -> qiime2.Metadata: return _fastaformats_to_metadata(ff, constructor=skbio.RNA) @plugin.register_transformer def _60(ff: AlignedRNAFASTAFormat) -> pd.Series: return _fastaformats_to_series(ff, constructor=skbio.RNA) @plugin.register_transformer def _61(data: pd.Series) -> AlignedRNAFASTAFormat: ff = AlignedRNAFASTAFormat() _series_to_fasta_format(ff, data, sequence_type="RNA") return ff @plugin.register_transformer def _62(fmt: AlignedRNAFASTAFormat) -> RNAIterator: generator = _read_from_fasta(str(fmt), constructor=skbio.RNA) return RNAIterator(generator) @plugin.register_transformer def _63(df: PairedRNASequencesDirectoryFormat) -> PairedRNAIterator: left = df.left_rna_sequences.view(RNAIterator) right = df.right_rna_sequences.view(RNAIterator) def read_seqs(): for lseq, rseq in zip_longest(left, right): if rseq is None: raise ValueError('more left sequences than right sequences') if lseq is None: raise ValueError('more right sequences than left sequences') if rseq.metadata['id'] != lseq.metadata['id']: raise ValueError(lseq.metadata['id'] + ' and ' + rseq.metadata['id'] + ' differ') yield lseq, rseq return PairedRNAIterator(read_seqs()) @plugin.register_transformer def _64(data: PairedRNAIterator) -> PairedRNASequencesDirectoryFormat: df = PairedRNASequencesDirectoryFormat() ff_left = RNAFASTAFormat() ff_right = RNAFASTAFormat() with ff_left.open() as lfile, ff_right.open() as rfile: for lseq, rseq in data: if rseq.metadata['id'] != lseq.metadata['id']: raise ValueError(lseq.metadata['id'] + ' and ' + rseq.metadata['id'] + ' differ') skbio.io.write(lseq, format='fasta', into=lfile) skbio.io.write(rseq, format='fasta', into=rfile) df.left_rna_sequences.write_data(ff_left, RNAFASTAFormat) df.right_rna_sequences.write_data(ff_right, RNAFASTAFormat) return df # differential types @plugin.register_transformer def _222(ff: DifferentialFormat) -> pd.DataFrame: return qiime2.Metadata.load(str(ff)).to_dataframe() @plugin.register_transformer def _223(ff: DifferentialFormat) -> qiime2.Metadata: return qiime2.Metadata.load(str(ff)) @plugin.register_transformer def _224(data: pd.DataFrame) -> DifferentialFormat: ff = DifferentialFormat() qiime2.Metadata(data).save(str(ff)) return ff # blast types @plugin.register_transformer def _225(ff: BLAST6Format) -> pd.DataFrame: return skbio.read(str(ff), 
format='blast+6', into=pd.DataFrame, default_columns=True) @plugin.register_transformer def _226(data: pd.DataFrame) -> BLAST6Format: ff = BLAST6Format() data.to_csv(str(ff), sep='\t', header=False, index=False) return ff q2-types-2021.8.0/q2_types/feature_data/_type.py000066400000000000000000000074371412142116700213260ustar00rootroot00000000000000# ---------------------------------------------------------------------------- # Copyright (c) 2016-2021, QIIME 2 development team. # # Distributed under the terms of the Modified BSD License. # # The full license is in the file LICENSE, distributed with this software. # ---------------------------------------------------------------------------- from qiime2.plugin import SemanticType from ..plugin_setup import plugin from . import (TSVTaxonomyDirectoryFormat, DNASequencesDirectoryFormat, PairedDNASequencesDirectoryFormat, AlignedDNASequencesDirectoryFormat, DifferentialDirectoryFormat, ProteinSequencesDirectoryFormat, AlignedProteinSequencesDirectoryFormat, RNASequencesDirectoryFormat, AlignedRNASequencesDirectoryFormat, PairedRNASequencesDirectoryFormat, BLAST6DirectoryFormat) FeatureData = SemanticType('FeatureData', field_names='type') Taxonomy = SemanticType('Taxonomy', variant_of=FeatureData.field['type']) Sequence = SemanticType('Sequence', variant_of=FeatureData.field['type']) RNASequence = SemanticType('RNASequence', variant_of=FeatureData.field['type']) PairedEndSequence = SemanticType('PairedEndSequence', variant_of=FeatureData.field['type']) PairedEndRNASequence = SemanticType('PairedEndRNASequence', variant_of=FeatureData.field['type']) AlignedSequence = SemanticType('AlignedSequence', variant_of=FeatureData.field['type']) AlignedRNASequence = SemanticType('AlignedRNASequence', variant_of=FeatureData.field['type']) Differential = SemanticType('Differential', variant_of=FeatureData.field['type']) ProteinSequence = SemanticType('ProteinSequence', variant_of=FeatureData.field['type']) AlignedProteinSequence = SemanticType('AlignedProteinSequence', variant_of=FeatureData.field['type']) BLAST6 = SemanticType('BLAST6', variant_of=FeatureData.field['type']) plugin.register_semantic_types(FeatureData, Taxonomy, Sequence, PairedEndSequence, AlignedSequence, Differential, ProteinSequence, AlignedProteinSequence, RNASequence, AlignedRNASequence, PairedEndRNASequence, BLAST6) plugin.register_semantic_type_to_format( FeatureData[Taxonomy], artifact_format=TSVTaxonomyDirectoryFormat) plugin.register_semantic_type_to_format( FeatureData[Sequence], artifact_format=DNASequencesDirectoryFormat) plugin.register_semantic_type_to_format( FeatureData[RNASequence], artifact_format=RNASequencesDirectoryFormat) plugin.register_semantic_type_to_format( FeatureData[PairedEndSequence], artifact_format=PairedDNASequencesDirectoryFormat) plugin.register_semantic_type_to_format( FeatureData[PairedEndRNASequence], artifact_format=PairedRNASequencesDirectoryFormat) plugin.register_semantic_type_to_format( FeatureData[AlignedSequence], artifact_format=AlignedDNASequencesDirectoryFormat) plugin.register_semantic_type_to_format( FeatureData[AlignedRNASequence], artifact_format=AlignedRNASequencesDirectoryFormat) plugin.register_semantic_type_to_format( FeatureData[Differential], DifferentialDirectoryFormat) plugin.register_semantic_type_to_format( FeatureData[ProteinSequence], artifact_format=ProteinSequencesDirectoryFormat) plugin.register_semantic_type_to_format( FeatureData[AlignedProteinSequence], artifact_format=AlignedProteinSequencesDirectoryFormat) 
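# ---------------------------------------------------------------------------
# Illustrative sketch -- not part of the original q2-types module. Each
# registration in this file binds a semantic type to the directory format
# that stores it on disk; that binding is what lets QIIME 2 import data as
# the type. The FASTA path below is a hypothetical example file, and this
# assumes a working QIIME 2 environment with this plugin installed.
# ---------------------------------------------------------------------------
if __name__ == '__main__':
    import qiime2

    # Importing a FASTA file as FeatureData[Sequence]: QIIME 2 resolves the
    # type to DNASequencesDirectoryFormat via the registration above,
    # validates the file, and packages it as a .qza artifact.
    artifact = qiime2.Artifact.import_data('FeatureData[Sequence]',
                                           'sequences.fasta')
    artifact.save('sequences.qza')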
plugin.register_semantic_type_to_format( FeatureData[BLAST6], artifact_format=BLAST6DirectoryFormat) q2-types-2021.8.0/q2_types/feature_data/tests/000077500000000000000000000000001412142116700207635ustar00rootroot00000000000000q2-types-2021.8.0/q2_types/feature_data/tests/__init__.py000066400000000000000000000005351412142116700230770ustar00rootroot00000000000000# ---------------------------------------------------------------------------- # Copyright (c) 2016-2021, QIIME 2 development team. # # Distributed under the terms of the Modified BSD License. # # The full license is in the file LICENSE, distributed with this software. # ---------------------------------------------------------------------------- q2-types-2021.8.0/q2_types/feature_data/tests/data/000077500000000000000000000000001412142116700216745ustar00rootroot00000000000000q2-types-2021.8.0/q2_types/feature_data/tests/data/aligned-dna-sequences.fasta000066400000000000000000000003121412142116700270440ustar00rootroot00000000000000>SEQUENCE1 ------------------------ACGTACGTACGTACGTACGTACGTACGTACGTACGT ACGTACGTACGTACGTACGTACGTACGT >SEQUENCE2 ACGTACGTACGTACGTACGTACGTACGTACGTACGTACGTACGTACGTACGTACGTACGT ACGTACGTACGTACGTACGTACGTACGT q2-types-2021.8.0/q2_types/feature_data/tests/data/aligned-protein-sequences.fasta000066400000000000000000000003241412142116700277650ustar00rootroot00000000000000>sequence1 ------------------------VDFSATWCGPCKMIKPFFHSLSEKYSNVIFLEVDVDDCQD VASECEVKCMPTFQFFKKGQKVGEFSGAN >sequence2 MVKQIESKTAFQEALDAAGDKLVVVDFSATWCGPCKMIKPFFHSLSEKYSNVIFLEVDVDDCQD VASECEVKCMPTFQ-------VGEFSGAN q2-types-2021.8.0/q2_types/feature_data/tests/data/aligned-rna-sequences.fasta000066400000000000000000000003201412142116700270610ustar00rootroot00000000000000>RNASEQUENCE1 ------------------------ACGUACGUACGUACGUACGUACGUACGUACGUACGU ACGUACGUACGUACGUACGUACGUACGU >RNASEQUENCE2 ACGUACGUACGUACGUACGUACGUACGUACGUACGUACGUACGUACGUACGUACGUACGU ACGUACGUACGUACGUACGUACGUACGU q2-types-2021.8.0/q2_types/feature_data/tests/data/bad_differential.tsv000066400000000000000000000014141412142116700256740ustar00rootroot00000000000000featureid effect bad_effect F0 -0.910182258821735 a F1 1.01418002973925 0 F2 1.02456128258909 0 F3 -0.74363992043225 0 F4 1.29823896534823 0 F5 -1.12965055281585 0 F6 -0.522401797448688 0 F7 0.327560711072239 0 F8 -1.3738693465664802 0 F9 -0.7847891526325621 0 F10 -0.280063201878434 0 F11 -0.251269847578052 0 F12 1.24602780723028 0 F13 0.665734866338239 0 F14 -0.889042985114811 0 F15 -0.811956802515126 0 F16 0.11591764582945001 0 F17 -0.156195990858492 0 F18 -0.965770633683909 0 F19 0.8035240337800391 0 F20 0.680306950765235 0 F21 -0.688388077896823 0 F22 0.7949653982850671 0 F23 -1.11912925367142 0 F24 -1.1059356352774599 0 F25 0.678934047810573 0 F26 -0.937189288219405 0 F27 -0.9997301931164799 0 F28 -0.31799033232181 0 F29 -0.66141741897569 0 F30 0.550511528315366 0 q2-types-2021.8.0/q2_types/feature_data/tests/data/blast6.tsv000066400000000000000000000002221412142116700236210ustar00rootroot00000000000000moaC gi|15800534|ref|NP_286546.1| 100.00 161 0 0 1 161 1 161 2e-114 330 moaC gi|170768970|ref|ZP_02903423.1| 99.38 161 1 0 1 161 1 161 8e-114 329 q2-types-2021.8.0/q2_types/feature_data/tests/data/blast6_invalid.tsv000066400000000000000000000001741412142116700253350ustar00rootroot00000000000000moaC gi|15800534|ref|NP_286546.1| 100.00 161 0 0 1 161 1 161 moaC gi|170768970|ref|ZP_02903423.1| 99.38 161 1 0 1 161 1 161 
q2-types-2021.8.0/q2_types/feature_data/tests/data/differentials.tsv000066400000000000000000000013031412142116700252460ustar00rootroot00000000000000featureid effect F0 -0.910182258821735 F1 1.01418002973925 F2 1.02456128258909 F3 -0.74363992043225 F4 1.29823896534823 F5 -1.12965055281585 F6 -0.522401797448688 F7 0.327560711072239 F8 -1.3738693465664802 F9 -0.7847891526325621 F10 -0.280063201878434 F11 -0.251269847578052 F12 1.24602780723028 F13 0.665734866338239 F14 -0.889042985114811 F15 -0.811956802515126 F16 0.11591764582945001 F17 -0.156195990858492 F18 -0.965770633683909 F19 0.8035240337800391 F20 0.680306950765235 F21 -0.688388077896823 F22 0.7949653982850671 F23 -1.11912925367142 F24 -1.1059356352774599 F25 0.678934047810573 F26 -0.937189288219405 F27 -0.9997301931164799 F28 -0.31799033232181 F29 -0.66141741897569 F30 0.550511528315366 q2-types-2021.8.0/q2_types/feature_data/tests/data/dna-sequences-consecutive-ids.fasta000066400000000000000000000000431412142116700305460ustar00rootroot00000000000000>This is an ID >This is another ID q2-types-2021.8.0/q2_types/feature_data/tests/data/dna-sequences-corrupt-characters.fasta000066400000000000000000000000401412142116700312520ustar00rootroot00000000000000>This data is corrupt q2-types-2021.8.0/q2_types/feature_data/tests/data/dna-sequences-duplicate-id.fasta000066400000000000000000000002611412142116700300100ustar00rootroot00000000000000>SEQUENCE1 ACGTACGTACGTACGTACGTACGTACGTACGTACGTACGTACGTACGTACGTACGTACGTACGT >SEQUENCE1 ACGTACGTACGTACGTACGTACGTACGTACGTACGTACGTACGTACGTACGTACGTACGTACGT ACGTACGTACGTACGTACGTACGT q2-types-2021.8.0/q2_types/feature_data/tests/data/dna-sequences-first-line-not-id.fasta000066400000000000000000000000221412142116700307030ustar00rootroot00000000000000This is not an id q2-types-2021.8.0/q2_types/feature_data/tests/data/dna-sequences-gisaid.fasta000066400000000000000000000735251412142116700267210ustar00rootroot00000000000000>USA/AZ-TGXXXX/2020 ACGTNTGCATNACA N- TGCTA .NNNNNNNN >pangolin/Asdf/JKL/2017 ACGTGACCANNNnnn-nnnac gtcagtaca...gtaccann >NC_045512.2 Severe acute respiratory syndrome coronavirus 2 isolate Wuhan-Hu-1, complete genome ATTAAAGGTTTATACCTTCCCAGGTAACAAACCAACCAACTTTCGATCTCTTGTAGATCTGTTCTCTAAA CGAACTTTAAAATCTGTGTGGCTGTCACTCGGCTGCATGCTTAGTGCACTCACGCAGTATAATTAATAAC TAATTACTGTCGTTGACAGGACACGAGTAACTCGTCTATCTTCTGCAGGCTGCTTACGGTTTCGTCCGTG TTGCAGCCGATCATCAGCACATCTAGGTTTCGTCCGGGTGTGACCGAAAGGTAAGATGGAGAGCCTTGTC CCTGGTTTCAACGAGAAAACACACGTCCAACTCAGTTTGCCTGTTTTACAGGTTCGCGACGTGCTCGTAC GTGGCTTTGGAGACTCCGTGGAGGAGGTCTTATCAGAGGCACGTCAACATCTTAAAGATGGCACTTGTGG CTTAGTAGAAGTTGAAAAAGGCGTTTTGCCTCAACTTGAACAGCCCTATGTGTTCATCAAACGTTCGGAT GCTCGAACTGCACCTCATGGTCATGTTATGGTTGAGCTGGTAGCAGAACTCGAAGGCATTCAGTACGGTC GTAGTGGTGAGACACTTGGTGTCCTTGTCCCTCATGTGGGCGAAATACCAGTGGCTTACCGCAAGGTTCT TCTTCGTAAGAACGGTAATAAAGGAGCTGGTGGCCATAGTTACGGCGCCGATCTAAAGTCATTTGACTTA GGCGACGAGCTTGGCACTGATCCTTATGAAGATTTTCAAGAAAACTGGAACACTAAACATAGCAGTGGTG TTACCCGTGAACTCATGCGTGAGCTTAACGGAGGGGCATACACTCGCTATGTCGATAACAACTTCTGTGG CCCTGATGGCTACCCTCTTGAGTGCATTAAAGACCTTCTAGCACGTGCTGGTAAAGCTTCATGCACTTTG TCCGAACAACTGGACTTTATTGACACTAAGAGGGGTGTATACTGCTGCCGTGAACATGAGCATGAAATTG CTTGGTACACGGAACGTTCTGAAAAGAGCTATGAATTGCAGACACCTTTTGAAATTAAATTGGCAAAGAA ATTTGACACCTTCAATGGGGAATGTCCAAATTTTGTATTTCCCTTAAATTCCATAATCAAGACTATTCAA CCAAGGGTTGAAAAGAAAAAGCTTGATGGCTTTATGGGTAGAATTCGATCTGTCTATCCAGTTGCGTCAC CAAATGAATGCAACCAAATGTGCCTTTCAACTCTCATGAAGTGTGATCATTGTGGTGAAACTTCATGGCA GACGGGCGATTTTGTTAAAGCCACTTGCGAATTTTGTGGCACTGAGAATTTGACTAAAGAAGGTGCCACT 
ACTTGTGGTTACTTACCCCAAAATGCTGTTGTTAAAATTTATTGTCCAGCATGTCACAATTCAGAAGTAG GACCTGAGCATAGTCTTGCCGAATACCATAATGAATCTGGCTTGAAAACCATTCTTCGTAAGGGTGGTCG CACTATTGCCTTTGGAGGCTGTGTGTTCTCTTATGTTGGTTGCCATAACAAGTGTGCCTATTGGGTTCCA CGTGCTAGCGCTAACATAGGTTGTAACCATACAGGTGTTGTTGGAGAAGGTTCCGAAGGTCTTAATGACA ACCTTCTTGAAATACTCCAAAAAGAGAAAGTCAACATCAATATTGTTGGTGACTTTAAACTTAATGAAGA GATCGCCATTATTTTGGCATCTTTTTCTGCTTCCACAAGTGCTTTTGTGGAAACTGTGAAAGGTTTGGAT TATAAAGCATTCAAACAAATTGTTGAATCCTGTGGTAATTTTAAAGTTACAAAAGGAAAAGCTAAAAAAG GTGCCTGGAATATTGGTGAACAGAAATCAATACTGAGTCCTCTTTATGCATTTGCATCAGAGGCTGCTCG TGTTGTACGATCAATTTTCTCCCGCACTCTTGAAACTGCTCAAAATTCTGTGCGTGTTTTACAGAAGGCC GCTATAACAATACTAGATGGAATTTCACAGTATTCACTGAGACTCATTGATGCTATGATGTTCACATCTG ATTTGGCTACTAACAATCTAGTTGTAATGGCCTACATTACAGGTGGTGTTGTTCAGTTGACTTCGCAGTG GCTAACTAACATCTTTGGCACTGTTTATGAAAAACTCAAACCCGTCCTTGATTGGCTTGAAGAGAAGTTT AAGGAAGGTGTAGAGTTTCTTAGAGACGGTTGGGAAATTGTTAAATTTATCTCAACCTGTGCTTGTGAAA TTGTCGGTGGACAAATTGTCACCTGTGCAAAGGAAATTAAGGAGAGTGTTCAGACATTCTTTAAGCTTGT AAATAAATTTTTGGCTTTGTGTGCTGACTCTATCATTATTGGTGGAGCTAAACTTAAAGCCTTGAATTTA GGTGAAACATTTGTCACGCACTCAAAGGGATTGTACAGAAAGTGTGTTAAATCCAGAGAAGAAACTGGCC TACTCATGCCTCTAAAAGCCCCAAAAGAAATTATCTTCTTAGAGGGAGAAACACTTCCCACAGAAGTGTT AACAGAGGAAGTTGTCTTGAAAACTGGTGATTTACAACCATTAGAACAACCTACTAGTGAAGCTGTTGAA GCTCCATTGGTTGGTACACCAGTTTGTATTAACGGGCTTATGTTGCTCGAAATCAAAGACACAGAAAAGT ACTGTGCCCTTGCACCTAATATGATGGTAACAAACAATACCTTCACACTCAAAGGCGGTGCACCAACAAA GGTTACTTTTGGTGATGACACTGTGATAGAAGTGCAAGGTTACAAGAGTGTGAATATCACTTTTGAACTT GATGAAAGGATTGATAAAGTACTTAATGAGAAGTGCTCTGCCTATACAGTTGAACTCGGTACAGAAGTAA ATGAGTTCGCCTGTGTTGTGGCAGATGCTGTCATAAAAACTTTGCAACCAGTATCTGAATTACTTACACC ACTGGGCATTGATTTAGATGAGTGGAGTATGGCTACATACTACTTATTTGATGAGTCTGGTGAGTTTAAA TTGGCTTCACATATGTATTGTTCTTTCTACCCTCCAGATGAGGATGAAGAAGAAGGTGATTGTGAAGAAG AAGAGTTTGAGCCATCAACTCAATATGAGTATGGTACTGAAGATGATTACCAAGGTAAACCTTTGGAATT TGGTGCCACTTCTGCTGCTCTTCAACCTGAAGAAGAGCAAGAAGAAGATTGGTTAGATGATGATAGTCAA CAAACTGTTGGTCAACAAGACGGCAGTGAGGACAATCAGACAACTACTATTCAAACAATTGTTGAGGTTC AACCTCAATTAGAGATGGAACTTACACCAGTTGTTCAGACTATTGAAGTGAATAGTTTTAGTGGTTATTT AAAACTTACTGACAATGTATACATTAAAAATGCAGACATTGTGGAAGAAGCTAAAAAGGTAAAACCAACA GTGGTTGTTAATGCAGCCAATGTTTACCTTAAACATGGAGGAGGTGTTGCAGGAGCCTTAAATAAGGCTA CTAACAATGCCATGCAAGTTGAATCTGATGATTACATAGCTACTAATGGACCACTTAAAGTGGGTGGTAG TTGTGTTTTAAGCGGACACAATCTTGCTAAACACTGTCTTCATGTTGTCGGCCCAAATGTTAACAAAGGT GAAGACATTCAACTTCTTAAGAGTGCTTATGAAAATTTTAATCAGCACGAAGTTCTACTTGCACCATTAT TATCAGCTGGTATTTTTGGTGCTGACCCTATACATTCTTTAAGAGTTTGTGTAGATACTGTTCGCACAAA TGTCTACTTAGCTGTCTTTGATAAAAATCTCTATGACAAACTTGTTTCAAGCTTTTTGGAAATGAAGAGT GAAAAGCAAGTTGAACAAAAGATCGCTGAGATTCCTAAAGAGGAAGTTAAGCCATTTATAACTGAAAGTA AACCTTCAGTTGAACAGAGAAAACAAGATGATAAGAAAATCAAAGCTTGTGTTGAAGAAGTTACAACAAC TCTGGAAGAAACTAAGTTCCTCACAGAAAACTTGTTACTTTATATTGACATTAATGGCAATCTTCATCCA GATTCTGCCACTCTTGTTAGTGACATTGACATCACTTTCTTAAAGAAAGATGCTCCATATATAGTGGGTG ATGTTGTTCAAGAGGGTGTTTTAACTGCTGTGGTTATACCTACTAAAAAGGCTGGTGGCACTACTGAAAT GCTAGCGAAAGCTTTGAGAAAAGTGCCAACAGACAATTATATAACCACTTACCCGGGTCAGGGTTTAAAT GGTTACACTGTAGAGGAGGCAAAGACAGTGCTTAAAAAGTGTAAAAGTGCCTTTTACATTCTACCATCTA TTATCTCTAATGAGAAGCAAGAAATTCTTGGAACTGTTTCTTGGAATTTGCGAGAAATGCTTGCACATGC AGAAGAAACACGCAAATTAATGCCTGTCTGTGTGGAAACTAAAGCCATAGTTTCAACTATACAGCGTAAA TATAAGGGTATTAAAATACAAGAGGGTGTGGTTGATTATGGTGCTAGATTTTACTTTTACACCAGTAAAA CAACTGTAGCGTCACTTATCAACACACTTAACGATCTAAATGAAACTCTTGTTACAATGCCACTTGGCTA TGTAACACATGGCTTAAATTTGGAAGAAGCTGCTCGGTATATGAGATCTCTCAAAGTGCCAGCTACAGTT TCTGTTTCTTCACCTGATGCTGTTACAGCGTATAATGGTTATCTTACTTCTTCTTCTAAAACACCTGAAG AACATTTTATTGAAACCATCTCACTTGCTGGTTCCTATAAAGATTGGTCCTATTCTGGACAATCTACACA 
ACTAGGTATAGAATTTCTTAAGAGAGGTGATAAAAGTGTATATTACACTAGTAATCCTACCACATTCCAC CTAGATGGTGAAGTTATCACCTTTGACAATCTTAAGACACTTCTTTCTTTGAGAGAAGTGAGGACTATTA AGGTGTTTACAACAGTAGACAACATTAACCTCCACACGCAAGTTGTGGACATGTCAATGACATATGGACA ACAGTTTGGTCCAACTTATTTGGATGGAGCTGATGTTACTAAAATAAAACCTCATAATTCACATGAAGGT AAAACATTTTATGTTTTACCTAATGATGACACTCTACGTGTTGAGGCTTTTGAGTACTACCACACAACTG ATCCTAGTTTTCTGGGTAGGTACATGTCAGCATTAAATCACACTAAAAAGTGGAAATACCCACAAGTTAA TGGTTTAACTTCTATTAAATGGGCAGATAACAACTGTTATCTTGCCACTGCATTGTTAACACTCCAACAA ATAGAGTTGAAGTTTAATCCACCTGCTCTACAAGATGCTTATTACAGAGCAAGGGCTGGTGAAGCTGCTA ACTTTTGTGCACTTATCTTAGCCTACTGTAATAAGACAGTAGGTGAGTTAGGTGATGTTAGAGAAACAAT GAGTTACTTGTTTCAACATGCCAATTTAGATTCTTGCAAAAGAGTCTTGAACGTGGTGTGTAAAACTTGT GGACAACAGCAGACAACCCTTAAGGGTGTAGAAGCTGTTATGTACATGGGCACACTTTCTTATGAACAAT TTAAGAAAGGTGTTCAGATACCTTGTACGTGTGGTAAACAAGCTACAAAATATCTAGTACAACAGGAGTC ACCTTTTGTTATGATGTCAGCACCACCTGCTCAGTATGAACTTAAGCATGGTACATTTACTTGTGCTAGT GAGTACACTGGTAATTACCAGTGTGGTCACTATAAACATATAACTTCTAAAGAAACTTTGTATTGCATAG ACGGTGCTTTACTTACAAAGTCCTCAGAATACAAAGGTCCTATTACGGATGTTTTCTACAAAGAAAACAG TTACACAACAACCATAAAACCAGTTACTTATAAATTGGATGGTGTTGTTTGTACAGAAATTGACCCTAAG TTGGACAATTATTATAAGAAAGACAATTCTTATTTCACAGAGCAACCAATTGATCTTGTACCAAACCAAC CATATCCAAACGCAAGCTTCGATAATTTTAAGTTTGTATGTGATAATATCAAATTTGCTGATGATTTAAA CCAGTTAACTGGTTATAAGAAACCTGCTTCAAGAGAGCTTAAAGTTACATTTTTCCCTGACTTAAATGGT GATGTGGTGGCTATTGATTATAAACACTACACACCCTCTTTTAAGAAAGGAGCTAAATTGTTACATAAAC CTATTGTTTGGCATGTTAACAATGCAACTAATAAAGCCACGTATAAACCAAATACCTGGTGTATACGTTG TCTTTGGAGCACAAAACCAGTTGAAACATCAAATTCGTTTGATGTACTGAAGTCAGAGGACGCGCAGGGA ATGGATAATCTTGCCTGCGAAGATCTAAAACCAGTCTCTGAAGAAGTAGTGGAAAATCCTACCATACAGA AAGACGTTCTTGAGTGTAATGTGAAAACTACCGAAGTTGTAGGAGACATTATACTTAAACCAGCAAATAA TAGTTTAAAAATTACAGAAGAGGTTGGCCACACAGATCTAATGGCTGCTTATGTAGACAATTCTAGTCTT ACTATTAAGAAACCTAATGAATTATCTAGAGTATTAGGTTTGAAAACCCTTGCTACTCATGGTTTAGCTG CTGTTAATAGTGTCCCTTGGGATACTATAGCTAATTATGCTAAGCCTTTTCTTAACAAAGTTGTTAGTAC AACTACTAACATAGTTACACGGTGTTTAAACCGTGTTTGTACTAATTATATGCCTTATTTCTTTACTTTA TTGCTACAATTGTGTACTTTTACTAGAAGTACAAATTCTAGAATTAAAGCATCTATGCCGACTACTATAG CAAAGAATACTGTTAAGAGTGTCGGTAAATTTTGTCTAGAGGCTTCATTTAATTATTTGAAGTCACCTAA TTTTTCTAAACTGATAAATATTATAATTTGGTTTTTACTATTAAGTGTTTGCCTAGGTTCTTTAATCTAC TCAACCGCTGCTTTAGGTGTTTTAATGTCTAATTTAGGCATGCCTTCTTACTGTACTGGTTACAGAGAAG GCTATTTGAACTCTACTAATGTCACTATTGCAACCTACTGTACTGGTTCTATACCTTGTAGTGTTTGTCT TAGTGGTTTAGATTCTTTAGACACCTATCCTTCTTTAGAAACTATACAAATTACCATTTCATCTTTTAAA TGGGATTTAACTGCTTTTGGCTTAGTTGCAGAGTGGTTTTTGGCATATATTCTTTTCACTAGGTTTTTCT ATGTACTTGGATTGGCTGCAATCATGCAATTGTTTTTCAGCTATTTTGCAGTACATTTTATTAGTAATTC TTGGCTTATGTGGTTAATAATTAATCTTGTACAAATGGCCCCGATTTCAGCTATGGTTAGAATGTACATC TTCTTTGCATCATTTTATTATGTATGGAAAAGTTATGTGCATGTTGTAGACGGTTGTAATTCATCAACTT GTATGATGTGTTACAAACGTAATAGAGCAACAAGAGTCGAATGTACAACTATTGTTAATGGTGTTAGAAG GTCCTTTTATGTCTATGCTAATGGAGGTAAAGGCTTTTGCAAACTACACAATTGGAATTGTGTTAATTGT GATACATTCTGTGCTGGTAGTACATTTATTAGTGATGAAGTTGCGAGAGACTTGTCACTACAGTTTAAAA GACCAATAAATCCTACTGACCAGTCTTCTTACATCGTTGATAGTGTTACAGTGAAGAATGGTTCCATCCA TCTTTACTTTGATAAAGCTGGTCAAAAGACTTATGAAAGACATTCTCTCTCTCATTTTGTTAACTTAGAC AACCTGAGAGCTAATAACACTAAAGGTTCATTGCCTATTAATGTTATAGTTTTTGATGGTAAATCAAAAT GTGAAGAATCATCTGCAAAATCAGCGTCTGTTTACTACAGTCAGCTTATGTGTCAACCTATACTGTTACT AGATCAGGCATTAGTGTCTGATGTTGGTGATAGTGCGGAAGTTGCAGTTAAAATGTTTGATGCTTACGTT AATACGTTTTCATCAACTTTTAACGTACCAATGGAAAAACTCAAAACACTAGTTGCAACTGCAGAAGCTG AACTTGCAAAGAATGTGTCCTTAGACAATGTCTTATCTACTTTTATTTCAGCAGCTCGGCAAGGGTTTGT TGATTCAGATGTAGAAACTAAAGATGTTGTTGAATGTCTTAAATTGTCACATCAATCTGACATAGAAGTT ACTGGCGATAGTTGTAATAACTATATGCTCACCTATAACAAAGTTGAAAACATGACACCCCGTGACCTTG 
GTGCTTGTATTGACTGTAGTGCGCGTCATATTAATGCGCAGGTAGCAAAAAGTCACAACATTGCTTTGAT ATGGAACGTTAAAGATTTCATGTCATTGTCTGAACAACTACGAAAACAAATACGTAGTGCTGCTAAAAAG AATAACTTACCTTTTAAGTTGACATGTGCAACTACTAGACAAGTTGTTAATGTTGTAACAACAAAGATAG CACTTAAGGGTGGTAAAATTGTTAATAATTGGTTGAAGCAGTTAATTAAAGTTACACTTGTGTTCCTTTT TGTTGCTGCTATTTTCTATTTAATAACACCTGTTCATGTCATGTCTAAACATACTGACTTTTCAAGTGAA ATCATAGGATACAAGGCTATTGATGGTGGTGTCACTCGTGACATAGCATCTACAGATACTTGTTTTGCTA ACAAACATGCTGATTTTGACACATGGTTTAGCCAGCGTGGTGGTAGTTATACTAATGACAAAGCTTGCCC ATTGATTGCTGCAGTCATAACAAGAGAAGTGGGTTTTGTCGTGCCTGGTTTGCCTGGCACGATATTACGC ACAACTAATGGTGACTTTTTGCATTTCTTACCTAGAGTTTTTAGTGCAGTTGGTAACATCTGTTACACAC CATCAAAACTTATAGAGTACACTGACTTTGCAACATCAGCTTGTGTTTTGGCTGCTGAATGTACAATTTT TAAAGATGCTTCTGGTAAGCCAGTACCATATTGTTATGATACCAATGTACTAGAAGGTTCTGTTGCTTAT GAAAGTTTACGCCCTGACACACGTTATGTGCTCATGGATGGCTCTATTATTCAATTTCCTAACACCTACC TTGAAGGTTCTGTTAGAGTGGTAACAACTTTTGATTCTGAGTACTGTAGGCACGGCACTTGTGAAAGATC AGAAGCTGGTGTTTGTGTATCTACTAGTGGTAGATGGGTACTTAACAATGATTATTACAGATCTTTACCA GGAGTTTTCTGTGGTGTAGATGCTGTAAATTTACTTACTAATATGTTTACACCACTAATTCAACCTATTG GTGCTTTGGACATATCAGCATCTATAGTAGCTGGTGGTATTGTAGCTATCGTAGTAACATGCCTTGCCTA CTATTTTATGAGGTTTAGAAGAGCTTTTGGTGAATACAGTCATGTAGTTGCCTTTAATACTTTACTATTC CTTATGTCATTCACTGTACTCTGTTTAACACCAGTTTACTCATTCTTACCTGGTGTTTATTCTGTTATTT ACTTGTACTTGACATTTTATCTTACTAATGATGTTTCTTTTTTAGCACATATTCAGTGGATGGTTATGTT CACACCTTTAGTACCTTTCTGGATAACAATTGCTTATATCATTTGTATTTCCACAAAGCATTTCTATTGG TTCTTTAGTAATTACCTAAAGAGACGTGTAGTCTTTAATGGTGTTTCCTTTAGTACTTTTGAAGAAGCTG CGCTGTGCACCTTTTTGTTAAATAAAGAAATGTATCTAAAGTTGCGTAGTGATGTGCTATTACCTCTTAC GCAATATAATAGATACTTAGCTCTTTATAATAAGTACAAGTATTTTAGTGGAGCAATGGATACAACTAGC TACAGAGAAGCTGCTTGTTGTCATCTCGCAAAGGCTCTCAATGACTTCAGTAACTCAGGTTCTGATGTTC TTTACCAACCACCACAAACCTCTATCACCTCAGCTGTTTTGCAGAGTGGTTTTAGAAAAATGGCATTCCC ATCTGGTAAAGTTGAGGGTTGTATGGTACAAGTAACTTGTGGTACAACTACACTTAACGGTCTTTGGCTT GATGACGTAGTTTACTGTCCAAGACATGTGATCTGCACCTCTGAAGACATGCTTAACCCTAATTATGAAG ATTTACTCATTCGTAAGTCTAATCATAATTTCTTGGTACAGGCTGGTAATGTTCAACTCAGGGTTATTGG ACATTCTATGCAAAATTGTGTACTTAAGCTTAAGGTTGATACAGCCAATCCTAAGACACCTAAGTATAAG TTTGTTCGCATTCAACCAGGACAGACTTTTTCAGTGTTAGCTTGTTACAATGGTTCACCATCTGGTGTTT ACCAATGTGCTATGAGGCCCAATTTCACTATTAAGGGTTCATTCCTTAATGGTTCATGTGGTAGTGTTGG TTTTAACATAGATTATGACTGTGTCTCTTTTTGTTACATGCACCATATGGAATTACCAACTGGAGTTCAT GCTGGCACAGACTTAGAAGGTAACTTTTATGGACCTTTTGTTGACAGGCAAACAGCACAAGCAGCTGGTA CGGACACAACTATTACAGTTAATGTTTTAGCTTGGTTGTACGCTGCTGTTATAAATGGAGACAGGTGGTT TCTCAATCGATTTACCACAACTCTTAATGACTTTAACCTTGTGGCTATGAAGTACAATTATGAACCTCTA ACACAAGACCATGTTGACATACTAGGACCTCTTTCTGCTCAAACTGGAATTGCCGTTTTAGATATGTGTG CTTCATTAAAAGAATTACTGCAAAATGGTATGAATGGACGTACCATATTGGGTAGTGCTTTATTAGAAGA TGAATTTACACCTTTTGATGTTGTTAGACAATGCTCAGGTGTTACTTTCCAAAGTGCAGTGAAAAGAACA ATCAAGGGTACACACCACTGGTTGTTACTCACAATTTTGACTTCACTTTTAGTTTTAGTCCAGAGTACTC AATGGTCTTTGTTCTTTTTTTTGTATGAAAATGCCTTTTTACCTTTTGCTATGGGTATTATTGCTATGTC TGCTTTTGCAATGATGTTTGTCAAACATAAGCATGCATTTCTCTGTTTGTTTTTGTTACCTTCTCTTGCC ACTGTAGCTTATTTTAATATGGTCTATATGCCTGCTAGTTGGGTGATGCGTATTATGACATGGTTGGATA TGGTTGATACTAGTTTGTCTGGTTTTAAGCTAAAAGACTGTGTTATGTATGCATCAGCTGTAGTGTTACT AATCCTTATGACAGCAAGAACTGTGTATGATGATGGTGCTAGGAGAGTGTGGACACTTATGAATGTCTTG ACACTCGTTTATAAAGTTTATTATGGTAATGCTTTAGATCAAGCCATTTCCATGTGGGCTCTTATAATCT CTGTTACTTCTAACTACTCAGGTGTAGTTACAACTGTCATGTTTTTGGCCAGAGGTATTGTTTTTATGTG TGTTGAGTATTGCCCTATTTTCTTCATAACTGGTAATACACTTCAGTGTATAATGCTAGTTTATTGTTTC TTAGGCTATTTTTGTACTTGTTACTTTGGCCTCTTTTGTTTACTCAACCGCTACTTTAGACTGACTCTTG GTGTTTATGATTACTTAGTTTCTACACAGGAGTTTAGATATATGAATTCACAGGGACTACTCCCACCCAA GAATAGCATAGATGCCTTCAAACTCAACATTAAATTGTTGGGTGTTGGTGGCAAACCTTGTATCAAAGTA 
GCCACTGTACAGTCTAAAATGTCAGATGTAAAGTGCACATCAGTAGTCTTACTCTCAGTTTTGCAACAAC TCAGAGTAGAATCATCATCTAAATTGTGGGCTCAATGTGTCCAGTTACACAATGACATTCTCTTAGCTAA AGATACTACTGAAGCCTTTGAAAAAATGGTTTCACTACTTTCTGTTTTGCTTTCCATGCAGGGTGCTGTA GACATAAACAAGCTTTGTGAAGAAATGCTGGACAACAGGGCAACCTTACAAGCTATAGCCTCAGAGTTTA GTTCCCTTCCATCATATGCAGCTTTTGCTACTGCTCAAGAAGCTTATGAGCAGGCTGTTGCTAATGGTGA TTCTGAAGTTGTTCTTAAAAAGTTGAAGAAGTCTTTGAATGTGGCTAAATCTGAATTTGACCGTGATGCA GCCATGCAACGTAAGTTGGAAAAGATGGCTGATCAAGCTATGACCCAAATGTATAAACAGGCTAGATCTG AGGACAAGAGGGCAAAAGTTACTAGTGCTATGCAGACAATGCTTTTCACTATGCTTAGAAAGTTGGATAA TGATGCACTCAACAACATTATCAACAATGCAAGAGATGGTTGTGTTCCCTTGAACATAATACCTCTTACA ACAGCAGCCAAACTAATGGTTGTCATACCAGACTATAACACATATAAAAATACGTGTGATGGTACAACAT TTACTTATGCATCAGCATTGTGGGAAATCCAACAGGTTGTAGATGCAGATAGTAAAATTGTTCAACTTAG TGAAATTAGTATGGACAATTCACCTAATTTAGCATGGCCTCTTATTGTAACAGCTTTAAGGGCCAATTCT GCTGTCAAATTACAGAATAATGAGCTTAGTCCTGTTGCACTACGACAGATGTCTTGTGCTGCCGGTACTA CACAAACTGCTTGCACTGATGACAATGCGTTAGCTTACTACAACACAACAAAGGGAGGTAGGTTTGTACT TGCACTGTTATCCGATTTACAGGATTTGAAATGGGCTAGATTCCCTAAGAGTGATGGAACTGGTACTATC TATACAGAACTGGAACCACCTTGTAGGTTTGTTACAGACACACCTAAAGGTCCTAAAGTGAAGTATTTAT ACTTTATTAAAGGATTAAACAACCTAAATAGAGGTATGGTACTTGGTAGTTTAGCTGCCACAGTACGTCT ACAAGCTGGTAATGCAACAGAAGTGCCTGCCAATTCAACTGTATTATCTTTCTGTGCTTTTGCTGTAGAT GCTGCTAAAGCTTACAAAGATTATCTAGCTAGTGGGGGACAACCAATCACTAATTGTGTTAAGATGTTGT GTACACACACTGGTACTGGTCAGGCAATAACAGTTACACCGGAAGCCAATATGGATCAAGAATCCTTTGG TGGTGCATCGTGTTGTCTGTACTGCCGTTGCCACATAGATCATCCAAATCCTAAAGGATTTTGTGACTTA AAAGGTAAGTATGTACAAATACCTACAACTTGTGCTAATGACCCTGTGGGTTTTACACTTAAAAACACAG TCTGTACCGTCTGCGGTATGTGGAAAGGTTATGGCTGTAGTTGTGATCAACTCCGCGAACCCATGCTTCA GTCAGCTGATGCACAATCGTTTTTAAACGGGTTTGCGGTGTAAGTGCAGCCCGTCTTACACCGTGCGGCA CAGGCACTAGTACTGATGTCGTATACAGGGCTTTTGACATCTACAATGATAAAGTAGCTGGTTTTGCTAA ATTCCTAAAAACTAATTGTTGTCGCTTCCAAGAAAAGGACGAAGATGACAATTTAATTGATTCTTACTTT GTAGTTAAGAGACACACTTTCTCTAACTACCAACATGAAGAAACAATTTATAATTTACTTAAGGATTGTC CAGCTGTTGCTAAACATGACTTCTTTAAGTTTAGAATAGACGGTGACATGGTACCACATATATCACGTCA ACGTCTTACTAAATACACAATGGCAGACCTCGTCTATGCTTTAAGGCATTTTGATGAAGGTAATTGTGAC ACATTAAAAGAAATACTTGTCACATACAATTGTTGTGATGATGATTATTTCAATAAAAAGGACTGGTATG ATTTTGTAGAAAACCCAGATATATTACGCGTATACGCCAACTTAGGTGAACGTGTACGCCAAGCTTTGTT AAAAACAGTACAATTCTGTGATGCCATGCGAAATGCTGGTATTGTTGGTGTACTGACATTAGATAATCAA GATCTCAATGGTAACTGGTATGATTTCGGTGATTTCATACAAACCACGCCAGGTAGTGGAGTTCCTGTTG TAGATTCTTATTATTCATTGTTAATGCCTATATTAACCTTGACCAGGGCTTTAACTGCAGAGTCACATGT TGACACTGACTTAACAAAGCCTTACATTAAGTGGGATTTGTTAAAATATGACTTCACGGAAGAGAGGTTA AAACTCTTTGACCGTTATTTTAAATATTGGGATCAGACATACCACCCAAATTGTGTTAACTGTTTGGATG ACAGATGCATTCTGCATTGTGCAAACTTTAATGTTTTATTCTCTACAGTGTTCCCACCTACAAGTTTTGG ACCACTAGTGAGAAAAATATTTGTTGATGGTGTTCCATTTGTAGTTTCAACTGGATACCACTTCAGAGAG CTAGGTGTTGTACATAATCAGGATGTAAACTTACATAGCTCTAGACTTAGTTTTAAGGAATTACTTGTGT ATGCTGCTGACCCTGCTATGCACGCTGCTTCTGGTAATCTATTACTAGATAAACGCACTACGTGCTTTTC AGTAGCTGCACTTACTAACAATGTTGCTTTTCAAACTGTCAAACCCGGTAATTTTAACAAAGACTTCTAT GACTTTGCTGTGTCTAAGGGTTTCTTTAAGGAAGGAAGTTCTGTTGAATTAAAACACTTCTTCTTTGCTC AGGATGGTAATGCTGCTATCAGCGATTATGACTACTATCGTTATAATCTACCAACAATGTGTGATATCAG ACAACTACTATTTGTAGTTGAAGTTGTTGATAAGTACTTTGATTGTTACGATGGTGGCTGTATTAATGCT AACCAAGTCATCGTCAACAACCTAGACAAATCAGCTGGTTTTCCATTTAATAAATGGGGTAAGGCTAGAC TTTATTATGATTCAATGAGTTATGAGGATCAAGATGCACTTTTCGCATATACAAAACGTAATGTCATCCC TACTATAACTCAAATGAATCTTAAGTATGCCATTAGTGCAAAGAATAGAGCTCGCACCGTAGCTGGTGTC TCTATCTGTAGTACTATGACCAATAGACAGTTTCATCAAAAATTATTGAAATCAATAGCCGCCACTAGAG GAGCTACTGTAGTAATTGGAACAAGCAAATTCTATGGTGGTTGGCACAACATGTTAAAAACTGTTTATAG TGATGTAGAAAACCCTCACCTTATGGGTTGGGATTATCCTAAATGTGATAGAGCCATGCCTAACATGCTT 
AGAATTATGGCCTCACTTGTTCTTGCTCGCAAACATACAACGTGTTGTAGCTTGTCACACCGTTTCTATA GATTAGCTAATGAGTGTGCTCAAGTATTGAGTGAAATGGTCATGTGTGGCGGTTCACTATATGTTAAACC AGGTGGAACCTCATCAGGAGATGCCACAACTGCTTATGCTAATAGTGTTTTTAACATTTGTCAAGCTGTC ACGGCCAATGTTAATGCACTTTTATCTACTGATGGTAACAAAATTGCCGATAAGTATGTCCGCAATTTAC AACACAGACTTTATGAGTGTCTCTATAGAAATAGAGATGTTGACACAGACTTTGTGAATGAGTTTTACGC ATATTTGCGTAAACATTTCTCAATGATGATACTCTCTGACGATGCTGTTGTGTGTTTCAATAGCACTTAT GCATCTCAAGGTCTAGTGGCTAGCATAAAGAACTTTAAGTCAGTTCTTTATTATCAAAACAATGTTTTTA TGTCTGAAGCAAAATGTTGGACTGAGACTGACCTTACTAAAGGACCTCATGAATTTTGCTCTCAACATAC AATGCTAGTTAAACAGGGTGATGATTATGTGTACCTTCCTTACCCAGATCCATCAAGAATCCTAGGGGCC GGCTGTTTTGTAGATGATATCGTAAAAACAGATGGTACACTTATGATTGAACGGTTCGTGTCTTTAGCTA TAGATGCTTACCCACTTACTAAACATCCTAATCAGGAGTATGCTGATGTCTTTCATTTGTACTTACAATA CATAAGAAAGCTACATGATGAGTTAACAGGACACATGTTAGACATGTATTCTGTTATGCTTACTAATGAT AACACTTCAAGGTATTGGGAACCTGAGTTTTATGAGGCTATGTACACACCGCATACAGTCTTACAGGCTG TTGGGGCTTGTGTTCTTTGCAATTCACAGACTTCATTAAGATGTGGTGCTTGCATACGTAGACCATTCTT ATGTTGTAAATGCTGTTACGACCATGTCATATCAACATCACATAAATTAGTCTTGTCTGTTAATCCGTAT GTTTGCAATGCTCCAGGTTGTGATGTCACAGATGTGACTCAACTTTACTTAGGAGGTATGAGCTATTATT GTAAATCACATAAACCACCCATTAGTTTTCCATTGTGTGCTAATGGACAAGTTTTTGGTTTATATAAAAA TACATGTGTTGGTAGCGATAATGTTACTGACTTTAATGCAATTGCAACATGTGACTGGACAAATGCTGGT GATTACATTTTAGCTAACACCTGTACTGAAAGACTCAAGCTTTTTGCAGCAGAAACGCTCAAAGCTACTG AGGAGACATTTAAACTGTCTTATGGTATTGCTACTGTACGTGAAGTGCTGTCTGACAGAGAATTACATCT TTCATGGGAAGTTGGTAAACCTAGACCACCACTTAACCGAAATTATGTCTTTACTGGTTATCGTGTAACT AAAAACAGTAAAGTACAAATAGGAGAGTACACCTTTGAAAAAGGTGACTATGGTGATGCTGTTGTTTACC GAGGTACAACAACTTACAAATTAAATGTTGGTGATTATTTTGTGCTGACATCACATACAGTAATGCCATT AAGTGCACCTACACTAGTGCCACAAGAGCACTATGTTAGAATTACTGGCTTATACCCAACACTCAATATC TCAGATGAGTTTTCTAGCAATGTTGCAAATTATCAAAAGGTTGGTATGCAAAAGTATTCTACACTCCAGG GACCACCTGGTACTGGTAAGAGTCATTTTGCTATTGGCCTAGCTCTCTACTACCCTTCTGCTCGCATAGT GTATACAGCTTGCTCTCATGCCGCTGTTGATGCACTATGTGAGAAGGCATTAAAATATTTGCCTATAGAT AAATGTAGTAGAATTATACCTGCACGTGCTCGTGTAGAGTGTTTTGATAAATTCAAAGTGAATTCAACAT TAGAACAGTATGTCTTTTGTACTGTAAATGCATTGCCTGAGACGACAGCAGATATAGTTGTCTTTGATGA AATTTCAATGGCCACAAATTATGATTTGAGTGTTGTCAATGCCAGATTACGTGCTAAGCACTATGTGTAC ATTGGCGACCCTGCTCAATTACCTGCACCACGCACATTGCTAACTAAGGGCACACTAGAACCAGAATATT TCAATTCAGTGTGTAGACTTATGAAAACTATAGGTCCAGACATGTTCCTCGGAACTTGTCGGCGTTGTCC TGCTGAAATTGTTGACACTGTGAGTGCTTTGGTTTATGATAATAAGCTTAAAGCACATAAAGACAAATCA GCTCAATGCTTTAAAATGTTTTATAAGGGTGTTATCACGCATGATGTTTCATCTGCAATTAACAGGCCAC AAATAGGCGTGGTAAGAGAATTCCTTACACGTAACCCTGCTTGGAGAAAAGCTGTCTTTATTTCACCTTA TAATTCACAGAATGCTGTAGCCTCAAAGATTTTGGGACTACCAACTCAAACTGTTGATTCATCACAGGGC TCAGAATATGACTATGTCATATTCACTCAAACCACTGAAACAGCTCACTCTTGTAATGTAAACAGATTTA ATGTTGCTATTACCAGAGCAAAAGTAGGCATACTTTGCATAATGTCTGATAGAGACCTTTATGACAAGTT GCAATTTACAAGTCTTGAAATTCCACGTAGGAATGTGGCAACTTTACAAGCTGAAAATGTAACAGGACTC TTTAAAGATTGTAGTAAGGTAATCACTGGGTTACATCCTACACAGGCACCTACACACCTCAGTGTTGACA CTAAATTCAAAACTGAAGGTTTATGTGTTGACATACCTGGCATACCTAAGGACATGACCTATAGAAGACT CATCTCTATGATGGGTTTTAAAATGAATTATCAAGTTAATGGTTACCCTAACATGTTTATCACCCGCGAA GAAGCTATAAGACATGTACGTGCATGGATTGGCTTCGATGTCGAGGGGTGTCATGCTACTAGAGAAGCTG TTGGTACCAATTTACCTTTACAGCTAGGTTTTTCTACAGGTGTTAACCTAGTTGCTGTACCTACAGGTTA TGTTGATACACCTAATAATACAGATTTTTCCAGAGTTAGTGCTAAACCACCGCCTGGAGATCAATTTAAA CACCTCATACCACTTATGTACAAAGGACTTCCTTGGAATGTAGTGCGTATAAAGATTGTACAAATGTTAA GTGACACACTTAAAAATCTCTCTGACAGAGTCGTATTTGTCTTATGGGCACATGGCTTTGAGTTGACATC TATGAAGTATTTTGTGAAAATAGGACCTGAGCGCACCTGTTGTCTATGTGATAGACGTGCCACATGCTTT TCCACTGCTTCAGACACTTATGCCTGTTGGCATCATTCTATTGGATTTGATTACGTCTATAATCCGTTTA TGATTGATGTTCAACAATGGGGTTTTACAGGTAACCTACAAAGCAACCATGATCTGTATTGTCAAGTCCA 
TGGTAATGCACATGTAGCTAGTTGTGATGCAATCATGACTAGGTGTCTAGCTGTCCACGAGTGCTTTGTT AAGCGTGTTGACTGGACTATTGAATATCCTATAATTGGTGATGAACTGAAGATTAATGCGGCTTGTAGAA AGGTTCAACACATGGTTGTTAAAGCTGCATTATTAGCAGACAAATTCCCAGTTCTTCACGACATTGGTAA CCCTAAAGCTATTAAGTGTGTACCTCAAGCTGATGTAGAATGGAAGTTCTATGATGCACAGCCTTGTAGT GACAAAGCTTATAAAATAGAAGAATTATTCTATTCTTATGCCACACATTCTGACAAATTCACAGATGGTG TATGCCTATTTTGGAATTGCAATGTCGATAGATATCCTGCTAATTCCATTGTTTGTAGATTTGACACTAG AGTGCTATCTAACCTTAACTTGCCTGGTTGTGATGGTGGCAGTTTGTATGTAAATAAACATGCATTCCAC ACACCAGCTTTTGATAAAAGTGCTTTTGTTAATTTAAAACAATTACCATTTTTCTATTACTCTGACAGTC CATGTGAGTCTCATGGAAAACAAGTAGTGTCAGATATAGATTATGTACCACTAAAGTCTGCTACGTGTAT AACACGTTGCAATTTAGGTGGTGCTGTCTGTAGACATCATGCTAATGAGTACAGATTGTATCTCGATGCT TATAACATGATGATCTCAGCTGGCTTTAGCTTGTGGGTTTACAAACAATTTGATACTTATAACCTCTGGA ACACTTTTACAAGACTTCAGAGTTTAGAAAATGTGGCTTTTAATGTTGTAAATAAGGGACACTTTGATGG ACAACAGGGTGAAGTACCAGTTTCTATCATTAATAACACTGTTTACACAAAAGTTGATGGTGTTGATGTA GAATTGTTTGAAAATAAAACAACATTACCTGTTAATGTAGCATTTGAGCTTTGGGCTAAGCGCAACATTA AACCAGTACCAGAGGTGAAAATACTCAATAATTTGGGTGTGGACATTGCTGCTAATACTGTGATCTGGGA CTACAAAAGAGATGCTCCAGCACATATATCTACTATTGGTGTTTGTTCTATGACTGACATAGCCAAGAAA CCAACTGAAACGATTTGTGCACCACTCACTGTCTTTTTTGATGGTAGAGTTGATGGTCAAGTAGACTTAT TTAGAAATGCCCGTAATGGTGTTCTTATTACAGAAGGTAGTGTTAAAGGTTTACAACCATCTGTAGGTCC CAAACAAGCTAGTCTTAATGGAGTCACATTAATTGGAGAAGCCGTAAAAACACAGTTCAATTATTATAAG AAAGTTGATGGTGTTGTCCAACAATTACCTGAAACTTACTTTACTCAGAGTAGAAATTTACAAGAATTTA AACCCAGGAGTCAAATGGAAATTGATTTCTTAGAATTAGCTATGGATGAATTCATTGAACGGTATAAATT AGAAGGCTATGCCTTCGAACATATCGTTTATGGAGATTTTAGTCATAGTCAGTTAGGTGGTTTACATCTA CTGATTGGACTAGCTAAACGTTTTAAGGAATCACCTTTTGAATTAGAAGATTTTATTCCTATGGACAGTA CAGTTAAAAACTATTTCATAACAGATGCGCAAACAGGTTCATCTAAGTGTGTGTGTTCTGTTATTGATTT ATTACTTGATGATTTTGTTGAAATAATAAAATCCCAAGATTTATCTGTAGTTTCTAAGGTTGTCAAAGTG ACTATTGACTATACAGAAATTTCATTTATGCTTTGGTGTAAAGATGGCCATGTAGAAACATTTTACCCAA AATTACAATCTAGTCAAGCGTGGCAACCGGGTGTTGCTATGCCTAATCTTTACAAAATGCAAAGAATGCT ATTAGAAAAGTGTGACCTTCAAAATTATGGTGATAGTGCAACATTACCTAAAGGCATAATGATGAATGTC GCAAAATATACTCAACTGTGTCAATATTTAAACACATTAACATTAGCTGTACCCTATAATATGAGAGTTA TACATTTTGGTGCTGGTTCTGATAAAGGAGTTGCACCAGGTACAGCTGTTTTAAGACAGTGGTTGCCTAC GGGTACGCTGCTTGTCGATTCAGATCTTAATGACTTTGTCTCTGATGCAGATTCAACTTTGATTGGTGAT TGTGCAACTGTACATACAGCTAATAAATGGGATCTCATTATTAGTGATATGTACGACCCTAAGACTAAAA ATGTTACAAAAGAAAATGACTCTAAAGAGGGTTTTTTCACTTACATTTGTGGGTTTATACAACAAAAGCT AGCTCTTGGAGGTTCCGTGGCTATAAAGATAACAGAACATTCTTGGAATGCTGATCTTTATAAGCTCATG GGACACTTCGCATGGTGGACAGCCTTTGTTACTAATGTGAATGCGTCATCATCTGAAGCATTTTTAATTG GATGTAATTATCTTGGCAAACCACGCGAACAAATAGATGGTTATGTCATGCATGCAAATTACATATTTTG GAGGAATACAAATCCAATTCAGTTGTCTTCCTATTCTTTATTTGACATGAGTAAATTTCCCCTTAAATTA AGGGGTACTGCTGTTATGTCTTTAAAAGAAGGTCAAATCAATGATATGATTTTATCTCTTCTTAGTAAAG GTAGACTTATAATTAGAGAAAACAACAGAGTTGTTATTTCTAGTGATGTTCTTGTTAACAACTAAACGAA CAATGTTTGTTTTTCTTGTTTTATTGCCACTAGTCTCTAGTCAGTGTGTTAATCTTACAACCAGAACTCA ATTACCCCCTGCATACACTAATTCTTTCACACGTGGTGTTTATTACCCTGACAAAGTTTTCAGATCCTCA GTTTTACATTCAACTCAGGACTTGTTCTTACCTTTCTTTTCCAATGTTACTTGGTTCCATGCTATACATG TCTCTGGGACCAATGGTACTAAGAGGTTTGATAACCCTGTCCTACCATTTAATGATGGTGTTTATTTTGC TTCCACTGAGAAGTCTAACATAATAAGAGGCTGGATTTTTGGTACTACTTTAGATTCGAAGACCCAGTCC CTACTTATTGTTAATAACGCTACTAATGTTGTTATTAAAGTCTGTGAATTTCAATTTTGTAATGATCCAT TTTTGGGTGTTTATTACCACAAAAACAACAAAAGTTGGATGGAAAGTGAGTTCAGAGTTTATTCTAGTGC GAATAATTGCACTTTTGAATATGTCTCTCAGCCTTTTCTTATGGACCTTGAAGGAAAACAGGGTAATTTC AAAAATCTTAGGGAATTTGTGTTTAAGAATATTGATGGTTATTTTAAAATATATTCTAAGCACACGCCTA TTAATTTAGTGCGTGATCTCCCTCAGGGTTTTTCGGCTTTAGAACCATTGGTAGATTTGCCAATAGGTAT TAACATCACTAGGTTTCAAACTTTACTTGCTTTACATAGAAGTTATTTGACTCCTGGTGATTCTTCTTCA 
GGTTGGACAGCTGGTGCTGCAGCTTATTATGTGGGTTATCTTCAACCTAGGACTTTTCTATTAAAATATA ATGAAAATGGAACCATTACAGATGCTGTAGACTGTGCACTTGACCCTCTCTCAGAAACAAAGTGTACGTT GAAATCCTTCACTGTAGAAAAAGGAATCTATCAAACTTCTAACTTTAGAGTCCAACCAACAGAATCTATT GTTAGATTTCCTAATATTACAAACTTGTGCCCTTTTGGTGAAGTTTTTAACGCCACCAGATTTGCATCTG TTTATGCTTGGAACAGGAAGAGAATCAGCAACTGTGTTGCTGATTATTCTGTCCTATATAATTCCGCATC ATTTTCCACTTTTAAGTGTTATGGAGTGTCTCCTACTAAATTAAATGATCTCTGCTTTACTAATGTCTAT GCAGATTCATTTGTAATTAGAGGTGATGAAGTCAGACAAATCGCTCCAGGGCAAACTGGAAAGATTGCTG ATTATAATTATAAATTACCAGATGATTTTACAGGCTGCGTTATAGCTTGGAATTCTAACAATCTTGATTC TAAGGTTGGTGGTAATTATAATTACCTGTATAGATTGTTTAGGAAGTCTAATCTCAAACCTTTTGAGAGA GATATTTCAACTGAAATCTATCAGGCCGGTAGCACACCTTGTAATGGTGTTGAAGGTTTTAATTGTTACT TTCCTTTACAATCATATGGTTTCCAACCCACTAATGGTGTTGGTTACCAACCATACAGAGTAGTAGTACT TTCTTTTGAACTTCTACATGCACCAGCAACTGTTTGTGGACCTAAAAAGTCTACTAATTTGGTTAAAAAC AAATGTGTCAATTTCAACTTCAATGGTTTAACAGGCACAGGTGTTCTTACTGAGTCTAACAAAAAGTTTC TGCCTTTCCAACAATTTGGCAGAGACATTGCTGACACTACTGATGCTGTCCGTGATCCACAGACACTTGA GATTCTTGACATTACACCATGTTCTTTTGGTGGTGTCAGTGTTATAACACCAGGAACAAATACTTCTAAC CAGGTTGCTGTTCTTTATCAGGATGTTAACTGCACAGAAGTCCCTGTTGCTATTCATGCAGATCAACTTA CTCCTACTTGGCGTGTTTATTCTACAGGTTCTAATGTTTTTCAAACACGTGCAGGCTGTTTAATAGGGGC TGAACATGTCAACAACTCATATGAGTGTGACATACCCATTGGTGCAGGTATATGCGCTAGTTATCAGACT CAGACTAATTCTCCTCGGCGGGCACGTAGTGTAGCTAGTCAATCCATCATTGCCTACACTATGTCACTTG GTGCAGAAAATTCAGTTGCTTACTCTAATAACTCTATTGCCATACCCACAAATTTTACTATTAGTGTTAC CACAGAAATTCTACCAGTGTCTATGACCAAGACATCAGTAGATTGTACAATGTACATTTGTGGTGATTCA ACTGAATGCAGCAATCTTTTGTTGCAATATGGCAGTTTTTGTACACAATTAAACCGTGCTTTAACTGGAA TAGCTGTTGAACAAGACAAAAACACCCAAGAAGTTTTTGCACAAGTCAAACAAATTTACAAAACACCACC AATTAAAGATTTTGGTGGTTTTAATTTTTCACAAATATTACCAGATCCATCAAAACCAAGCAAGAGGTCA TTTATTGAAGATCTACTTTTCAACAAAGTGACACTTGCAGATGCTGGCTTCATCAAACAATATGGTGATT GCCTTGGTGATATTGCTGCTAGAGACCTCATTTGTGCACAAAAGTTTAACGGCCTTACTGTTTTGCCACC TTTGCTCACAGATGAAATGATTGCTCAATACACTTCTGCACTGTTAGCGGGTACAATCACTTCTGGTTGG ACCTTTGGTGCAGGTGCTGCATTACAAATACCATTTGCTATGCAAATGGCTTATAGGTTTAATGGTATTG GAGTTACACAGAATGTTCTCTATGAGAACCAAAAATTGATTGCCAACCAATTTAATAGTGCTATTGGCAA AATTCAAGACTCACTTTCTTCCACAGCAAGTGCACTTGGAAAACTTCAAGATGTGGTCAACCAAAATGCA CAAGCTTTAAACACGCTTGTTAAACAACTTAGCTCCAATTTTGGTGCAATTTCAAGTGTTTTAAATGATA TCCTTTCACGTCTTGACAAAGTTGAGGCTGAAGTGCAAATTGATAGGTTGATCACAGGCAGACTTCAAAG TTTGCAGACATATGTGACTCAACAATTAATTAGAGCTGCAGAAATCAGAGCTTCTGCTAATCTTGCTGCT ACTAAAATGTCAGAGTGTGTACTTGGACAATCAAAAAGAGTTGATTTTTGTGGAAAGGGCTATCATCTTA TGTCCTTCCCTCAGTCAGCACCTCATGGTGTAGTCTTCTTGCATGTGACTTATGTCCCTGCACAAGAAAA GAACTTCACAACTGCTCCTGCCATTTGTCATGATGGAAAAGCACACTTTCCTCGTGAAGGTGTCTTTGTT TCAAATGGCACACACTGGTTTGTAACACAAAGGAATTTTTATGAACCACAAATCATTACTACAGACAACA CATTTGTGTCTGGTAACTGTGATGTTGTAATAGGAATTGTCAACAACACAGTTTATGATCCTTTGCAACC TGAATTAGACTCATTCAAGGAGGAGTTAGATAAATATTTTAAGAATCATACATCACCAGATGTTGATTTA GGTGACATCTCTGGCATTAATGCTTCAGTTGTAAACATTCAAAAAGAAATTGACCGCCTCAATGAGGTTG CCAAGAATTTAAATGAATCTCTCATCGATCTCCAAGAACTTGGAAAGTATGAGCAGTATATAAAATGGCC ATGGTACATTTGGCTAGGTTTTATAGCTGGCTTGATTGCCATAGTAATGGTGACAATTATGCTTTGCTGT ATGACCAGTTGCTGTAGTTGTCTCAAGGGCTGTTGTTCTTGTGGATCCTGCTGCAAATTTGATGAAGACG ACTCTGAGCCAGTGCTCAAAGGAGTCAAATTACATTACACATAAACGAACTTATGGATTTGTTTATGAGA ATCTTCACAATTGGAACTGTAACTTTGAAGCAAGGTGAAATCAAGGATGCTACTCCTTCAGATTTTGTTC GCGCTACTGCAACGATACCGATACAAGCCTCACTCCCTTTCGGATGGCTTATTGTTGGCGTTGCACTTCT TGCTGTTTTTCAGAGCGCTTCCAAAATCATAACCCTCAAAAAGAGATGGCAACTAGCACTCTCCAAGGGT GTTCACTTTGTTTGCAACTTGCTGTTGTTGTTTGTAACAGTTTACTCACACCTTTTGCTCGTTGCTGCTG GCCTTGAAGCCCCTTTTCTCTATCTTTATGCTTTAGTCTACTTCTTGCAGAGTATAAACTTTGTAAGAAT AATAATGAGGCTTTGGCTTTGCTGGAAATGCCGTTCCAAAAACCCATTACTTTATGATGCCAACTATTTT 
CTTTGCTGGCATACTAATTGTTACGACTATTGTATACCTTACAATAGTGTAACTTCTTCAATTGTCATTA CTTCAGGTGATGGCACAACAAGTCCTATTTCTGAACATGACTACCAGATTGGTGGTTATACTGAAAAATG GGAATCTGGAGTAAAAGACTGTGTTGTATTACACAGTTACTTCACTTCAGACTATTACCAGCTGTACTCA ACTCAATTGAGTACAGACACTGGTGTTGAACATGTTACCTTCTTCATCTACAATAAAATTGTTGATGAGC CTGAAGAACATGTCCAAATTCACACAATCGACGGTTCATCCGGAGTTGTTAATCCAGTAATGGAACCAAT TTATGATGAACCGACGACGACTACTAGCGTGCCTTTGTAAGCACAAGCTGATGAGTACGAACTTATGTAC TCATTCGTTTCGGAAGAGACAGGTACGTTAATAGTTAATAGCGTACTTCTTTTTCTTGCTTTCGTGGTAT TCTTGCTAGTTACACTAGCCATCCTTACTGCGCTTCGATTGTGTGCGTACTGCTGCAATATTGTTAACGT GAGTCTTGTAAAACCTTCTTTTTACGTTTACTCTCGTGTTAAAAATCTGAATTCTTCTAGAGTTCCTGAT CTTCTGGTCTAAACGAACTAAATATTATATTAGTTTTTCTGTTTGGAACTTTAATTTTAGCCATGGCAGA TTCCAACGGTACTATTACCGTTGAAGAGCTTAAAAAGCTCCTTGAACAATGGAACCTAGTAATAGGTTTC CTATTCCTTACATGGATTTGTCTTCTACAATTTGCCTATGCCAACAGGAATAGGTTTTTGTATATAATTA AGTTAATTTTCCTCTGGCTGTTATGGCCAGTAACTTTAGCTTGTTTTGTGCTTGCTGCTGTTTACAGAAT AAATTGGATCACCGGTGGAATTGCTATCGCAATGGCTTGTCTTGTAGGCTTGATGTGGCTCAGCTACTTC ATTGCTTCTTTCAGACTGTTTGCGCGTACGCGTTCCATGTGGTCATTCAATCCAGAAACTAACATTCTTC TCAACGTGCCACTCCATGGCACTATTCTGACCAGACCGCTTCTAGAAAGTGAACTCGTAATCGGAGCTGT GATCCTTCGTGGACATCTTCGTATTGCTGGACACCATCTAGGACGCTGTGACATCAAGGACCTGCCTAAA GAAATCACTGTTGCTACATCACGAACGCTTTCTTATTACAAATTGGGAGCTTCGCAGCGTGTAGCAGGTG ACTCAGGTTTTGCTGCATACAGTCGCTACAGGATTGGCAACTATAAATTAAACACAGACCATTCCAGTAG CAGTGACAATATTGCTTTGCTTGTACAGTAAGTGACAACAGATGTTTCATCTCGTTGACTTTCAGGTTAC TATAGCAGAGATATTACTAATTATTATGAGGACTTTTAAAGTTTCCATTTGGAATCTTGATTACATCATA AACCTCATAATTAAAAATTTATCTAAGTCACTAACTGAGAATAAATATTCTCAATTAGATGAAGAGCAAC CAATGGAGATTGATTAAACGAACATGAAAATTATTCTTTTCTTGGCACTGATAACACTCGCTACTTGTGA GCTTTATCACTACCAAGAGTGTGTTAGAGGTACAACAGTACTTTTAAAAGAACCTTGCTCTTCTGGAACA TACGAGGGCAATTCACCATTTCATCCTCTAGCTGATAACAAATTTGCACTGACTTGCTTTAGCACTCAAT TTGCTTTTGCTTGTCCTGACGGCGTAAAACACGTCTATCAGTTACGTGCCAGATCAGTTTCACCTAAACT GTTCATCAGACAAGAGGAAGTTCAAGAACTTTACTCTCCAATTTTTCTTATTGTTGCGGCAATAGTGTTT ATAACACTTTGCTTCACACTCAAAAGAAAGACAGAATGATTGAACTTTCATTAATTGACTTCTATTTGTG CTTTTTAGCCTTTCTGCTATTCCTTGTTTTAATTATGCTTATTATCTTTTGGTTCTCACTTGAACTGCAA GATCATAATGAAACTTGTCACGCCTAAACGAACATGAAATTTCTTGTTTTCTTAGGAATCATCACAACTG TAGCTGCATTTCACCAAGAATGTAGTTTACAGTCATGTACTCAACATCAACCATATGTAGTTGATGACCC GTGTCCTATTCACTTCTATTCTAAATGGTATATTAGAGTAGGAGCTAGAAAATCAGCACCTTTAATTGAA TTGTGCGTGGATGAGGCTGGTTCTAAATCACCCATTCAGTACATCGATATCGGTAATTATACAGTTTCCT GTTTACCTTTTACAATTAATTGCCAGGAACCTAAATTGGGTAGTCTTGTAGTGCGTTGTTCGTTCTATGA AGACTTTTTAGAGTATCATGACGTTCGTGTTGTTTTAGATTTCATCTAAACGAACAAACTAAAATGTCTG ATAATGGACCCCAAAATCAGCGAAATGCACCCCGCATTACGTTTGGTGGACCCTCAGATTCAACTGGCAG TAACCAGAATGGAGAACGCAGTGGGGCGCGATCAAAACAACGTCGGCCCCAAGGTTTACCCAATAATACT GCGTCTTGGTTCACCGCTCTCACTCAACATGGCAAGGAAGACCTTAAATTCCCTCGAGGACAAGGCGTTC CAATTAACACCAATAGCAGTCCAGATGACCAAATTGGCTACTACCGAAGAGCTACCAGACGAATTCGTGG TGGTGACGGTAAAATGAAAGATCTCAGTCCAAGATGGTATTTCTACTACCTAGGAACTGGGCCAGAAGCT GGACTTCCCTATGGTGCTAACAAAGACGGCATCATATGGGTTGCAACTGAGGGAGCCTTGAATACACCAA AAGATCACATTGGCACCCGCAATCCTGCTAACAATGCTGCAATCGTGCTACAACTTCCTCAAGGAACAAC ATTGCCAAAAGGCTTCTACGCAGAAGGGAGCAGAGGCGGCAGTCAAGCCTCTTCTCGTTCCTCATCACGT AGTCGCAACAGTTCAAGAAATTCAACTCCAGGCAGCAGTAGGGGAACTTCTCCTGCTAGAATGGCTGGCA ATGGCGGTGATGCTGCTCTTGCTTTGCTGCTGCTTGACAGATTGAACCAGCTTGAGAGCAAAATGTCTGG TAAAGGCCAACAACAACAAGGCCAAACTGTCACTAAGAAATCTGCTGCTGAGGCTTCTAAGAAGCCTCGG CAAAAACGTACTGCCACTAAAGCATACAATGTAACACAAGCTTTCGGCAGACGTGGTCCAGAACAAACCC AAGGAAATTTTGGGGACCAGGAACTAATCAGACAAGGAACTGATTACAAACATTGGCCGCAAATTGCACA ATTTGCCCCCAGCGCTTCAGCGTTCTTCGGAATGTCGCGCATTGGCATGGAAGTCACACCTTCGGGAACG TGGTTGACCTACACAGGTGCCATCAAATTGGATGACAAAGATCCAAATTTCAAAGATCAAGTCATTTTGC 
TGAATAAGCATATTGACGCATACAAAACATTCCCACCAACAGAGCCTAAAAAGGACAAAAAGAAGAAGGC TGATGAAACTCAAGCCTTACCGCAGAGACAGAAGAAACAGCAAACTGTGACTCTTCTTCCTGCTGCAGAT TTGGATGATTTCTCCAAACAATTGCAACAATCCATGAGCAGTGCTGACTCAACTCAGGCCTAAACTCATG CAGACCACACAAGGCAGATGGGCTATATAAACGTTTTCGCTTTTCCGTTTACGATATATAGTCTACTCTT GTGCAGAATGAATTCTCGTAACTACATAGCACAAGTAGATGTAGTTAACTTTAATCTCACATAGCAATCT TTAATCAGTGTGTAACATTAGGGAGGACTTGAAAGAGCCACCACATTTTCACCGAGGCCACGCGGAGTAC GATCGAGTGTACAGTGAACAATGCTAGGGAGAGCTGCCTATATGGAAGAGCCCTAATGTGTAAAATTAAT TTTAGTAGTGCTATCCCCATGTGATTTTAATAGCTTCTTAGGAGAATGACAAAAAAAAAAAAAAAAAAAA AAAAAAAAAAAAA q2-types-2021.8.0/q2_types/feature_data/tests/data/dna-sequences-id-starts-with-space.fasta000066400000000000000000000000351412142116700314170ustar00rootroot00000000000000> this_id_starts_with_a_spaceq2-types-2021.8.0/q2_types/feature_data/tests/data/dna-sequences-no-id.fasta000066400000000000000000000000011412142116700264420ustar00rootroot00000000000000>q2-types-2021.8.0/q2_types/feature_data/tests/data/dna-sequences-with-duplicate-ids.fasta000066400000000000000000000004061412142116700311450ustar00rootroot00000000000000>SEQUENCE1 ACGTACGTACGTACGTACGTACGTACGTACGTACGTACGTACGTACGTACGTACGTACGTACGT >SEQUENCE2 ACGTACGTACGTACGTACGTACGTACGTACGTACGTACGTACGTACGTACGTACGTACGTACGT ACGTACGTACGTACGTACGTACGT >SEQUENCE1 TTAAGGCCCACGTACGTACGTACGTACGTACGTACGTACGTACGTACGTACGTACGTACGTACGTACGTACGT q2-types-2021.8.0/q2_types/feature_data/tests/data/dna-sequences.fasta000066400000000000000000000002611412142116700254460ustar00rootroot00000000000000>SEQUENCE1 ACGTACGTACGTACGTACGTACGTACGTACGTACGTACGTACGTACGTACGTACGTACGTACGT >SEQUENCE2 ACGTACGTACGTACGTACGTACGTACGTACGTACGTACGTACGTACGTACGTACGTACGTACGT ACGTACGTACGTACGTACGTACGT q2-types-2021.8.0/q2_types/feature_data/tests/data/dna-with-bom-fails.fasta000066400000000000000000000000221412142116700262700ustar00rootroot00000000000000Not a valid id q2-types-2021.8.0/q2_types/feature_data/tests/data/dna-with-bom-passes.fasta000066400000000000000000000000401412142116700264700ustar00rootroot00000000000000>Some kinda DNA ACGTACGTACGT q2-types-2021.8.0/q2_types/feature_data/tests/data/empty_differential.tsv000066400000000000000000000001741412142116700263060ustar00rootroot00000000000000featureid F0 F1 F2 F3 F4 F5 F6 F7 F8 F9 F10 F11 F12 F13 F14 F15 F16 F17 F18 F19 F20 F21 F22 F23 F24 F25 F26 F27 F28 F29 F30 q2-types-2021.8.0/q2_types/feature_data/tests/data/inf_differential.tsv000066400000000000000000000012551412142116700257250ustar00rootroot00000000000000featureid effect F0 inf F1 1.01418002973925 F2 1.02456128258909 F3 -0.74363992043225 F4 1.29823896534823 F5 -1.12965055281585 F6 -0.522401797448688 F7 0.327560711072239 F8 -1.3738693465664802 F9 -0.7847891526325621 F10 -0.280063201878434 F11 -0.251269847578052 F12 1.24602780723028 F13 0.665734866338239 F14 -0.889042985114811 F15 -0.811956802515126 F16 0.11591764582945 F17 -0.156195990858492 F18 -0.965770633683909 F19 0.8035240337800391 F20 0.680306950765235 F21 -0.688388077896823 F22 0.7949653982850671 F23 -1.11912925367142 F24 -1.10593563527746 F25 0.678934047810573 F26 -0.937189288219405 F27 -0.99973019311648 F28 -0.31799033232181 F29 -0.66141741897569 F30 0.550511528315366 q2-types-2021.8.0/q2_types/feature_data/tests/data/left-dna-sequences.fasta000066400000000000000000000001071412142116700263750ustar00rootroot00000000000000>SEQUENCE1 NGCTCCTAGGTCGGCATGATGGGGGAAGGAGAGCATGGGAAGAAATGAGAGAGTAGCAA q2-types-2021.8.0/q2_types/feature_data/tests/data/not-dna-sequences000066400000000000000000000001111412142116700251410ustar00rootroot00000000000000; I don't know 
what this is, but it isn't >DNA 123456789 >DNA abcdefghi q2-types-2021.8.0/q2_types/feature_data/tests/data/not-dna-sequences.fasta000066400000000000000000000000371412142116700262450ustar00rootroot00000000000000>DNA 123456789 >DNA abcdefghi q2-types-2021.8.0/q2_types/feature_data/tests/data/not-rna-sequences000066400000000000000000000001451412142116700251660ustar00rootroot00000000000000; I don't know what this is, but it isn't >notRNA1 123456789 >notRNA2 abcdefghi >notRNA3 ACTGAATTT q2-types-2021.8.0/q2_types/feature_data/tests/data/not-rna-sequences.fasta000066400000000000000000000000731412142116700262630ustar00rootroot00000000000000>notRNA1 123456789 >notRNA2 abcdefghi >notRNA3 ACTGAATTT q2-types-2021.8.0/q2_types/feature_data/tests/data/not_differential.tsv000066400000000000000000000000631412142116700257450ustar00rootroot00000000000000asdfasdfasdfasdf asdfasdfa asdfasdf asdf asdfas dfaq2-types-2021.8.0/q2_types/feature_data/tests/data/protein-sequences-duplicate-ids.fasta000066400000000000000000000004111412142116700311060ustar00rootroot00000000000000>sequence1 MTTRDLTAAQFNETIQSSDMVLVDYWASWCGPCRAFAPTFAESSEKHPDVVHAKVDTEAERELA AAAQIR >sequence2 MVKQIESKTAFQEALDAAGDKLVVVDFSATWCGPCKMIKPFFHSLSEKYSNVIFLEVDVDDCQD VASECEVKCMPTFQFFKKGQKVGEFSGAN >sequence1 AFQEALDAAGDKLVVVDFSATWCGPCKMIKPFFHSLSEKYSNVIFLEVDVDDCQDQKVGEFSGA q2-types-2021.8.0/q2_types/feature_data/tests/data/protein-sequences.fasta000066400000000000000000000002751412142116700263710ustar00rootroot00000000000000>sequence1 MTTRDLTAAQFNETIQSSDMVLVDYWASWCGPCRAFAPTFAESSEKHPDVVHAKVDTEAERELA AAAQIR >sequence2 MVKQIESKTAFQEALDAAGDKLVVVDFSATWCGPCKMIKPFFHSLSEKYSNVIFLEVDVDDCQD VASECEVKCMPTFQFFKKGQKVGEFSGAN q2-types-2021.8.0/q2_types/feature_data/tests/data/right-dna-sequences.fasta000066400000000000000000000001071412142116700265600ustar00rootroot00000000000000>SEQUENCE1 TTGCTACTCTCTCATTTCTTCCCATGCCTTCCTTCCCCCATCATGCCGACCTAGGAGCC q2-types-2021.8.0/q2_types/feature_data/tests/data/rna-sequences-with-duplicate-ids.fasta000066400000000000000000000004171412142116700311650ustar00rootroot00000000000000>RNASEQUENCE1 ACGUACGUACGUACGUACGUACGUACGUACGUACGUACGUACGUACGUACGUACGUACGUACGU >RNASEQUENCE2 ACGUACGUACGUACGUACGUACGUACGUACGUACGUACGUACGUACGUACGUACGUACGUACGU ACGUACGUACGUACGUACGUACGU >RNASEQUENCE1 UUAAGGCCCACGUACGUACGUACGUACGUACGUACGUACGUACGUACGUACGUACGUACGUACGUACGUACGU q2-types-2021.8.0/q2_types/feature_data/tests/data/rna-sequences.fasta000066400000000000000000000002671412142116700254720ustar00rootroot00000000000000>RNASEQUENCE1 ACGUACGUACGUACGUACGUACGUACGUACGUACGUACGUACGUACGUACGUACGUACGUACGU >RNASEQUENCE2 ACGUACGUACGUACGUACGUACGUACGUACGUACGUACGUACGUACGUACGUACGUACGUACGU ACGUACGUACGUACGUACGUACGU q2-types-2021.8.0/q2_types/feature_data/tests/data/rna-with-bom-passes.fasta000066400000000000000000000000401412142116700265060ustar00rootroot00000000000000>Some kinda DNA ACGUACGUACGU q2-types-2021.8.0/q2_types/feature_data/tests/data/taxonomy/000077500000000000000000000000001412142116700235525ustar00rootroot00000000000000q2-types-2021.8.0/q2_types/feature_data/tests/data/taxonomy/1-column.tsv000066400000000000000000000000251412142116700257400ustar00rootroot00000000000000Feature ID seq1 seq2 q2-types-2021.8.0/q2_types/feature_data/tests/data/taxonomy/2-column.tsv000066400000000000000000000001061412142116700257410ustar00rootroot00000000000000Feature ID Taxon seq1 k__Bacteria; p__Proteobacteria seq2 k__Bacteria 
q2-types-2021.8.0/q2_types/feature_data/tests/data/taxonomy/3-column.tsv000066400000000000000000000001171412142116700257440ustar00rootroot00000000000000Feature ID Taxon Confidence seq1 k__Foo; p__Bar -1.0 seq2 k__Foo; p__Baz -42.0 q2-types-2021.8.0/q2_types/feature_data/tests/data/taxonomy/blanks000066400000000000000000000000341412142116700247440ustar00rootroot00000000000000 q2-types-2021.8.0/q2_types/feature_data/tests/data/taxonomy/duplicate-columns.tsv000066400000000000000000000004671412142116700277470ustar00rootroot00000000000000Feature ID Taxon Column1 Column2 Column1 SEQUENCE1 k__Bacteria; p__Proteobacteria; c__Alphaproteobacteria; o__Rhodobacterales; f__Rhodobacteraceae; g__Sulfitobacter; s__mediterraneus a b c SEQUENCE2 k__Bacteria; p__Proteobacteria; c__Alphaproteobacteria; o__Rhodobacterales; f__Rhodobacteraceae; g__; s__ d e f q2-types-2021.8.0/q2_types/feature_data/tests/data/taxonomy/duplicate-ids.tsv000066400000000000000000000004511412142116700270370ustar00rootroot00000000000000Feature ID Taxon SEQUENCE1 k__Bacteria; p__Proteobacteria; c__Alphaproteobacteria; o__Rhodobacterales; f__Rhodobacteraceae; g__Sulfitobacter; s__mediterraneus SEQUENCE2 k__Bacteria; p__Proteobacteria; c__Alphaproteobacteria; o__Rhodobacterales; f__Rhodobacteraceae; g__; s__ SEQUENCE1 k__Bacteria q2-types-2021.8.0/q2_types/feature_data/tests/data/taxonomy/empty000066400000000000000000000000001412142116700246210ustar00rootroot00000000000000feature-table-with-taxonomy-metadata_v210.biom000066400000000000000000001155101412142116700342410ustar00rootroot00000000000000q2-types-2021.8.0/q2_types/feature_data/tests/data/taxonomyHDF  H`  TREEHEAPX observationsample8 @id ` @type` H format-url` P format-version@ H generated-by` Hcreation-date` H shape@ 0 nnz@`TREE @HLhNGCOL No Table IDhttp://biom-format.orgq2-types test data2016-11-22T15:02:32.495577O2O3O1 O0 a b a babbaS2S3S1S0 HEAPX8matrixidsmetadatagroup-metadata SNOD`0^X^x`TREE@'HEAPX `dataindicesindptr8SNOD(p[[]C @(HLhN ?@4 4 deflatex4XPTREE5(SNOD09x^c``Bq@i(-% VJPZJAi (uHx^cd```bf`$x^c````bv b~ ,%x^cb``H`N fB(d>?hx^cd``H`A fD h|.4>7 x^c`0Š5{-+Ai-(% ցPZJAi=~Hx^cd```bf`$x^c````bv b~ ,%x^cb``H`Q fB⋠@ Dx^c`F(4p  deflate1x4XXTREE(  deflate:x4XXTREE( deflateDx4XPTREE(TREE(ZHEAPXNtaxonomy@( deflateOx4X@TREE))SNODN[]TREE(ZHEAPX]PX^x`TREEcHEAPX8`matrixidsmetadatagroup-metadata a8cTREE8nHEAPX Xcdataindicesindptr8SNOD(К`a8c@h ?@4 4 deflatefx4XPTREE5/)SNODdox  deflatepx4XXTREEd)  deflateyx4XXTREEy) deflatex4XPTREE)hTREE@HEAPXxH@ deflatex4XXTREE)SNODКTREE@HEAPXPq2-types-2021.8.0/q2_types/feature_data/tests/data/taxonomy/feature-table_v210.biom000066400000000000000000001020101412142116700277040ustar00rootroot00000000000000HDF  `  xTREE`HEAPX observationsample8 @id ` @type ` H format-url` P format-version@ H generated-by` Hcreation-date` H shape@ 0 nnz@TREEhHEAPX8matrixidsmetadatagroup-metadata GCOL No Table ID otu tablehttp://biom-format.orgexample2014-07-29T16:15:43.318377GG_OTU_4GG_OTU_5GG_OTU_3 GG_OTU_2 GG_OTU_1 Sample4 Sample5 Sample6Sample3Sample2Sample18SNODxQQSTREE&HEAPX dataindicesindptr8SNOD(NNQBK L@N ?@4 4 deflateSPTREE'8(SNOD80x9x^c``AB P>8htu4} Tx^cbF fb fb&(ĆɃ@|%x^c````b6 b^ b-x^```H`. 
1a|N4>B泡فg8Yx^c`0?ؓG @i(.} Tx^cd```bF(LH|al$ x^c````bV bn b~ `7x^cg``H` fGh|4>/  deflateH1SXTREE!_(  deflate:SXTREE( deflateCSPTREE"( L@NTREE&HEAPX`NPNQTREE&HEAPX QPQSTREEVHEAPX8Smatrixidsmetadatagroup-metadata `TVTREEaHEAPX Vdataindicesindptr8SNOD(HpHu8T`TV~~Ѐ ?@4 4 deflatePYSPTREE"(SNOD@Xbl  deflatecSXTREE (  deflatemSXTREE( deflateXvSPTREE$)~ЀTREEaHEAPXPpTREEaHEAPXPq2-types-2021.8.0/q2_types/feature_data/tests/data/taxonomy/greater-column-length.tsv000066400000000000000000000001171412142116700305120ustar00rootroot00000000000000Feature ID Taxon seq1 k__Bacteria; p__Proteobacteria -1.0 seq2 k__Bacteria 1.0 q2-types-2021.8.0/q2_types/feature_data/tests/data/taxonomy/greater-header-length.tsv000066400000000000000000000001261412142116700304450ustar00rootroot00000000000000Feature ID Taxon Confidence Random seq1 k__Foo; p__Bar -1.0 seq2 k__Foo; p__Baz -42.0 q2-types-2021.8.0/q2_types/feature_data/tests/data/taxonomy/header-only.tsv000066400000000000000000000000211412142116700265100ustar00rootroot00000000000000Feature ID Taxon q2-types-2021.8.0/q2_types/feature_data/tests/data/taxonomy/headerless.tsv000066400000000000000000000001041412142116700264220ustar00rootroot00000000000000seq1 k__Foo; p__Bar some another seq2 k__Foo; p__Baz column column! q2-types-2021.8.0/q2_types/feature_data/tests/data/taxonomy/jagged.tsv000066400000000000000000000005271412142116700255350ustar00rootroot00000000000000Feature ID Taxon Confidence SEQUENCE1 k__Bacteria; p__Proteobacteria; c__Alphaproteobacteria; o__Rhodobacterales; f__Rhodobacteraceae; g__Sulfitobacter; s__mediterraneus -1.0 SEQUENCE2 k__Bacteria; p__Proteobacteria; c__Alphaproteobacteria; o__Rhodobacterales; f__Rhodobacteraceae; g__; s__ 42 43 SEQUENCE3 k__Bacteria; p__Proteobacteria -1.0 q2-types-2021.8.0/q2_types/feature_data/tests/data/taxonomy/leading_space_taxon.tsv000066400000000000000000000000661412142116700303010ustar00rootroot00000000000000Feature ID Taxon Confidence seq1 k__Foo; p__Bar -1.0 q2-types-2021.8.0/q2_types/feature_data/tests/data/taxonomy/many-rows.tsv000066400000000000000000000002001412142116700262340ustar00rootroot00000000000000Feature ID Taxon seq1 foo seq2 foo seq3 foo seq4 foo seq5 foo seq6 foo seq7 foo seq8 foo seq9 foo seq10 foo seq11 foo seq12 foo q2-types-2021.8.0/q2_types/feature_data/tests/data/taxonomy/start_end_space_taxon.tsv000066400000000000000000000000671412142116700306620ustar00rootroot00000000000000Feature ID Taxon Confidence seq1 k__Foo; p__Bar -1.0 q2-types-2021.8.0/q2_types/feature_data/tests/data/taxonomy/trailing_space_taxon.tsv000066400000000000000000000000671412142116700305100ustar00rootroot00000000000000Feature ID Taxon Confidence seq1 k__Foo; p__Bar -1.0 q2-types-2021.8.0/q2_types/feature_data/tests/data/taxonomy/valid-but-messy.tsv000066400000000000000000000002251412142116700273340ustar00rootroot00000000000000 Feature ID Taxon Extra Column SEQUENCE1 k__Bar; p__Baz foo seq2 some; taxonomy; for; ya bar baz q2-types-2021.8.0/q2_types/feature_data/tests/test_format.py000066400000000000000000000533041412142116700236710ustar00rootroot00000000000000# ---------------------------------------------------------------------------- # Copyright (c) 2016-2021, QIIME 2 development team. # # Distributed under the terms of the Modified BSD License. # # The full license is in the file LICENSE, distributed with this software. 
# ---------------------------------------------------------------------------- import os import os.path import shutil import unittest from q2_types.feature_data import ( TaxonomyFormat, TaxonomyDirectoryFormat, HeaderlessTSVTaxonomyFormat, HeaderlessTSVTaxonomyDirectoryFormat, TSVTaxonomyFormat, TSVTaxonomyDirectoryFormat, DNAFASTAFormat, DNASequencesDirectoryFormat, PairedDNASequencesDirectoryFormat, AlignedDNAFASTAFormat, AlignedDNASequencesDirectoryFormat, DifferentialDirectoryFormat, ProteinFASTAFormat, AlignedProteinFASTAFormat, FASTAFormat, AlignedProteinSequencesDirectoryFormat, ProteinSequencesDirectoryFormat, RNAFASTAFormat, RNASequencesDirectoryFormat, AlignedRNAFASTAFormat, AlignedRNASequencesDirectoryFormat, BLAST6DirectoryFormat ) from qiime2.plugin.testing import TestPluginBase from qiime2.plugin import ValidationError class TestTaxonomyFormats(TestPluginBase): package = 'q2_types.feature_data.tests' def test_taxonomy_format_validate_positive(self): filenames = ['headerless.tsv', '2-column.tsv', '3-column.tsv', 'valid-but-messy.tsv', 'many-rows.tsv'] filepaths = [self.get_data_path(os.path.join('taxonomy', filename)) for filename in filenames] for filepath in filepaths: format = TaxonomyFormat(filepath, mode='r') format.validate() def test_taxonomy_format_validate_negative(self): filenames = ['empty', 'blanks', '1-column.tsv'] filepaths = [self.get_data_path(os.path.join('taxonomy', filename)) for filename in filenames] for filepath in filepaths: format = TaxonomyFormat(filepath, mode='r') with self.assertRaisesRegex(ValidationError, 'Taxonomy'): format.validate() def test_taxonomy_directory_format(self): # Basic test to verify that single-file directory format is working. filepath = self.get_data_path(os.path.join('taxonomy', '2-column.tsv')) shutil.copy(filepath, os.path.join(self.temp_dir.name, 'taxonomy.tsv')) format = TaxonomyDirectoryFormat(self.temp_dir.name, mode='r') format.validate() # NOTE: the tests below for HeaderlessTSVTaxonomyFormat use some test files # that have headers. However, it makes no difference to this file format # since the header will be interpreted as data and exercises the correct # codepaths in the sniffer. # # These tests are nearly identical to the tests above for TaxonomyFormat -- # the sniffer operates in exactly the same way (the transformers, however, # differ in behavior). def test_headerless_tsv_taxonomy_format_validate_positive(self): filenames = ['headerless.tsv', '2-column.tsv', '3-column.tsv', 'valid-but-messy.tsv', 'many-rows.tsv'] filepaths = [self.get_data_path(os.path.join('taxonomy', filename)) for filename in filenames] for filepath in filepaths: format = HeaderlessTSVTaxonomyFormat(filepath, mode='r') format.validate() def test_headerless_tsv_taxonomy_format_validate_negative(self): filenames = ['empty', 'blanks', '1-column.tsv'] filepaths = [self.get_data_path(os.path.join('taxonomy', filename)) for filename in filenames] for filepath in filepaths: format = HeaderlessTSVTaxonomyFormat(filepath, mode='r') with self.assertRaisesRegex(ValidationError, 'HeaderlessTSVTaxonomy'): format.validate() def test_headerless_tsv_taxonomy_directory_format(self): # Basic test to verify that single-file directory format is working. 
filepath = self.get_data_path(os.path.join('taxonomy', 'headerless.tsv')) shutil.copy(filepath, os.path.join(self.temp_dir.name, 'taxonomy.tsv')) format = HeaderlessTSVTaxonomyDirectoryFormat(self.temp_dir.name, mode='r') format.validate() def test_tsv_taxonomy_format_validate_positive(self): filenames = ['2-column.tsv', '3-column.tsv', 'valid-but-messy.tsv', 'many-rows.tsv'] filepaths = [self.get_data_path(os.path.join('taxonomy', filename)) for filename in filenames] for filepath in filepaths: format = TSVTaxonomyFormat(filepath, mode='r') format.validate() def test_tsv_taxonomy_format_validate_negative(self): filenames = ['empty', 'blanks', '1-column.tsv', 'headerless.tsv', 'header-only.tsv', 'jagged.tsv'] filepaths = [self.get_data_path(os.path.join('taxonomy', filename)) for filename in filenames] for filepath in filepaths: format = TSVTaxonomyFormat(filepath, mode='r') with self.assertRaisesRegex(ValidationError, 'TSVTaxonomy'): format.validate() def test_tsv_taxonomy_directory_format(self): # Basic test to verify that single-file directory format is working. filepath = self.get_data_path(os.path.join('taxonomy', '2-column.tsv')) shutil.copy(filepath, os.path.join(self.temp_dir.name, 'taxonomy.tsv')) format = TSVTaxonomyDirectoryFormat(self.temp_dir.name, mode='r') format.validate() def test_tsv_taxonomy_format_column_header_lengths(self): filenames = ['greater-column-length.tsv', 'greater-header-length.tsv'] filepaths = [self.get_data_path(os.path.join('taxonomy', filename)) for filename in filenames] for filepath in filepaths: format = TSVTaxonomyFormat(filepath, mode='r') with self.assertRaisesRegex(ValidationError, 'line 2.*3 values.*expected 2'): format.validate() class TestNucleicAcidFASTAFormats(TestPluginBase): package = 'q2_types.feature_data.tests' # DNA Format Tests def test_permissive_fasta_format(self): filepath = self.get_data_path('dna-sequences-gisaid.fasta') format = FASTAFormat(filepath, mode='r') format.validate() def test_dna_fasta_format_validate_positive(self): filepath = self.get_data_path('dna-sequences.fasta') format = DNAFASTAFormat(filepath, mode='r') format.validate() def test_dna_fasta_format_bom_passes(self): filepath = self.get_data_path('dna-with-bom-passes.fasta') format = DNAFASTAFormat(filepath, mode='r') format.validate() def test_dna_fasta_format_empty_file(self): filepath = os.path.join(self.temp_dir.name, 'empty') with open(filepath, 'w') as fh: fh.write('\n') format = DNAFASTAFormat(filepath, mode='r') format.validate() def test_dna_fasta_format_invalid_characters(self): filepath = self.get_data_path('not-dna-sequences.fasta') format = DNAFASTAFormat(filepath, mode='r') with self.assertRaisesRegex(ValidationError, "Invalid character '1' " ".*0 on line 2"): format.validate() def test_dna_fasta_format_validate_negative(self): filepath = self.get_data_path('not-dna-sequences') format = DNAFASTAFormat(filepath, mode='r') with self.assertRaisesRegex(ValidationError, 'DNAFASTA'): format.validate() def test_dna_fasta_format_consecutive_IDs(self): filepath = self.get_data_path('dna-sequences-consecutive-ids.fasta') format = DNAFASTAFormat(filepath, mode='r') with self.assertRaisesRegex( ValidationError, 'consecutive descriptions.*1'): format.validate() def test_dna_fasta_format_missing_initial_ID(self): filepath = self.get_data_path('dna-sequences-first-line-not-id.fasta') format = DNAFASTAFormat(filepath, mode='r') with self.assertRaisesRegex(ValidationError, 'First line'): format.validate() def test_dna_fasta_format_corrupt_characters(self): filepath 
= self.get_data_path('dna-sequences-corrupt-characters.fasta') format = DNAFASTAFormat(filepath, mode='r') with self.assertRaisesRegex(ValidationError, 'utf-8.*2'): format.validate() def test_dna_fasta_format_bom_fails(self): filepath = self.get_data_path('dna-with-bom-fails.fasta') format = DNAFASTAFormat(filepath, mode='r') with self.assertRaisesRegex(ValidationError, 'First line'): format.validate() def test_dna_sequences_directory_format(self): filepath = self.get_data_path('dna-sequences.fasta') shutil.copy(filepath, os.path.join(self.temp_dir.name, 'dna-sequences.fasta')) format = DNASequencesDirectoryFormat(self.temp_dir.name, mode='r') format.validate() def test_dna_fasta_format_duplicate_ids(self): filepath = self.get_data_path('dna-sequences-duplicate-id.fasta') format = DNAFASTAFormat(filepath, mode='r') with self.assertRaisesRegex(ValidationError, '3.*duplicate.*1'): format.validate() def test_dna_fasta_format_no_id(self): filepath = self.get_data_path('dna-sequences-no-id.fasta') format = DNAFASTAFormat(filepath, mode='r') with self.assertRaisesRegex(ValidationError, '1.*missing an ID'): format.validate() def test_dna_fasta_format_id_starts_with_space(self): filepath = self.get_data_path( 'dna-sequences-id-starts-with-space.fasta') format = DNAFASTAFormat(filepath, mode='r') with self.assertRaisesRegex(ValidationError, '1 starts with a space'): format.validate() def test_paired_dna_sequences_directory_format(self): filepath = self.get_data_path('dna-sequences.fasta') temp_dir = self.temp_dir.name left_seq = os.path.join(temp_dir, 'left-dna-sequences.fasta') right_seq = os.path.join(temp_dir, 'right-dna-sequences.fasta') shutil.copy(filepath, left_seq) shutil.copy(filepath, right_seq) format = PairedDNASequencesDirectoryFormat(temp_dir, mode='r') format.validate() def test_aligned_dna_fasta_format_validate_positive(self): filepath = self.get_data_path('aligned-dna-sequences.fasta') format = AlignedDNAFASTAFormat(filepath, mode='r') format.validate() def test_aligned_dna_fasta_format_validate_negative(self): filepath = self.get_data_path('not-dna-sequences') format = AlignedDNAFASTAFormat(filepath, mode='r') with self.assertRaisesRegex(ValidationError, 'AlignedDNAFASTA'): format.validate() def test_aligned_dna_fasta_format_unaligned(self): filepath = self.get_data_path('dna-sequences.fasta') format = AlignedDNAFASTAFormat(filepath, mode='r') with self.assertRaisesRegex(ValidationError, 'line 4.*length 88.*length 64'): format.validate() def test_aligned_dna_sequences_directory_format(self): filepath = self.get_data_path('aligned-dna-sequences.fasta') temp_dir = self.temp_dir.name shutil.copy(filepath, os.path.join(temp_dir, 'aligned-dna-sequences.fasta')) format = AlignedDNASequencesDirectoryFormat(temp_dir, mode='r') format.validate() # RNA Format Tests def test_rna_fasta_format_validate_positive(self): filepath = self.get_data_path('rna-sequences.fasta') format = RNAFASTAFormat(filepath, mode='r') format.validate() def test_rna_fasta_format_bom_passes(self): filepath = self.get_data_path('rna-with-bom-passes.fasta') format = RNAFASTAFormat(filepath, mode='r') format.validate() def test_rna_fasta_format_empty_file(self): filepath = os.path.join(self.temp_dir.name, 'empty') with open(filepath, 'w') as fh: fh.write('\n') format = RNAFASTAFormat(filepath, mode='r') format.validate() def test_rna_fasta_format_invalid_characters(self): filepath = self.get_data_path('not-rna-sequences.fasta') format = RNAFASTAFormat(filepath, mode='r') with self.assertRaisesRegex(ValidationError, 
"Invalid character '1' " ".*0 on line 2"): format.validate() def test_rna_fasta_format_validate_negative(self): filepath = self.get_data_path('not-rna-sequences') format = RNAFASTAFormat(filepath, mode='r') with self.assertRaisesRegex(ValidationError, 'RNAFASTA'): format.validate() def test_rna_fasta_format_consecutive_IDs(self): filepath = self.get_data_path('dna-sequences-consecutive-ids.fasta') format = RNAFASTAFormat(filepath, mode='r') with self.assertRaisesRegex( ValidationError, 'consecutive descriptions.*1'): format.validate() def test_rna_fasta_format_missing_initial_ID(self): filepath = self.get_data_path('dna-sequences-first-line-not-id.fasta') format = RNAFASTAFormat(filepath, mode='r') with self.assertRaisesRegex(ValidationError, 'First line'): format.validate() def test_rna_fasta_format_corrupt_characters(self): filepath = self.get_data_path('dna-sequences-corrupt-characters.fasta') format = RNAFASTAFormat(filepath, mode='r') with self.assertRaisesRegex(ValidationError, 'utf-8.*2'): format.validate() def test_rna_fasta_format_bom_fails(self): filepath = self.get_data_path('dna-with-bom-fails.fasta') format = RNAFASTAFormat(filepath, mode='r') with self.assertRaisesRegex(ValidationError, 'First line'): format.validate() def test_rna_sequences_directory_format(self): filepath = self.get_data_path('rna-sequences.fasta') shutil.copy(filepath, os.path.join(self.temp_dir.name, 'rna-sequences.fasta')) format = RNASequencesDirectoryFormat(self.temp_dir.name, mode='r') format.validate() def test_rna_fasta_format_duplicate_ids(self): filepath = self.get_data_path('rna-sequences-with-duplicate-ids.fasta') format = RNAFASTAFormat(filepath, mode='r') with self.assertRaisesRegex(ValidationError, '6.*duplicate.*1'): format.validate() def test_rna_fasta_format_no_id(self): filepath = self.get_data_path('dna-sequences-no-id.fasta') format = RNAFASTAFormat(filepath, mode='r') with self.assertRaisesRegex(ValidationError, '1.*missing an ID'): format.validate() def test_rna_fasta_format_id_starts_with_space(self): filepath = self.get_data_path( 'dna-sequences-id-starts-with-space.fasta') format = RNAFASTAFormat(filepath, mode='r') with self.assertRaisesRegex(ValidationError, '1 starts with a space'): format.validate() def test_aligned_rna_fasta_format_validate_positive(self): filepath = self.get_data_path('aligned-rna-sequences.fasta') format = AlignedRNAFASTAFormat(filepath, mode='r') format.validate() def test_aligned_rna_fasta_format_validate_negative(self): filepath = self.get_data_path('not-rna-sequences') format = AlignedRNAFASTAFormat(filepath, mode='r') with self.assertRaisesRegex(ValidationError, 'AlignedRNAFASTA'): format.validate() def test_aligned_rna_fasta_format_unaligned(self): filepath = self.get_data_path('rna-sequences.fasta') format = AlignedRNAFASTAFormat(filepath, mode='r') with self.assertRaisesRegex(ValidationError, 'line 4.*length 88.*length 64'): format.validate() def test_aligned_rna_sequences_directory_format(self): filepath = self.get_data_path('aligned-rna-sequences.fasta') temp_dir = self.temp_dir.name shutil.copy(filepath, os.path.join(temp_dir, 'aligned-rna-sequences.fasta')) format = AlignedRNASequencesDirectoryFormat(temp_dir, mode='r') format.validate() class TestDifferentialFormat(TestPluginBase): package = 'q2_types.feature_data.tests' def test_differential_format(self): filepath = self.get_data_path('differentials.tsv') temp_dir = self.temp_dir.name shutil.copy(filepath, os.path.join(temp_dir, 'differentials.tsv')) format = 
DifferentialDirectoryFormat(temp_dir, mode='r') format.validate() self.assertTrue(True) def test_differential_format_empty(self): filepath = self.get_data_path('empty_differential.tsv') temp_dir = self.temp_dir.name shutil.copy(filepath, os.path.join(temp_dir, 'differentials.tsv')) with self.assertRaisesRegex(ValidationError, 'least 1 column'): format = DifferentialDirectoryFormat(temp_dir, mode='r') format.validate() def test_differential_format_not(self): filepath = self.get_data_path('not_differential.tsv') temp_dir = self.temp_dir.name shutil.copy(filepath, os.path.join(temp_dir, 'differentials.tsv')) with self.assertRaises(ValidationError): format = DifferentialDirectoryFormat(temp_dir, mode='r') format.validate() def test_differential_format_inf(self): filepath = self.get_data_path('inf_differential.tsv') temp_dir = self.temp_dir.name shutil.copy(filepath, os.path.join(temp_dir, 'differentials.tsv')) with self.assertRaisesRegex(ValidationError, 'numeric'): format = DifferentialDirectoryFormat(temp_dir, mode='r') format.validate() def test_differential_format_bad_type(self): filepath = self.get_data_path('bad_differential.tsv') temp_dir = self.temp_dir.name shutil.copy(filepath, os.path.join(temp_dir, 'differentials.tsv')) with self.assertRaisesRegex(ValidationError, 'numeric'): format = DifferentialDirectoryFormat(temp_dir, mode='r') format.validate() class TestProteinFASTAFormats(TestPluginBase): package = 'q2_types.feature_data.tests' def test_protein_fasta_format_validate_positive(self): filepath = self.get_data_path('protein-sequences.fasta') format = ProteinFASTAFormat(filepath, mode='r') format.validate() format.validate('min') def test_protein_fasta_format_invalid_characters(self): filepath = self.get_data_path('not-dna-sequences.fasta') format = ProteinFASTAFormat(filepath, mode='r') with self.assertRaisesRegex( ValidationError, "Invalid character '1' .*0 on line 2"): format.validate() def test_protein_fasta_format_empty_file(self): filepath = os.path.join(self.temp_dir.name, 'empty') with open(filepath, 'w') as fh: fh.write('\n') format = ProteinFASTAFormat(filepath, mode='r') format.validate() def test_protein_sequences_directory_format(self): filepath = self.get_data_path('protein-sequences.fasta') shutil.copy(filepath, os.path.join( self.temp_dir.name, 'protein-sequences.fasta')) format = ProteinSequencesDirectoryFormat(self.temp_dir.name, mode='r') format.validate() def test_aligned_protein_fasta_format_validate_positive(self): filepath = self.get_data_path('aligned-protein-sequences.fasta') format = AlignedProteinFASTAFormat(filepath, mode='r') format.validate() format.validate('min') def test_aligned_protein_fasta_format_unaligned(self): filepath = self.get_data_path('protein-sequences.fasta') format = AlignedProteinFASTAFormat(filepath, mode='r') with self.assertRaisesRegex( ValidationError, 'line 5 was length 93.* previous .* 70'): format.validate() def test_aligned_protein_sequences_directory_format(self): filepath = self.get_data_path('aligned-protein-sequences.fasta') temp_dir = self.temp_dir.name shutil.copy(filepath, os.path.join(temp_dir, 'aligned-protein-sequences.fasta')) format = AlignedProteinSequencesDirectoryFormat(temp_dir, mode='r') format.validate() class TestBLAST6Format(TestPluginBase): package = 'q2_types.feature_data.tests' def test_blast6_format(self): filepath = self.get_data_path('blast6.tsv') temp_dir = self.temp_dir.name shutil.copy(filepath, os.path.join(temp_dir, 'blast6.tsv')) format = BLAST6DirectoryFormat(temp_dir, mode='r') 
format.validate() self.assertTrue(True) def test_blast6_format_empty(self): temp_dir = self.temp_dir.name open(os.path.join(temp_dir, 'blast6.tsv'), 'w').close() with self.assertRaisesRegex(ValidationError, 'BLAST6 file is empty.'): BLAST6DirectoryFormat(temp_dir, mode='r').validate() def test_blast6_format_invalid(self): filepath = self.get_data_path('blast6_invalid.tsv') temp_dir = self.temp_dir.name shutil.copy(filepath, os.path.join(temp_dir, 'blast6.tsv')) with self.assertRaisesRegex(ValidationError, 'Invalid BLAST6 format.'): BLAST6DirectoryFormat(temp_dir, mode='r').validate() if __name__ == '__main__': unittest.main() q2-types-2021.8.0/q2_types/feature_data/tests/test_transformer.py000066400000000000000000001466231412142116700247520ustar00rootroot00000000000000# ---------------------------------------------------------------------------- # Copyright (c) 2016-2021, QIIME 2 development team. # # Distributed under the terms of the Modified BSD License. # # The full license is in the file LICENSE, distributed with this software. # ---------------------------------------------------------------------------- import os.path import unittest import pandas as pd import pandas.errors import biom import skbio import qiime2 from pandas.testing import assert_frame_equal, assert_series_equal from q2_types.feature_table import BIOMV210Format from q2_types.feature_data import ( TaxonomyFormat, HeaderlessTSVTaxonomyFormat, TSVTaxonomyFormat, DNAFASTAFormat, DNAIterator, PairedDNAIterator, PairedDNASequencesDirectoryFormat, AlignedDNAFASTAFormat, DifferentialFormat, AlignedDNAIterator, ProteinFASTAFormat, AlignedProteinFASTAFormat, RNAFASTAFormat, AlignedRNAFASTAFormat, RNAIterator, AlignedRNAIterator, BLAST6Format ) from q2_types.feature_data._transformer import ( _taxonomy_formats_to_dataframe, _dataframe_to_tsv_taxonomy_format, ProteinIterator, AlignedProteinIterator) from qiime2.plugin.testing import TestPluginBase # NOTE: these tests are fairly high-level and mainly test the transformer # interfaces for the three taxonomy file formats. More in-depth testing for # border cases, errors, etc. are in `TestTaxonomyFormatsToDataFrame` and # `TestDataFrameToTSVTaxonomyFormat` below, which test the lower-level helper # functions utilized by the transformers. 
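# ---------------------------------------------------------------------------
# Editorial note: the helper below is an illustrative sketch added for
# documentation only; it is not part of the upstream q2-types test suite and
# is never called by the tests in this module. It spells out the two
# TestPluginBase helpers the test classes below lean on: transform_format()
# loads packaged test data through the transformer registered between a file
# format and a Python type, while get_transformer() returns that registered
# callable directly. The round trip sketched here assumes a TestPluginBase
# instance whose `package` is 'q2_types.feature_data.tests', as in the
# classes below.
def _example_taxonomy_round_trip(test_case):
    # Read the packaged 3-column taxonomy file into a pandas DataFrame via
    # the registered TSVTaxonomyFormat -> pd.DataFrame transformer.
    _, df = test_case.transform_format(
        TSVTaxonomyFormat, pd.DataFrame,
        filename=os.path.join('taxonomy', '3-column.tsv'))
    # Look up the reverse transformer and write the DataFrame back out as a
    # TSVTaxonomyFormat object.
    to_format = test_case.get_transformer(pd.DataFrame, TSVTaxonomyFormat)
    return to_format(df)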
class TestTaxonomyFormatTransformers(TestPluginBase): package = 'q2_types.feature_data.tests' def test_taxonomy_format_to_dataframe_with_header(self): index = pd.Index(['seq1', 'seq2'], name='Feature ID', dtype=object) exp = pd.DataFrame([['k__Foo; p__Bar', '-1.0'], ['k__Foo; p__Baz', '-42.0']], index=index, columns=['Taxon', 'Confidence'], dtype=object) _, obs = self.transform_format( TaxonomyFormat, pd.DataFrame, filename=os.path.join('taxonomy', '3-column.tsv')) assert_frame_equal(obs, exp) def test_taxonomy_format_to_dataframe_without_header(self): # Bug identified in https://github.com/qiime2/q2-types/issues/107 index = pd.Index(['seq1', 'seq2'], name='Feature ID', dtype=object) columns = ['Taxon', 'Unnamed Column 1', 'Unnamed Column 2'] exp = pd.DataFrame([['k__Foo; p__Bar', 'some', 'another'], ['k__Foo; p__Baz', 'column', 'column!']], index=index, columns=columns, dtype=object) _, obs = self.transform_format( TaxonomyFormat, pd.DataFrame, filename=os.path.join('taxonomy', 'headerless.tsv')) assert_frame_equal(obs, exp) def test_taxonomy_format_to_series_with_header(self): index = pd.Index(['seq1', 'seq2'], name='Feature ID', dtype=object) exp = pd.Series(['k__Foo; p__Bar', 'k__Foo; p__Baz'], index=index, name='Taxon', dtype=object) _, obs = self.transform_format( TaxonomyFormat, pd.Series, filename=os.path.join('taxonomy', '3-column.tsv')) assert_series_equal(obs, exp) def test_taxonomy_format_to_series_without_header(self): # Bug identified in https://github.com/qiime2/q2-types/issues/107 index = pd.Index(['seq1', 'seq2'], name='Feature ID', dtype=object) exp = pd.Series(['k__Foo; p__Bar', 'k__Foo; p__Baz'], index=index, name='Taxon', dtype=object) _, obs = self.transform_format( TaxonomyFormat, pd.Series, filename=os.path.join('taxonomy', 'headerless.tsv')) assert_series_equal(obs, exp) def test_headerless_tsv_taxonomy_format_to_tsv_taxonomy_format(self): exp = ( 'Feature ID\tTaxon\tUnnamed Column 1\tUnnamed Column 2\n' 'seq1\tk__Foo; p__Bar\tsome\tanother\n' 'seq2\tk__Foo; p__Baz\tcolumn\tcolumn!\n' ) _, obs = self.transform_format( HeaderlessTSVTaxonomyFormat, TSVTaxonomyFormat, filename=os.path.join('taxonomy', 'headerless.tsv')) with obs.open() as fh: self.assertEqual(fh.read(), exp) def test_tsv_taxonomy_format_to_dataframe(self): index = pd.Index(['seq1', 'seq2'], name='Feature ID', dtype=object) exp = pd.DataFrame([['k__Foo; p__Bar', '-1.0'], ['k__Foo; p__Baz', '-42.0']], index=index, columns=['Taxon', 'Confidence'], dtype=object) _, obs = self.transform_format( TSVTaxonomyFormat, pd.DataFrame, filename=os.path.join('taxonomy', '3-column.tsv')) assert_frame_equal(obs, exp) def test_tsv_taxonomy_format_to_series(self): index = pd.Index(['seq1', 'seq2'], name='Feature ID', dtype=object) exp = pd.Series(['k__Foo; p__Bar', 'k__Foo; p__Baz'], index=index, name='Taxon', dtype=object) _, obs = self.transform_format( TSVTaxonomyFormat, pd.Series, filename=os.path.join('taxonomy', '3-column.tsv')) assert_series_equal(obs, exp) def test_dataframe_to_tsv_taxonomy_format(self): index = pd.Index(['seq1', 'seq2'], name='Feature ID', dtype=object) columns = ['Taxon', 'Foo', 'Bar'] df = pd.DataFrame([['taxon1', '42', 'foo'], ['taxon2', '43', 'bar']], index=index, columns=columns, dtype=object) exp = ( 'Feature ID\tTaxon\tFoo\tBar\n' 'seq1\ttaxon1\t42\tfoo\n' 'seq2\ttaxon2\t43\tbar\n' ) transformer = self.get_transformer(pd.DataFrame, TSVTaxonomyFormat) obs = transformer(df) with obs.open() as fh: self.assertEqual(fh.read(), exp) def test_series_to_tsv_taxonomy_format(self): index = 
pd.Index(['emrakul', 'peanut'], name='Feature ID', dtype=object) series = pd.Series(['taxon1', 'taxon2'], index=index, name='Taxon', dtype=object) exp = ( 'Feature ID\tTaxon\n' 'emrakul\ttaxon1\n' 'peanut\ttaxon2\n' ) transformer = self.get_transformer(pd.Series, TSVTaxonomyFormat) obs = transformer(series) with obs.open() as fh: self.assertEqual(fh.read(), exp) def test_biom_table_to_tsv_taxonomy_format(self): filepath = self.get_data_path( os.path.join('taxonomy', 'feature-table-with-taxonomy-metadata_v210.biom')) table = biom.load_table(filepath) transformer = self.get_transformer(biom.Table, TSVTaxonomyFormat) obs = transformer(table) self.assertIsInstance(obs, TSVTaxonomyFormat) self.assertEqual( obs.path.read_text(), 'Feature ID\tTaxon\nO0\ta; b\nO1\ta; b\nO2\ta; b\nO3\ta; b\n') def test_biom_table_to_tsv_taxonomy_format_no_taxonomy_md(self): filepath = self.get_data_path( os.path.join('taxonomy', 'feature-table-with-taxonomy-metadata_v210.biom')) table = biom.load_table(filepath) observation_metadata = [dict(taxon=['a', 'b']) for _ in range(4)] table = biom.Table(table.matrix_data, observation_ids=table.ids(axis='observation'), sample_ids=table.ids(axis='sample'), observation_metadata=observation_metadata) transformer = self.get_transformer(biom.Table, TSVTaxonomyFormat) with self.assertRaisesRegex(ValueError, 'O0 does not contain `taxonomy`'): transformer(table) def test_biom_table_to_tsv_taxonomy_format_missing_md(self): filepath = self.get_data_path( os.path.join('taxonomy', 'feature-table-with-taxonomy-metadata_v210.biom')) table = biom.load_table(filepath) observation_metadata = [dict(taxonomy=['a', 'b']) for _ in range(4)] observation_metadata[2]['taxonomy'] = None # Wipe out one entry table = biom.Table(table.matrix_data, observation_ids=table.ids(axis='observation'), sample_ids=table.ids(axis='sample'), observation_metadata=observation_metadata) transformer = self.get_transformer(biom.Table, TSVTaxonomyFormat) with self.assertRaisesRegex(TypeError, 'problem preparing.*O2'): transformer(table) def test_biom_v210_format_to_tsv_taxonomy_format(self): filename = os.path.join( 'taxonomy', 'feature-table-with-taxonomy-metadata_v210.biom') _, obs = self.transform_format(BIOMV210Format, TSVTaxonomyFormat, filename=filename) self.assertIsInstance(obs, TSVTaxonomyFormat) self.assertEqual( obs.path.read_text(), 'Feature ID\tTaxon\nO0\ta; b\nO1\ta; b\nO2\ta; b\nO3\ta; b\n') def test_biom_v210_format_no_md_to_tsv_taxonomy_format(self): with self.assertRaisesRegex(TypeError, 'observation metadata'): self.transform_format( BIOMV210Format, TSVTaxonomyFormat, filename=os.path.join('taxonomy', 'feature-table_v210.biom')) def test_taxonomy_format_with_header_to_metadata(self): _, obs = self.transform_format(TaxonomyFormat, qiime2.Metadata, os.path.join('taxonomy', '3-column.tsv')) index = pd.Index(['seq1', 'seq2'], name='Feature ID', dtype=object) exp_df = pd.DataFrame([['k__Foo; p__Bar', '-1.0'], ['k__Foo; p__Baz', '-42.0']], index=index, columns=['Taxon', 'Confidence'], dtype=object) exp = qiime2.Metadata(exp_df) self.assertEqual(exp, obs) def test_taxonomy_format_without_header_to_metadata(self): _, obs = self.transform_format(TaxonomyFormat, qiime2.Metadata, os.path.join('taxonomy', 'headerless.tsv')) index = pd.Index(['seq1', 'seq2'], name='Feature ID', dtype=object) columns = ['Taxon', 'Unnamed Column 1', 'Unnamed Column 2'] exp_df = pd.DataFrame([['k__Foo; p__Bar', 'some', 'another'], ['k__Foo; p__Baz', 'column', 'column!']], index=index, columns=columns, dtype=object) exp = 
qiime2.Metadata(exp_df) self.assertEqual(exp, obs) def test_tsv_taxonomy_format_to_metadata(self): _, obs = self.transform_format(TSVTaxonomyFormat, qiime2.Metadata, os.path.join('taxonomy', '3-column.tsv')) index = pd.Index(['seq1', 'seq2'], name='Feature ID', dtype=object) exp_df = pd.DataFrame([['k__Foo; p__Bar', '-1.0'], ['k__Foo; p__Baz', '-42.0']], index=index, columns=['Taxon', 'Confidence'], dtype=object) exp = qiime2.Metadata(exp_df) self.assertEqual(exp, obs) def test_tsv_taxonomy_to_metadata_trailing_whitespace_taxon(self): _, obs = self.transform_format(TSVTaxonomyFormat, qiime2.Metadata, os.path.join( 'taxonomy', 'trailing_space_taxon.tsv')) index = pd.Index(['seq1'], name='Feature ID', dtype=object) exp_df = pd.DataFrame([['k__Foo; p__Bar', '-1.0']], index=index, columns=['Taxon', 'Confidence'], dtype=object) exp = qiime2.Metadata(exp_df) self.assertEqual(exp, obs) def test_tsv_taxonomy_to_metadata_leading_whitespace_taxon(self): _, obs = self.transform_format(TSVTaxonomyFormat, qiime2.Metadata, os.path.join( 'taxonomy', 'leading_space_taxon.tsv')) index = pd.Index(['seq1'], name='Feature ID', dtype=object) exp_df = pd.DataFrame([['k__Foo; p__Bar', '-1.0']], index=index, columns=['Taxon', 'Confidence'], dtype=object) exp = qiime2.Metadata(exp_df) self.assertEqual(exp, obs) def test_tsv_taxonomy_to_metadata_trailing_leading_whitespace_taxon(self): _, obs = self.transform_format(TSVTaxonomyFormat, qiime2.Metadata, os.path.join( 'taxonomy', 'start_end_space_taxon.tsv')) index = pd.Index(['seq1'], name='Feature ID', dtype=object) exp_df = pd.DataFrame([['k__Foo; p__Bar', '-1.0']], index=index, columns=['Taxon', 'Confidence'], dtype=object) exp = qiime2.Metadata(exp_df) self.assertEqual(exp, obs) # In-depth testing of the `_taxonomy_formats_to_dataframe` helper function, # which does the heavy lifting for the transformers. 
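# ---------------------------------------------------------------------------
# Editorial note: illustrative sketch only -- not part of the upstream test
# suite and never invoked by it. It shows the expected shape of the
# lower-level helper's output on the packaged 2-column test file (compare
# the test_2_columns case below). The `test_case` argument is assumed to be
# a TestPluginBase instance with `package` set to
# 'q2_types.feature_data.tests'.
def _example_taxonomy_helper_output(test_case):
    path = test_case.get_data_path(os.path.join('taxonomy', '2-column.tsv'))
    # has_header=None (the default) tells the helper to sniff for a header
    # row instead of assuming one is present or absent.
    df = _taxonomy_formats_to_dataframe(path, has_header=None)
    # The resulting DataFrame is indexed by 'Feature ID' and stores the
    # taxonomy strings in the 'Taxon' column, e.g.
    # df.loc['seq1', 'Taxon'] == 'k__Bacteria; p__Proteobacteria'.
    return df.index.name, df.loc['seq1', 'Taxon']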
class TestTaxonomyFormatsToDataFrame(TestPluginBase): package = 'q2_types.feature_data.tests' def test_one_column(self): with self.assertRaisesRegex(ValueError, "two columns, found 1"): _taxonomy_formats_to_dataframe( self.get_data_path(os.path.join('taxonomy', '1-column.tsv'))) def test_blanks(self): with self.assertRaises(pandas.errors.EmptyDataError): _taxonomy_formats_to_dataframe( self.get_data_path(os.path.join('taxonomy', 'blanks'))) def test_empty(self): with self.assertRaises(pandas.errors.EmptyDataError): _taxonomy_formats_to_dataframe( self.get_data_path(os.path.join('taxonomy', 'empty'))) def test_header_only(self): with self.assertRaisesRegex(ValueError, 'one row of data'): _taxonomy_formats_to_dataframe( self.get_data_path(os.path.join('taxonomy', 'header-only.tsv'))) def test_has_header_with_headerless(self): with self.assertRaisesRegex(ValueError, 'requires a header'): _taxonomy_formats_to_dataframe( self.get_data_path(os.path.join('taxonomy', 'headerless.tsv')), has_header=True) def test_jagged(self): with self.assertRaises(pandas.errors.ParserError): _taxonomy_formats_to_dataframe( self.get_data_path(os.path.join('taxonomy', 'jagged.tsv'))) def test_duplicate_ids(self): with self.assertRaisesRegex(ValueError, 'duplicated: SEQUENCE1'): _taxonomy_formats_to_dataframe( self.get_data_path(os.path.join( 'taxonomy', 'duplicate-ids.tsv'))) def test_duplicate_columns(self): with self.assertRaisesRegex(ValueError, 'duplicated: Column1'): _taxonomy_formats_to_dataframe( self.get_data_path(os.path.join( 'taxonomy', 'duplicate-columns.tsv'))) def test_2_columns(self): index = pd.Index(['seq1', 'seq2'], name='Feature ID', dtype=object) exp = pd.DataFrame([['k__Bacteria; p__Proteobacteria'], ['k__Bacteria']], index=index, columns=['Taxon'], dtype=object) # has_header=None (default) obs = _taxonomy_formats_to_dataframe( self.get_data_path(os.path.join('taxonomy', '2-column.tsv'))) assert_frame_equal(obs, exp) # has_header=True obs = _taxonomy_formats_to_dataframe( self.get_data_path(os.path.join('taxonomy', '2-column.tsv')), has_header=True) assert_frame_equal(obs, exp) def test_3_columns(self): index = pd.Index(['seq1', 'seq2'], name='Feature ID', dtype=object) exp = pd.DataFrame([['k__Foo; p__Bar', '-1.0'], ['k__Foo; p__Baz', '-42.0']], index=index, columns=['Taxon', 'Confidence'], dtype=object) # has_header=None (default) obs = _taxonomy_formats_to_dataframe( self.get_data_path(os.path.join('taxonomy', '3-column.tsv'))) assert_frame_equal(obs, exp) # has_header=True obs = _taxonomy_formats_to_dataframe( self.get_data_path(os.path.join('taxonomy', '3-column.tsv')), has_header=True) assert_frame_equal(obs, exp) def test_valid_but_messy_file(self): index = pd.Index( ['SEQUENCE1', 'seq2'], name='Feature ID', dtype=object) exp = pd.DataFrame([['k__Bar; p__Baz', 'foo'], ['some; taxonomy; for; ya', 'bar baz']], index=index, columns=['Taxon', 'Extra Column'], dtype=object) # has_header=None (default) obs = _taxonomy_formats_to_dataframe( self.get_data_path(os.path.join('taxonomy', 'valid-but-messy.tsv'))) assert_frame_equal(obs, exp) # has_header=True obs = _taxonomy_formats_to_dataframe( self.get_data_path(os.path.join('taxonomy', 'valid-but-messy.tsv')), has_header=True) assert_frame_equal(obs, exp) def test_headerless(self): index = pd.Index(['seq1', 'seq2'], name='Feature ID', dtype=object) columns = ['Taxon', 'Unnamed Column 1', 'Unnamed Column 2'] exp = pd.DataFrame([['k__Foo; p__Bar', 'some', 'another'], ['k__Foo; p__Baz', 'column', 'column!']], index=index, columns=columns, 
dtype=object) # has_header=None (default) obs = _taxonomy_formats_to_dataframe( self.get_data_path(os.path.join('taxonomy', 'headerless.tsv'))) assert_frame_equal(obs, exp) # has_header=False obs = _taxonomy_formats_to_dataframe( self.get_data_path(os.path.join('taxonomy', 'headerless.tsv')), has_header=False) assert_frame_equal(obs, exp) # In-depth testing of the `_dataframe_to_tsv_taxonomy_format` helper function, # which does the heavy lifting for the transformers. class TestDataFrameToTSVTaxonomyFormat(TestPluginBase): package = 'q2_types.feature_data.tests' def test_no_rows(self): index = pd.Index([], name='Feature ID', dtype=object) columns = ['Taxon'] df = pd.DataFrame([], index=index, columns=columns, dtype=object) with self.assertRaisesRegex(ValueError, 'one row of data'): _dataframe_to_tsv_taxonomy_format(df) def test_no_columns(self): index = pd.Index(['seq1', 'seq2'], name='Feature ID', dtype=object) columns = [] df = pd.DataFrame([[], []], index=index, columns=columns, dtype=object) with self.assertRaisesRegex(ValueError, 'one column of data'): _dataframe_to_tsv_taxonomy_format(df) def test_invalid_index_name(self): index = pd.Index(['seq1', 'seq2'], name='Foo', dtype=object) columns = ['Taxon'] df = pd.DataFrame([['abc'], ['def']], index=index, columns=columns, dtype=object) with self.assertRaisesRegex(ValueError, "`Feature ID`, found 'Foo'"): _dataframe_to_tsv_taxonomy_format(df) def test_invalid_taxon_column_name(self): index = pd.Index(['seq1', 'seq2'], name='Feature ID', dtype=object) columns = ['Foo'] df = pd.DataFrame([['abc'], ['def']], index=index, columns=columns, dtype=object) with self.assertRaisesRegex(ValueError, "`Taxon`, found 'Foo'"): _dataframe_to_tsv_taxonomy_format(df) def test_duplicate_ids(self): index = pd.Index(['seq1', 'seq2', 'seq1'], name='Feature ID', dtype=object) columns = ['Taxon'] df = pd.DataFrame([['abc'], ['def'], ['ghi']], index=index, columns=columns, dtype=object) with self.assertRaisesRegex(ValueError, "duplicated: seq1"): _dataframe_to_tsv_taxonomy_format(df) def test_duplicate_columns(self): index = pd.Index(['seq1', 'seq2'], name='Feature ID', dtype=object) columns = ['Taxon', 'Taxon'] df = pd.DataFrame([['abc', 'def'], ['ghi', 'jkl']], index=index, columns=columns, dtype=object) with self.assertRaisesRegex(ValueError, "duplicated: Taxon"): _dataframe_to_tsv_taxonomy_format(df) def test_1_column(self): index = pd.Index(['seq1', 'seq2'], name='Feature ID', dtype=object) df = pd.DataFrame([['k__Bacteria; p__Proteobacteria'], ['k__Bacteria']], index=index, columns=['Taxon'], dtype=object) exp = ( 'Feature ID\tTaxon\n' 'seq1\tk__Bacteria; p__Proteobacteria\n' 'seq2\tk__Bacteria\n' ) obs = _dataframe_to_tsv_taxonomy_format(df) with obs.open() as fh: self.assertEqual(fh.read(), exp) def test_2_columns(self): index = pd.Index(['seq1', 'seq2'], name='Feature ID', dtype=object) df = pd.DataFrame([['k__Bacteria; p__Proteobacteria', '42'], ['k__Bacteria', '43']], index=index, columns=['Taxon', 'Confidence'], dtype=object) exp = ( 'Feature ID\tTaxon\tConfidence\n' 'seq1\tk__Bacteria; p__Proteobacteria\t42\n' 'seq2\tk__Bacteria\t43\n' ) obs = _dataframe_to_tsv_taxonomy_format(df) with obs.open() as fh: self.assertEqual(fh.read(), exp) class TestDNAFASTAFormatTransformers(TestPluginBase): package = 'q2_types.feature_data.tests' def test_dna_fasta_format_to_dna_iterator(self): input, obs = self.transform_format(DNAFASTAFormat, DNAIterator, filename='dna-sequences.fasta') exp = skbio.read(str(input), format='fasta', constructor=skbio.DNA) for 
observed, expected in zip(obs, exp): self.assertEqual(observed, expected) def test_dna_iterator_to_dna_fasta_format(self): transformer = self.get_transformer(DNAIterator, DNAFASTAFormat) filepath = self.get_data_path('dna-sequences.fasta') generator = skbio.read(filepath, format='fasta', constructor=skbio.DNA) input = DNAIterator(generator) obs = transformer(input) self.assertIsInstance(obs, DNAFASTAFormat) obs = skbio.read(str(obs), format='fasta', constructor=skbio.DNA) for act, exp in zip(obs, input): self.assertEqual(act, exp) def test_aln_dna_fasta_format_to_aln_dna_iterator(self): filename = 'aligned-dna-sequences.fasta' input, obs = self.transform_format(AlignedDNAFASTAFormat, AlignedDNAIterator, filename=filename) exp = skbio.read(str(input), format='fasta', constructor=skbio.DNA) for observed, expected in zip(obs, exp): self.assertEqual(observed, expected) def test_aln_dna_iterator_to_aln_dna_fasta_format(self): transformer = self.get_transformer(AlignedDNAIterator, AlignedDNAFASTAFormat) filepath = self.get_data_path('aligned-dna-sequences.fasta') generator = skbio.read(filepath, format='fasta', constructor=skbio.DNA) input = AlignedDNAIterator(generator) obs = transformer(input) self.assertIsInstance(obs, AlignedDNAFASTAFormat) obs = skbio.read(str(obs), format='fasta', constructor=skbio.DNA) for act, exp in zip(obs, input): self.assertEqual(act, exp) def test_pair_dna_sequences_directory_format_to_pair_dna_iterator(self): filenames = ('left-dna-sequences.fasta', 'right-dna-sequences.fasta') input, obs = self.transform_format(PairedDNASequencesDirectoryFormat, PairedDNAIterator, filenames=filenames) exp_left = skbio.read(self.get_data_path(filenames[0]), format='fasta', constructor=skbio.DNA) exp_right = skbio.read(self.get_data_path(filenames[1]), format='fasta', constructor=skbio.DNA) for act, exp in zip(obs, zip(exp_left, exp_right)): self.assertEqual(act, exp) self.assertIsInstance(obs, PairedDNAIterator) def test_pair_dna_iterator_to_pair_dna_sequences_directory_format(self): transformer = self.get_transformer(PairedDNAIterator, PairedDNASequencesDirectoryFormat) l_seqs = skbio.read(self.get_data_path('left-dna-sequences.fasta'), format='fasta', constructor=skbio.DNA) r_seqs = skbio.read(self.get_data_path('right-dna-sequences.fasta'), format='fasta', constructor=skbio.DNA) input = PairedDNAIterator(zip(l_seqs, r_seqs)) obs = transformer(input) obs_l = skbio.read('%s/left-dna-sequences.fasta' % str(obs), format='fasta', constructor=skbio.DNA) obs_r = skbio.read('%s/right-dna-sequences.fasta' % str(obs), format='fasta', constructor=skbio.DNA) for act, exp in zip(zip(obs_l, obs_r), zip(l_seqs, r_seqs)): self.assertEqual(act, exp) self.assertIsInstance(obs, PairedDNASequencesDirectoryFormat) def test_aligned_dna_fasta_format_to_skbio_tabular_msa(self): filename = 'aligned-dna-sequences.fasta' input, obs = self.transform_format(AlignedDNAFASTAFormat, skbio.TabularMSA, filename=filename) exp = skbio.TabularMSA.read(str(input), constructor=skbio.DNA, format='fasta') for act, exp in zip(obs, exp): self.assertEqual(act, exp) def test_skbio_tabular_msa_to_aligned_dna_fasta_format(self): filepath = self.get_data_path('aligned-dna-sequences.fasta') transformer = self.get_transformer(skbio.TabularMSA, AlignedDNAFASTAFormat) input = skbio.TabularMSA.read(filepath, constructor=skbio.DNA, format='fasta') obs = transformer(input) obs = skbio.TabularMSA.read(str(obs), constructor=skbio.DNA, format='fasta') for act, exp in zip(obs, input): self.assertEqual(act, exp) def 
test_dnafasta_format_to_series(self): _, obs = self.transform_format(DNAFASTAFormat, pd.Series, 'dna-sequences.fasta') obs = obs.astype(str) index = pd.Index(['SEQUENCE1', 'SEQUENCE2']) exp = pd.Series(['ACGTACGTACGTACGTACGTACGTACGTACGTACGTACGTACGTA' 'CGTACGTACGTACGTACGT', 'ACGTACGTACGTACGTACGTAC' 'GTACGTACGTACGTACGTACGTACGTACGTACGTACGTACGTACG' 'TACGTACGTACGTACGTACGT'], index=index, dtype=object) assert_series_equal(exp, obs) def test_series_to_dnafasta_format(self): transformer = self.get_transformer(pd.Series, DNAFASTAFormat) index = pd.Index(['SEQUENCE1', 'SEQUENCE2']) input = pd.Series(['ACGTACGTACGTACGTACGTACGTACGTACGTACGTACGTACGTA' 'CGTACGTACGTACGTACGT', 'ACGTACGTACGTACGTACGTAC' 'GTACGTACGTACGTACGTACGTACGTACGTACGTACGTACGTACG' 'TACGTACGTACGTACGTACGT'], index=index, dtype=object) obs = transformer(input) self.assertIsInstance(obs, DNAFASTAFormat) def test_dnafasta_format_with_duplicate_ids_to_series(self): with self.assertRaisesRegex(ValueError, 'unique.*SEQUENCE1'): self.transform_format(DNAFASTAFormat, pd.Series, 'dna-sequences-with-duplicate-ids.fasta') def test_dnafasta_format_to_metadata(self): _, obs = self.transform_format(DNAFASTAFormat, qiime2.Metadata, 'dna-sequences.fasta') index = pd.Index(['SEQUENCE1', 'SEQUENCE2'], name='Feature ID') exp_df = pd.DataFrame(['ACGTACGTACGTACGTACGTACGTACGTACGTACGTACGTACGTA' 'CGTACGTACGTACGTACGT', 'ACGTACGTACGTACGTACGTAC' 'GTACGTACGTACGTACGTACGTACGTACGTACGTACGTACGTACG' 'TACGTACGTACGTACGTACGT'], index=index, columns=['Sequence'], dtype=object) exp = qiime2.Metadata(exp_df) self.assertEqual(exp, obs) def test_aligned_dnafasta_format_to_metadata(self): _, obs = self.transform_format(AlignedDNAFASTAFormat, qiime2.Metadata, 'aligned-dna-sequences.fasta') index = pd.Index(['SEQUENCE1', 'SEQUENCE2'], name='Feature ID') exp_df = pd.DataFrame(['------------------------ACGTACGTACGTACGTACGTAC' 'GTACGTACGTACGTACGTACGTACGTACGTACGTACGTACGT', 'ACGTACGTACGTACGTACGTACGTACGTACGTACGTACGTACGTAC' 'GTACGTACGTACGTACGTACGTACGTACGTACGTACGTACGT'], index=index, columns=['Sequence'], dtype=object) exp = qiime2.Metadata(exp_df) self.assertEqual(exp, obs) def test_aligned_dnafasta_format_to_series(self): _, obs = self.transform_format(AlignedDNAFASTAFormat, pd.Series, 'aligned-dna-sequences.fasta') obs = obs.astype(str) index = pd.Index(['SEQUENCE1', 'SEQUENCE2']) exp = pd.Series(['------------------------ACGTACGTACGTACGTACGTAC' 'GTACGTACGTACGTACGTACGTACGTACGTACGTACGTACGT', 'ACGTACGTACGTACGTACGTACGTACGTACGTACGTACGTACGTAC' 'GTACGTACGTACGTACGTACGTACGTACGTACGTACGTACGT'], index=index, dtype=object) assert_series_equal(exp, obs) def test_series_to_aligned_dnafasta_format(self): transformer = self.get_transformer(pd.Series, AlignedDNAFASTAFormat) index = pd.Index(['SEQUENCE1', 'SEQUENCE2']) input = pd.Series(['------------------------ACGTACGTACGTACGTACGTAC' 'GTACGTACGTACGTACGTACGTACGTACGTACGTACGTACGT', 'ACGTACGTACGTACGTACGTACGTACGTACGTACGTACGTACGTAC' 'GTACGTACGTACGTACGTACGTACGTACGTACGTACGTACGT'], index=index, dtype=object) obs = transformer(input) self.assertIsInstance(obs, AlignedDNAFASTAFormat) obs_lines = list(open(str(obs))) self.assertEqual(obs_lines[0], '>SEQUENCE1\n') self.assertEqual(obs_lines[1], '------------------------ACGTACGTACGTACGTACGTAC' 'GTACGTACGTACGTACGTACGTACGTACGTACGTACGTACGT\n') self.assertEqual(obs_lines[2], '>SEQUENCE2\n') self.assertEqual(obs_lines[3], 'ACGTACGTACGTACGTACGTACGTACGTACGTACGTACGTACGTAC' 'GTACGTACGTACGTACGTACGTACGTACGTACGTACGTACGT\n') def test_aligned_dna_fasta_format_to_dna_iterator(self): input, obs = self.transform_format( 
AlignedDNAFASTAFormat, DNAIterator, filename='aligned-dna-sequences.fasta') exp = skbio.read(str(input), format='fasta', constructor=skbio.DNA) for observed, expected in zip(obs, exp): self.assertEqual(observed, expected) class TestRNAFASTAFormatTransformers(TestPluginBase): package = 'q2_types.feature_data.tests' def test_rna_fasta_format_to_rna_iterator(self): input, obs = self.transform_format(RNAFASTAFormat, RNAIterator, filename='rna-sequences.fasta') exp = skbio.read(str(input), format='fasta', constructor=skbio.RNA) for observed, expected in zip(obs, exp): self.assertEqual(observed, expected) def test_rna_iterator_to_rna_fasta_format(self): transformer = self.get_transformer(RNAIterator, RNAFASTAFormat) filepath = self.get_data_path('rna-sequences.fasta') generator = skbio.read(filepath, format='fasta', constructor=skbio.RNA) input = RNAIterator(generator) obs = transformer(input) self.assertIsInstance(obs, RNAFASTAFormat) obs = skbio.read(str(obs), format='fasta', constructor=skbio.RNA) for act, exp in zip(obs, input): self.assertEqual(act, exp) def test_aln_rna_fasta_format_to_aln_rna_iterator(self): filename = 'aligned-rna-sequences.fasta' input, obs = self.transform_format(AlignedRNAFASTAFormat, AlignedRNAIterator, filename=filename) exp = skbio.read(str(input), format='fasta', constructor=skbio.RNA) for observed, expected in zip(obs, exp): self.assertEqual(observed, expected) def test_aln_rna_iterator_to_aln_rna_fasta_format(self): transformer = self.get_transformer(AlignedRNAIterator, AlignedRNAFASTAFormat) filepath = self.get_data_path('aligned-rna-sequences.fasta') generator = skbio.read(filepath, format='fasta', constructor=skbio.RNA) input = AlignedRNAIterator(generator) obs = transformer(input) self.assertIsInstance(obs, AlignedRNAFASTAFormat) obs = skbio.read(str(obs), format='fasta', constructor=skbio.RNA) for act, exp in zip(obs, input): self.assertEqual(act, exp) def test_aligned_rna_fasta_format_to_skbio_tabular_msa(self): filename = 'aligned-rna-sequences.fasta' input, obs = self.transform_format(AlignedRNAFASTAFormat, skbio.TabularMSA, filename=filename) exp = skbio.TabularMSA.read(str(input), constructor=skbio.RNA, format='fasta') for act, exp in zip(obs, exp): self.assertEqual(act, exp) def test_skbio_tabular_msa_to_aligned_rna_fasta_format(self): filepath = self.get_data_path('aligned-rna-sequences.fasta') transformer = self.get_transformer(skbio.TabularMSA, AlignedRNAFASTAFormat) input = skbio.TabularMSA.read(filepath, constructor=skbio.RNA, format='fasta') obs = transformer(input) obs = skbio.TabularMSA.read(str(obs), constructor=skbio.RNA, format='fasta') for act, exp in zip(obs, input): self.assertEqual(act, exp) def test_rnafasta_format_to_series(self): _, obs = self.transform_format(RNAFASTAFormat, pd.Series, 'rna-sequences.fasta') obs = obs.astype(str) index = pd.Index(['RNASEQUENCE1', 'RNASEQUENCE2']) exp = pd.Series(['ACGUACGUACGUACGUACGUACGUACGUACGUACGUACGUACGUA' 'CGUACGUACGUACGUACGU', 'ACGUACGUACGUACGUACGUAC' 'GUACGUACGUACGUACGUACGUACGUACGUACGUACGUACGUACG' 'UACGUACGUACGUACGUACGU'], index=index, dtype=object) assert_series_equal(exp, obs) def test_series_to_rnafasta_format(self): transformer = self.get_transformer(pd.Series, RNAFASTAFormat) index = pd.Index(['RNASEQUENCE1', 'RNASEQUENCE2']) input = pd.Series(['ACGUACGUACGUACGUACGUACGUACGUACGUACGUACGUACGUA' 'CGUACGUACGUACGUACGU', 'ACGUACGUACGUACGUACGUAC' 'GUACGUACGUACGUACGUACGUACGUACGUACGUACGUACGUACG' 'UACGUACGUACGUACGUACGU'], index=index, dtype=object) obs = transformer(input) 
self.assertIsInstance(obs, RNAFASTAFormat) def test_rnafasta_format_with_duplicate_ids_to_series(self): with self.assertRaisesRegex(ValueError, 'unique.*RNASEQUENCE1'): self.transform_format(RNAFASTAFormat, pd.Series, 'rna-sequences-with-duplicate-ids.fasta') def test_rnafasta_format_to_metadata(self): _, obs = self.transform_format(RNAFASTAFormat, qiime2.Metadata, 'rna-sequences.fasta') index = pd.Index(['RNASEQUENCE1', 'RNASEQUENCE2'], name='Feature ID') exp_df = pd.DataFrame(['ACGUACGUACGUACGUACGUACGUACGUACGUACGUACGUACGUA' 'CGUACGUACGUACGUACGU', 'ACGUACGUACGUACGUACGUAC' 'GUACGUACGUACGUACGUACGUACGUACGUACGUACGUACGUACG' 'UACGUACGUACGUACGUACGU'], index=index, columns=['Sequence'], dtype=object) exp = qiime2.Metadata(exp_df) self.assertEqual(exp, obs) def test_aligned_rnafasta_format_to_metadata(self): _, obs = self.transform_format(AlignedRNAFASTAFormat, qiime2.Metadata, 'aligned-rna-sequences.fasta') index = pd.Index(['RNASEQUENCE1', 'RNASEQUENCE2'], name='Feature ID') exp_df = pd.DataFrame(['------------------------ACGUACGUACGUACGUACGUAC' 'GUACGUACGUACGUACGUACGUACGUACGUACGUACGUACGU', 'ACGUACGUACGUACGUACGUACGUACGUACGUACGUACGUACGUAC' 'GUACGUACGUACGUACGUACGUACGUACGUACGUACGUACGU'], index=index, columns=['Sequence'], dtype=object) exp = qiime2.Metadata(exp_df) self.assertEqual(exp, obs) def test_aligned_rnafasta_format_to_series(self): _, obs = self.transform_format(AlignedRNAFASTAFormat, pd.Series, 'aligned-rna-sequences.fasta') obs = obs.astype(str) index = pd.Index(['RNASEQUENCE1', 'RNASEQUENCE2']) exp = pd.Series(['------------------------ACGUACGUACGUACGUACGUAC' 'GUACGUACGUACGUACGUACGUACGUACGUACGUACGUACGU', 'ACGUACGUACGUACGUACGUACGUACGUACGUACGUACGUACGUAC' 'GUACGUACGUACGUACGUACGUACGUACGUACGUACGUACGU'], index=index, dtype=object) assert_series_equal(exp, obs) def test_series_to_aligned_rnafasta_format(self): transformer = self.get_transformer(pd.Series, AlignedRNAFASTAFormat) index = pd.Index(['RNASEQUENCE1', 'RNASEQUENCE2']) input = pd.Series(['------------------------ACGUACGUACGUACGUACGUAC' 'GUACGUACGUACGUACGUACGUACGUACGUACGUACGUACGU', 'ACGUACGUACGUACGUACGUACGUACGUACGUACGUACGUACGUAC' 'GUACGUACGUACGUACGUACGUACGUACGUACGUACGUACGU'], index=index, dtype=object) obs = transformer(input) self.assertIsInstance(obs, AlignedRNAFASTAFormat) obs_lines = list(open(str(obs))) self.assertEqual(obs_lines[0], '>RNASEQUENCE1\n') self.assertEqual(obs_lines[1], '------------------------ACGUACGUACGUACGUACGUAC' 'GUACGUACGUACGUACGUACGUACGUACGUACGUACGUACGU\n') self.assertEqual(obs_lines[2], '>RNASEQUENCE2\n') self.assertEqual(obs_lines[3], 'ACGUACGUACGUACGUACGUACGUACGUACGUACGUACGUACGUAC' 'GUACGUACGUACGUACGUACGUACGUACGUACGUACGUACGU\n') def test_aligned_rna_fasta_format_to_rna_iterator(self): input, obs = self.transform_format( AlignedRNAFASTAFormat, RNAIterator, filename='aligned-rna-sequences.fasta') exp = skbio.read(str(input), format='fasta', constructor=skbio.RNA) for observed, expected in zip(obs, exp): self.assertEqual(observed, expected) class TestDifferentialTransformer(TestPluginBase): package = 'q2_types.feature_data.tests' def test_differential_to_df(self): _, obs = self.transform_format(DifferentialFormat, pd.DataFrame, filename='differentials.tsv') # sniff to see if the first 4 feature ids are the same exp = ['F0', 'F1', 'F2', 'F3'] obs = list(obs.index[:4]) self.assertListEqual(exp, obs) def test_differential_to_md(self): _, obs = self.transform_format(DifferentialFormat, qiime2.Metadata, filename='differentials.tsv') obs = obs.to_dataframe() # sniff to see if the first 4 feature ids are the same exp 
= ['F0', 'F1', 'F2', 'F3'] obs = list(obs.index[:4]) self.assertListEqual(exp, obs) def test_df_to_differential(self): transformer = self.get_transformer(pd.DataFrame, DifferentialFormat) index = pd.Index(['SEQUENCE1', 'SEQUENCE2', 'SEQUENCE3']) index.name = 'featureid' input = pd.DataFrame( [-1.3, 0.1, 1.2], index=index, columns=['differential'], dtype=float) obs = transformer(input) self.assertIsInstance(obs, DifferentialFormat) class TestProteinFASTAFormatTransformers(TestPluginBase): package = 'q2_types.feature_data.tests' def test_protein_fasta_format_to_protein_iterator(self): input, obs = self.transform_format(ProteinFASTAFormat, ProteinIterator, filename='protein-sequences.fasta') exp = skbio.read(str(input), format='fasta', constructor=skbio.Protein) for observed, expected in zip(obs, exp): self.assertEqual(observed, expected) def test_protein_iterator_to_protein_fasta_format(self): transformer = self.get_transformer( ProteinIterator, ProteinFASTAFormat) filepath = self.get_data_path('protein-sequences.fasta') generator = skbio.read( filepath, format='fasta', constructor=skbio.Protein) input = ProteinIterator(generator) obs = transformer(input) self.assertIsInstance(obs, ProteinFASTAFormat) obs = skbio.read(str(obs), format='fasta', constructor=skbio.Protein) for act, exp in zip(obs, input): self.assertEqual(act, exp) def test_aln_protein_fasta_format_to_aln_protein_iterator(self): filename = 'aligned-protein-sequences.fasta' input, obs = self.transform_format(AlignedProteinFASTAFormat, AlignedProteinIterator, filename=filename) exp = skbio.read(str(input), format='fasta', constructor=skbio.Protein) for observed, expected in zip(obs, exp): self.assertEqual(observed, expected) def test_aln_protein_iterator_to_aln_protein_fasta_format(self): transformer = self.get_transformer(AlignedProteinIterator, AlignedProteinFASTAFormat) filepath = self.get_data_path('aligned-protein-sequences.fasta') generator = skbio.read( filepath, format='fasta', constructor=skbio.Protein) input = AlignedProteinIterator(generator) obs = transformer(input) self.assertIsInstance(obs, AlignedProteinFASTAFormat) obs = skbio.read(str(obs), format='fasta', constructor=skbio.Protein) for act, exp in zip(obs, input): self.assertEqual(act, exp) def test_aligned_protein_fasta_format_to_skbio_tabular_msa(self): filename = 'aligned-protein-sequences.fasta' input, obs = self.transform_format(AlignedProteinFASTAFormat, skbio.TabularMSA, filename=filename) exp = skbio.TabularMSA.read(str(input), constructor=skbio.Protein, format='fasta') for act, exp in zip(obs, exp): self.assertEqual(act, exp) def test_skbio_tabular_msa_to_aligned_protein_fasta_format(self): filepath = self.get_data_path('aligned-protein-sequences.fasta') transformer = self.get_transformer(skbio.TabularMSA, AlignedProteinFASTAFormat) input = skbio.TabularMSA.read(filepath, constructor=skbio.Protein, format='fasta') obs = transformer(input) obs = skbio.TabularMSA.read(str(obs), constructor=skbio.Protein, format='fasta') for act, exp in zip(obs, input): self.assertEqual(act, exp) def test_proteinfasta_format_to_series(self): _, obs = self.transform_format(ProteinFASTAFormat, pd.Series, 'protein-sequences.fasta') obs = obs.astype(str) index = pd.Index(['sequence1', 'sequence2']) exp = pd.Series(['MTTRDLTAAQFNETIQSSDMVLVDYWASWCGPCRAFAPTFAESSEK' 'HPDVVHAKVDTEAERELAAAAQIR', 'MVKQIESKTAFQEALDAAGDKLVVVDFSATWCGPCKMIKPFFHSLS' 'EKYSNVIFLEVDVDDCQDVASECEVKCMPTFQFFKKGQKVGEFSGAN'], index=index, dtype=object) assert_series_equal(exp, obs) def 
test_series_to_proteinfasta_format(self): transformer = self.get_transformer(pd.Series, ProteinFASTAFormat) index = pd.Index(['sequence1', 'sequence2']) input = pd.Series(['MTTRDLTAAQFNETIQSSDMVLVDYWASWCGPCRAFAPTFAESSEK' 'HPDVVHAKVDTEAERELAAAAQIR', 'MVKQIESKTAFQEALDAAGDKLVVVDFSATWCGPCKMIKPFFHSLS' 'EKYSNVIFLEVDVDDCQDVASECEVKCMPTFQFFKKGQKVGEFSGAN'], index=index, dtype=object) obs = transformer(input) self.assertIsInstance(obs, ProteinFASTAFormat) def test_proteinfasta_format_with_duplicate_ids_to_series(self): with self.assertRaisesRegex(ValueError, 'unique.*sequence1'): self.transform_format( ProteinFASTAFormat, pd.Series, 'protein-sequences-duplicate-ids.fasta') def test_proteinfasta_format_to_metadata(self): _, obs = self.transform_format(ProteinFASTAFormat, qiime2.Metadata, 'protein-sequences.fasta') index = pd.Index(['sequence1', 'sequence2'], name='Feature ID') exp_df = pd.DataFrame(['MTTRDLTAAQFNETIQSSDMVLVDYWASWCGPCRA' 'FAPTFAESSEKHPDVVHAKVDTEAERELAAAAQIR', 'MVKQIESKTAFQEALDAAGDKLVVVDFSATWCGPC' 'KMIKPFFHSLSEKYSNVIFLEVDVDDCQDVASECE' 'VKCMPTFQFFKKGQKVGEFSGAN'], index=index, columns=['Sequence'], dtype=object) exp = qiime2.Metadata(exp_df) self.assertEqual(exp, obs) def test_aligned_proteinfasta_format_to_metadata(self): _, obs = self.transform_format(AlignedProteinFASTAFormat, qiime2.Metadata, 'aligned-protein-sequences.fasta') index = pd.Index(['sequence1', 'sequence2'], name='Feature ID') exp_df = pd.DataFrame(['------------------------VDFSATWCGPC' 'KMIKPFFHSLSEKYSNVIFLEVDVDDCQDVASECE' 'VKCMPTFQFFKKGQKVGEFSGAN', 'MVKQIESKTAFQEALDAAGDKLVVVDFSATWCGPC' 'KMIKPFFHSLSEKYSNVIFLEVDVDDCQDVASECE' 'VKCMPTFQ-------VGEFSGAN'], index=index, columns=['Sequence'], dtype=object) exp = qiime2.Metadata(exp_df) self.assertEqual(exp, obs) def test_aligned_proteinfasta_format_to_series(self): _, obs = self.transform_format(AlignedProteinFASTAFormat, pd.Series, 'aligned-protein-sequences.fasta') obs = obs.astype(str) index = pd.Index(['sequence1', 'sequence2']) exp = pd.Series(['------------------------VDFSATWCGPC' 'KMIKPFFHSLSEKYSNVIFLEVDVDDCQDVASECE' 'VKCMPTFQFFKKGQKVGEFSGAN', 'MVKQIESKTAFQEALDAAGDKLVVVDFSATWCGPC' 'KMIKPFFHSLSEKYSNVIFLEVDVDDCQDVASECE' 'VKCMPTFQ-------VGEFSGAN'], index=index, dtype=object) assert_series_equal(exp, obs) def test_series_to_aligned_proteinfasta_format(self): transformer = self.get_transformer( pd.Series, AlignedProteinFASTAFormat) index = pd.Index(['sequence1', 'sequence2']) input = pd.Series(['------------------------VDFSATWCGPC' 'KMIKPFFHSLSEKYSNVIFLEVDVDDCQDVASECE' 'VKCMPTFQFFKKGQKVGEFSGAN', 'MVKQIESKTAFQEALDAAGDKLVVVDFSATWCGPC' 'KMIKPFFHSLSEKYSNVIFLEVDVDDCQDVASECE' 'VKCMPTFQ-------VGEFSGAN'], index=index, dtype=object) obs = transformer(input) self.assertIsInstance(obs, AlignedProteinFASTAFormat) obs_lines = list(open(str(obs))) self.assertEqual(obs_lines[0], '>sequence1\n') self.assertEqual(obs_lines[1], '------------------------VDFSATWCGPC' 'KMIKPFFHSLSEKYSNVIFLEVDVDDCQDVASECE' 'VKCMPTFQFFKKGQKVGEFSGAN\n') self.assertEqual(obs_lines[2], '>sequence2\n') self.assertEqual(obs_lines[3], 'MVKQIESKTAFQEALDAAGDKLVVVDFSATWCGPC' 'KMIKPFFHSLSEKYSNVIFLEVDVDDCQDVASECE' 'VKCMPTFQ-------VGEFSGAN\n') def test_aligned_protein_fasta_format_to_protein_iterator(self): input, obs = self.transform_format( AlignedProteinFASTAFormat, ProteinIterator, filename='aligned-protein-sequences.fasta') exp = skbio.read(str(input), format='fasta', constructor=skbio.Protein) for observed, expected in zip(obs, exp): self.assertEqual(observed, expected) class TestBLAST6Transformer(TestPluginBase): 
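# BLAST6Format corresponds to BLAST's tabular report (-outfmt 6), which by
# default carries the twelve columns from qseqid through bitscore that the
# tests below assert on.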
package = 'q2_types.feature_data.tests' def test_blast6_to_df(self): _, obs = self.transform_format(BLAST6Format, pd.DataFrame, filename='blast6.tsv') self.assertEqual(obs.shape[0], 2) self.assertListEqual(obs.columns.tolist(), [ 'qseqid', 'sseqid', 'pident', 'length', 'mismatch', 'gapopen', 'qstart', 'qend', 'sstart', 'send', 'evalue', 'bitscore']) self.assertListEqual(obs['pident'].tolist(), [100.0, 99.38]) self.assertListEqual(obs['mismatch'].tolist(), [0.0, 1.0]) self.assertListEqual(obs['bitscore'].tolist(), [330.0, 329.0]) def test_df_to_blast6(self): transformer = self.get_transformer(pd.DataFrame, BLAST6Format) columns = [ 'qseqid', 'sseqid', 'pident', 'length', 'mismatch', 'gapopen', 'qstart', 'qend', 'sstart', 'send', 'evalue', 'bitscore'] data = [ ['moaC', 'gi|15800534|ref|NP_286546.1|', 100.0, 161.0, 0.0, 0.0, 1.0, 161.0, 1.0, 161.0, 2e-114, 330.0], ['moaC', 'gi|170768970|ref|ZP_02903423.1|', 99.38, 161.0, 1.0, 0.0, 1.0, 161.0, 1.0, 161.0, 8e-114, 329.0]] input = pd.DataFrame(data=data, columns=columns) obs = transformer(input) self.assertIsInstance(obs, BLAST6Format) if __name__ == '__main__': unittest.main() q2-types-2021.8.0/q2_types/feature_data/tests/test_type.py000066400000000000000000000116021412142116700233550ustar00rootroot00000000000000# ---------------------------------------------------------------------------- # Copyright (c) 2016-2021, QIIME 2 development team. # # Distributed under the terms of the Modified BSD License. # # The full license is in the file LICENSE, distributed with this software. # ---------------------------------------------------------------------------- import unittest from q2_types.feature_data import ( FeatureData, Taxonomy, Sequence, PairedEndSequence, AlignedSequence, Differential, TSVTaxonomyDirectoryFormat, DNASequencesDirectoryFormat, DifferentialDirectoryFormat, PairedDNASequencesDirectoryFormat, AlignedDNASequencesDirectoryFormat, ProteinSequencesDirectoryFormat, AlignedProteinSequencesDirectoryFormat, ProteinSequence, AlignedProteinSequence, RNASequence, RNASequencesDirectoryFormat, AlignedRNASequencesDirectoryFormat, AlignedRNASequence, PairedRNASequencesDirectoryFormat, PairedEndRNASequence, BLAST6, BLAST6DirectoryFormat ) from qiime2.plugin.testing import TestPluginBase class TestTypes(TestPluginBase): package = 'q2_types.feature_data.tests' def test_feature_data_semantic_type_registration(self): self.assertRegisteredSemanticType(FeatureData) def test_taxonomy_semantic_type_registration(self): self.assertRegisteredSemanticType(Taxonomy) def test_sequence_semantic_type_registration(self): self.assertRegisteredSemanticType(Sequence) def test_paired_end_sequence_semantic_type_registration(self): self.assertRegisteredSemanticType(PairedEndSequence) def test_aligned_sequence_semantic_type_registration(self): self.assertRegisteredSemanticType(AlignedSequence) def test_differential_semantic_type_registration(self): self.assertRegisteredSemanticType(Differential) def test_protein_sequence_semantic_type_registration(self): self.assertRegisteredSemanticType(ProteinSequence) def test_aligned_protein_sequence_semantic_type_registration(self): self.assertRegisteredSemanticType(AlignedProteinSequence) def test_differential_semantic_type_to_format_registration(self): self.assertSemanticTypeRegisteredToFormat( FeatureData[Differential], DifferentialDirectoryFormat) def test_taxonomy_semantic_type_to_format_registration(self): self.assertSemanticTypeRegisteredToFormat( FeatureData[Taxonomy], TSVTaxonomyDirectoryFormat) def 
test_sequence_semantic_type_to_format_registration(self): self.assertSemanticTypeRegisteredToFormat( FeatureData[Sequence], DNASequencesDirectoryFormat) def test_paired_end_sequence_semantic_type_to_format_registration(self): self.assertSemanticTypeRegisteredToFormat( FeatureData[PairedEndSequence], PairedDNASequencesDirectoryFormat ) def test_aligned_sequence_semantic_type_to_format_registration(self): self.assertSemanticTypeRegisteredToFormat( FeatureData[AlignedSequence], AlignedDNASequencesDirectoryFormat ) def test_protein_sequence_semantic_type_to_format_registration(self): self.assertSemanticTypeRegisteredToFormat( FeatureData[ProteinSequence], ProteinSequencesDirectoryFormat ) def test_aln_protein_sequence_semantic_type_to_format_registration(self): self.assertSemanticTypeRegisteredToFormat( FeatureData[AlignedProteinSequence], AlignedProteinSequencesDirectoryFormat ) def test_rna_sequence_semantic_type_registration(self): self.assertRegisteredSemanticType(RNASequence) def test_aligned_rna_sequence_semantic_type_registration(self): self.assertRegisteredSemanticType(AlignedRNASequence) def test_paired_end_rna_sequence_semantic_type_registration(self): self.assertRegisteredSemanticType(PairedEndRNASequence) def test_rna_sequence_semantic_type_to_format_registration(self): self.assertSemanticTypeRegisteredToFormat( FeatureData[RNASequence], RNASequencesDirectoryFormat) def test_paired_end_rna_sequence_semantic_type_to_format_registration( self): self.assertSemanticTypeRegisteredToFormat( FeatureData[PairedEndRNASequence], PairedRNASequencesDirectoryFormat ) def test_aligned_rna_sequence_semantic_type_to_format_registration(self): self.assertSemanticTypeRegisteredToFormat( FeatureData[AlignedRNASequence], AlignedRNASequencesDirectoryFormat ) def test_blast6_semantic_type_registration(self): self.assertRegisteredSemanticType(BLAST6) def test_blast6_semantic_type_to_format_registration(self): self.assertSemanticTypeRegisteredToFormat( FeatureData[BLAST6], BLAST6DirectoryFormat) if __name__ == "__main__": unittest.main() q2-types-2021.8.0/q2_types/feature_table/000077500000000000000000000000001412142116700177775ustar00rootroot00000000000000q2-types-2021.8.0/q2_types/feature_table/__init__.py000066400000000000000000000016771412142116700221230ustar00rootroot00000000000000# ---------------------------------------------------------------------------- # Copyright (c) 2016-2021, QIIME 2 development team. # # Distributed under the terms of the Modified BSD License. # # The full license is in the file LICENSE, distributed with this software. # ---------------------------------------------------------------------------- import importlib from ._format import (BIOMV100Format, BIOMV210Format, BIOMV100DirFmt, BIOMV210DirFmt) from ._type import (FeatureTable, Frequency, RelativeFrequency, PresenceAbsence, Composition, Balance, PercentileNormalized, Design) __all__ = ['BIOMV100Format', 'BIOMV100DirFmt', 'FeatureTable', 'Frequency', 'RelativeFrequency', 'PresenceAbsence', 'BIOMV210Format', 'BIOMV210DirFmt', 'Composition', 'Balance', 'PercentileNormalized', 'Design'] importlib.import_module('q2_types.feature_table._transformer') q2-types-2021.8.0/q2_types/feature_table/_format.py000066400000000000000000000067141412142116700220100ustar00rootroot00000000000000# ---------------------------------------------------------------------------- # Copyright (c) 2016-2021, QIIME 2 development team. # # Distributed under the terms of the Modified BSD License. 
# # The full license is in the file LICENSE, distributed with this software. # ---------------------------------------------------------------------------- import ijson import h5py import biom import qiime2.plugin.model as model from ..plugin_setup import plugin, citations class BIOMV100Format(model.TextFileFormat): top_level_keys = { 'id', 'format', 'format_url', 'type', 'generated_by', 'date', 'rows', 'columns', 'matrix_type', 'matrix_element_type', 'shape', 'data', 'comment' } def sniff(self): # Can't self.open(mode='rb'), so we defer to the backing pathlib object with self.path.open(mode='rb') as fh: try: parser = ijson.parse(fh) for prefix, event, value in parser: if (prefix, event) == ('', 'map_key'): # `format_url` seems pretty unique to BIOM 1.0. if value == 'format_url': return True elif value not in self.top_level_keys: return False except (ijson.JSONError, UnicodeDecodeError): pass return False class BIOMV210Format(model.BinaryFileFormat): # minimum requirements as described by # http://biom-format.org/documentation/format_versions/biom-2.1.html groups = {'sample', 'sample/matrix', 'sample/metadata', 'sample/group-metadata', 'observation', 'observation/matrix', 'observation/metadata', 'observation/group-metadata'} datasets = {'sample/ids', 'sample/matrix/data', 'sample/matrix/indptr', 'sample/matrix/indices', 'observation/ids', 'observation/matrix/data', 'observation/matrix/indptr', 'observation/matrix/indices'} attrs = {'id', 'type', 'format-url', 'format-version', 'generated-by', 'creation-date', 'shape', 'nnz'} def open(self): return h5py.File(str(self), mode=self._mode) def sniff(self): try: with self.open() as fh: for grp in self.groups: if grp not in fh: return False for ds in self.datasets: if ds not in fh: return False for attr in self.attrs: if attr not in fh.attrs: return False return True except Exception: return False BIOMV100DirFmt = model.SingleFileDirectoryFormat('BIOMV100DirFmt', 'feature-table.biom', BIOMV100Format) BIOMV210DirFmt = model.SingleFileDirectoryFormat('BIOMV210DirFmt', 'feature-table.biom', BIOMV210Format) plugin.register_views(BIOMV100Format, BIOMV210Format, BIOMV100DirFmt, BIOMV210DirFmt, biom.Table, citations=[citations['mcdonald2012biological']]) q2-types-2021.8.0/q2_types/feature_table/_transformer.py000066400000000000000000000113371412142116700230570ustar00rootroot00000000000000# ---------------------------------------------------------------------------- # Copyright (c) 2016-2021, QIIME 2 development team. # # Distributed under the terms of the Modified BSD License. # # The full license is in the file LICENSE, distributed with this software. # ---------------------------------------------------------------------------- import json import biom import pandas as pd import qiime2 from . import BIOMV100Format, BIOMV210Format from ..plugin_setup import plugin # NOTE: In the readers and writers for BIOM v1 and v2 below, metadata must be # ignored on both axes because BIOM v1 and v2 are incompatible with certain # types of metadata. We need to support both versions of the format and # converting between them (e.g. to support existing QIIME 1 data). We can # ignore metadata because it is represented as different types in QIIME 2, and # thus is stored in separate artifacts. `biom.Table` does not have an API to # delete/unset metadata on its axes, so we construct a new `biom.Table` object # from the existing table's matrix data and axis IDs (see `_drop_axis_metadata` # below). 
This workaround should be fairly efficient because the matrix data # and axis IDs aren't copied; only a new `biom.Table` reference is created and # some ID indexing operations are performed. # # TODO: Revisit this workaround when `biom.Table` supports deletion of # metadata: https://github.com/biocore/biom-format/issues/708 def _drop_axis_metadata(table): return biom.Table(table.matrix_data, observation_ids=table.ids(axis='observation'), sample_ids=table.ids(axis='sample')) def _get_generated_by(): return 'qiime2 %s' % qiime2.__version__ def _parse_biom_table_v100(ff): with ff.open() as fh: table = biom.Table.from_json(json.load(fh)) return _drop_axis_metadata(table) def _parse_biom_table_v210(ff): with ff.open() as fh: table = biom.Table.from_hdf5(fh) return _drop_axis_metadata(table) def _table_to_dataframe(table: biom.Table) -> pd.DataFrame: array = table.matrix_data.toarray().T sample_ids = table.ids(axis='sample') feature_ids = table.ids(axis='observation') return pd.DataFrame(array, index=sample_ids, columns=feature_ids) def _table_to_metadata(table: biom.Table) -> qiime2.Metadata: table = _table_to_dataframe(table) table.index.name = 'id' return qiime2.Metadata(table) def _table_to_v210(data): ff = BIOMV210Format() with ff.open() as fh: data.to_hdf5(fh, generated_by=_get_generated_by()) return ff def _dataframe_to_table(df): if df.index.inferred_type != 'string': raise TypeError("Please provide a DataFrame with a string-based Index") return biom.Table(df.T.values, observation_ids=df.columns, sample_ids=df.index) @plugin.register_transformer def _2(ff: BIOMV100Format) -> biom.Table: return _parse_biom_table_v100(ff) # Note: this is an old TODO and should be revisited with the new view system. # TODO: this always returns a pd.DataFrame of floats due to how biom loads # tables, and we don't know what the dtype of the DataFrame should be. It would # be nice to have support for a semantic-type override that specifies further # transformations (e.g. converting from floats to ints or bools as # appropriate). 
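# As a stopgap (illustrative only; `artifact` names a hypothetical caller-side
# qiime2.Artifact, not anything defined in this module), integer counts can be
# recovered by casting the transformed view after the fact:
#
#     df = artifact.view(pd.DataFrame)  # comes back as floats, per the note above
#     counts = df.astype(int)           # caller-side cast to integer counts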
@plugin.register_transformer def _3(ff: BIOMV100Format) -> pd.DataFrame: table = _parse_biom_table_v100(ff) return _table_to_dataframe(table) @plugin.register_transformer def _4(ff: BIOMV210Format) -> pd.DataFrame: table = _parse_biom_table_v210(ff) return _table_to_dataframe(table) @plugin.register_transformer def _5(ff: BIOMV210Format) -> biom.Table: return _parse_biom_table_v210(ff) @plugin.register_transformer def _6(data: biom.Table) -> BIOMV210Format: data = _drop_axis_metadata(data) return _table_to_v210(data) @plugin.register_transformer def _7(data: biom.Table) -> pd.DataFrame: return _table_to_dataframe(data) @plugin.register_transformer def _8(ff: BIOMV100Format) -> BIOMV210Format: data = _parse_biom_table_v100(ff) return _table_to_v210(data) @plugin.register_transformer def _9(df: pd.DataFrame) -> biom.Table: return _dataframe_to_table(df) @plugin.register_transformer def _10(df: pd.DataFrame) -> BIOMV210Format: return _table_to_v210(_dataframe_to_table(df)) @plugin.register_transformer def _12(data: biom.Table) -> qiime2.Metadata: return _table_to_metadata(data) @plugin.register_transformer def _13(ff: BIOMV100Format) -> qiime2.Metadata: table = _parse_biom_table_v100(ff) return _table_to_metadata(table) @plugin.register_transformer def _14(ff: BIOMV210Format) -> qiime2.Metadata: table = _parse_biom_table_v210(ff) return _table_to_metadata(table) q2-types-2021.8.0/q2_types/feature_table/_type.py000066400000000000000000000034411412142116700214730ustar00rootroot00000000000000# ---------------------------------------------------------------------------- # Copyright (c) 2016-2021, QIIME 2 development team. # # Distributed under the terms of the Modified BSD License. # # The full license is in the file LICENSE, distributed with this software. # ---------------------------------------------------------------------------- from qiime2.plugin import SemanticType from ..plugin_setup import plugin from . import BIOMV210DirFmt FeatureTable = SemanticType('FeatureTable', field_names='content') Frequency = SemanticType('Frequency', variant_of=FeatureTable.field['content']) RelativeFrequency = SemanticType('RelativeFrequency', variant_of=FeatureTable.field['content']) PresenceAbsence = SemanticType('PresenceAbsence', variant_of=FeatureTable.field['content']) Composition = SemanticType('Composition', variant_of=FeatureTable.field['content']) Balance = SemanticType('Balance', variant_of=FeatureTable.field['content']) PercentileNormalized = SemanticType('PercentileNormalized', variant_of=FeatureTable.field['content']) # Design is the type of design matrices for linear regressions that have # been transformed/coded. Design = SemanticType('Design', variant_of=FeatureTable.field['content']) plugin.register_semantic_types(FeatureTable, Frequency, RelativeFrequency, PresenceAbsence, Balance, Composition, PercentileNormalized, Design) plugin.register_semantic_type_to_format( FeatureTable[Frequency | RelativeFrequency | PresenceAbsence | Balance | Composition | PercentileNormalized | Design], artifact_format=BIOMV210DirFmt ) q2-types-2021.8.0/q2_types/feature_table/tests/000077500000000000000000000000001412142116700211415ustar00rootroot00000000000000q2-types-2021.8.0/q2_types/feature_table/tests/__init__.py000066400000000000000000000005351412142116700232550ustar00rootroot00000000000000# ---------------------------------------------------------------------------- # Copyright (c) 2016-2021, QIIME 2 development team. # # Distributed under the terms of the Modified BSD License. 
# # The full license is in the file LICENSE, distributed with this software. # ---------------------------------------------------------------------------- q2-types-2021.8.0/q2_types/feature_table/tests/data/000077500000000000000000000000001412142116700220525ustar00rootroot00000000000000q2-types-2021.8.0/q2_types/feature_table/tests/data/feature-table_v100.biom000066400000000000000000000032421412142116700262110ustar00rootroot00000000000000{"id": "None","format": "Biological Observation Matrix 1.0.0","format_url": "http://biom-format.org","type": "OTU table","generated_by": "QIIME 1.6.0-dev","date": "2013-02-09T09:30:11.550590","matrix_type": "sparse","matrix_element_type": "int","shape": [14, 9],"data": [[0,0,20],[0,1,18],[0,2,18],[0,3,22],[0,4,4],[1,4,1],[2,0,1],[2,4,1],[2,5,1],[3,6,1],[4,4,1],[5,7,20],[6,4,1],[7,4,1],[7,5,1],[8,4,1],[8,6,2],[8,8,3],[9,7,2],[10,5,1],[11,4,9],[11,5,20],[11,6,1],[11,8,4],[12,4,3],[12,6,19],[12,8,15],[13,0,1],[13,1,4],[13,2,4]],"rows": [{"id": "295053", "metadata": {"taxonomy": ["k__Bacteria"]}},{"id": "42684", "metadata": {"taxonomy": ["k__Bacteria", "p__Proteobacteria"]}},{"id": "None11", "metadata": {"taxonomy": ["Unclassified"]}},{"id": "None10", "metadata": {"taxonomy": ["Unclassified"]}},{"id": "None7", "metadata": {"taxonomy": ["Unclassified"]}},{"id": "None6", "metadata": {"taxonomy": ["Unclassified"]}},{"id": "None5", "metadata": {"taxonomy": ["k__Bacteria"]}},{"id": "None4", "metadata": {"taxonomy": ["Unclassified"]}},{"id": "None3", "metadata": {"taxonomy": ["k__Bacteria"]}},{"id": "None2", "metadata": {"taxonomy": ["k__Bacteria"]}},{"id": "None1", "metadata": {"taxonomy": ["Unclassified"]}},{"id": "879972", "metadata": {"taxonomy": ["k__Bacteria"]}},{"id": "None9", "metadata": {"taxonomy": ["Unclassified"]}},{"id": "None8", "metadata": {"taxonomy": ["k__Bacteria"]}}],"columns": [{"id": "f2", "metadata": null},{"id": "f1", "metadata": null},{"id": "f3", "metadata": null},{"id": "f4", "metadata": null},{"id": "p2", "metadata": null},{"id": "p1", "metadata": null},{"id": "t1", "metadata": null},{"id": "not16S.1", "metadata": null},{"id": "t2", "metadata": null}]} q2-types-2021.8.0/q2_types/feature_table/tests/data/feature-table_v210.biom000066400000000000000000001020101412142116700262060ustar00rootroot00000000000000[binary HDF5 data: BIOM v2.1 table fixture with observations GG_OTU_1 through GG_OTU_5 and samples Sample1 through Sample6] q2-types-2021.8.0/q2_types/feature_table/tests/test_format.py000066400000000000000000000044371412142116700240500ustar00rootroot00000000000000# ---------------------------------------------------------------------------- # Copyright (c) 2016-2021, QIIME 2 development team. # # Distributed under the terms of the Modified BSD License. 
# # The full license is in the file LICENSE, distributed with this software. # ---------------------------------------------------------------------------- import shutil import os import unittest from q2_types.feature_table import (BIOMV100Format, BIOMV210Format, BIOMV100DirFmt, BIOMV210DirFmt) from qiime2.plugin.testing import TestPluginBase from qiime2.plugin import ValidationError class TestFormats(TestPluginBase): package = 'q2_types.feature_table.tests' def test_biomv100_format_validate_positive(self): filepath = self.get_data_path('feature-table_v100.biom') format = BIOMV100Format(filepath, mode='r') format.validate() def test_biomv100_format_validate_negative(self): filepath = self.get_data_path('feature-table_v210.biom') format = BIOMV100Format(filepath, mode='r') with self.assertRaisesRegex(ValidationError, 'BIOMV100Format'): format.validate() def test_biomv210_format_validate_positive(self): filepath = self.get_data_path('feature-table_v210.biom') format = BIOMV210Format(filepath, mode='r') format.validate() def test_biomv210_format_validate_negative(self): filepath = self.get_data_path('feature-table_v100.biom') format = BIOMV210Format(filepath, mode='r') with self.assertRaisesRegex(ValidationError, 'BIOMV210Format'): format.validate() def test_biomv100_dir_format_validate_positive(self): filepath = self.get_data_path('feature-table_v100.biom') shutil.copy(filepath, os.path.join(self.temp_dir.name, 'feature-table.biom')) format = BIOMV100DirFmt(self.temp_dir.name, mode='r') format.validate() def test_biomv210_dir_format_validate_positive(self): filepath = self.get_data_path('feature-table_v210.biom') shutil.copy(filepath, os.path.join(self.temp_dir.name, 'feature-table.biom')) format = BIOMV210DirFmt(self.temp_dir.name, mode='r') format.validate() if __name__ == "__main__": unittest.main() q2-types-2021.8.0/q2_types/feature_table/tests/test_transformer.py000066400000000000000000000142711412142116700251210ustar00rootroot00000000000000# ---------------------------------------------------------------------------- # Copyright (c) 2016-2021, QIIME 2 development team. # # Distributed under the terms of the Modified BSD License. # # The full license is in the file LICENSE, distributed with this software. 
# ---------------------------------------------------------------------------- import unittest import biom import pandas as pd import qiime2 from pandas.testing import assert_frame_equal from q2_types.feature_table import BIOMV100Format, BIOMV210Format from qiime2.plugin.testing import TestPluginBase from q2_types.feature_table._transformer import (_parse_biom_table_v100, _parse_biom_table_v210, _table_to_dataframe, _table_to_metadata) class TestTransformers(TestPluginBase): package = 'q2_types.feature_table.tests' def test_biom_v100_format_to_biom_table(self): input, obs = self.transform_format(BIOMV100Format, biom.Table, filename='feature-table_v100.biom') exp = biom.load_table(str(input)) self.assertEqual(obs.ids(axis='observation').all(), exp.ids(axis='observation').all()) self.assertEqual(obs.ids(axis='sample').all(), exp.ids(axis='sample').all()) def test_biom_v100_format_to_pandas_data_frame(self): input, obs = self.transform_format(BIOMV100Format, pd.DataFrame, filename='feature-table_v100.biom') table = _parse_biom_table_v100(input) df = _table_to_dataframe(table) assert_frame_equal(df, obs) def test_biom_v210_format_to_pandas_data_frame(self): input, obs = self.transform_format(BIOMV210Format, pd.DataFrame, filename='feature-table_v210.biom') table = _parse_biom_table_v210(input) df = _table_to_dataframe(table) assert_frame_equal(df, obs) def test_biom_v210_format_to_biom_table(self): input, obs = self.transform_format(BIOMV210Format, biom.Table, filename='feature-table_v210.biom') exp = biom.load_table(str(input)) self.assertEqual(obs.ids(axis='observation').all(), exp.ids(axis='observation').all()) self.assertEqual(obs.ids(axis='sample').all(), exp.ids(axis='sample').all()) def test_biom_table_to_biom_v210_format(self): filepath = self.get_data_path('feature-table_v210.biom') transformer = self.get_transformer(biom.Table, BIOMV210Format) input = biom.load_table(filepath) obs = transformer(input) obs = biom.load_table(str(obs)) exp = input self.assertEqual(obs.ids(axis='observation').all(), exp.ids(axis='observation').all()) self.assertEqual(obs.ids(axis='sample').all(), exp.ids(axis='sample').all()) def test_biom_table_to_pandas_data_frame(self): filepath = self.get_data_path('feature-table_v100.biom') transformer = self.get_transformer(biom.Table, pd.DataFrame) input = biom.load_table(filepath) obs = transformer(input) self.assertIsInstance(obs, pd.DataFrame) def test_biom_v100_format_to_biom_v210_format(self): input, obs = self.transform_format(BIOMV100Format, BIOMV210Format, filename='feature-table_v100.biom') exp = biom.load_table(str(input)) obs = biom.load_table(str(obs)) self.assertEqual(obs.ids(axis='observation').all(), exp.ids(axis='observation').all()) self.assertEqual(obs.ids(axis='sample').all(), exp.ids(axis='sample').all()) def test_to_pandas_data_frame_to_biom_v210_format(self): filepath = self.get_data_path('feature-table_v100.biom') transformer1 = self.get_transformer(BIOMV100Format, pd.DataFrame) input = BIOMV100Format(filepath, mode='r') df = transformer1(input) transformer2 = self.get_transformer(pd.DataFrame, BIOMV210Format) obs = transformer2(df) self.assertIsInstance(obs, BIOMV210Format) def test_to_pandas_dataframe_bad_index(self): transformer = self.get_transformer(pd.DataFrame, BIOMV210Format) df = pd.DataFrame([[1, 2], [2, 3]], columns=['ATG', 'ACG']) with self.assertRaisesRegex(TypeError, 'string-based'): transformer(df) df = pd.DataFrame([[1, 2], [2, 3]], columns=['ATG', 'ACG'], index=[98, 99]) with self.assertRaisesRegex(TypeError, 
'string-based'): transformer(df) def test_to_pandas_data_frame_to_biom_table(self): filepath = self.get_data_path('feature-table_v100.biom') transformer1 = self.get_transformer(BIOMV100Format, pd.DataFrame) input = BIOMV100Format(filepath, mode='r') df = transformer1(input) transformer2 = self.get_transformer(pd.DataFrame, biom.Table) obs = transformer2(df) self.assertIsInstance(obs, biom.Table) def test_biom_table_to_metadata(self): filepath = self.get_data_path('feature-table_v100.biom') transformer = self.get_transformer(biom.Table, qiime2.Metadata) input = biom.load_table(filepath) obs = transformer(input) self.assertIsInstance(obs, qiime2.Metadata) def test_biom_v100_format_to_metadata(self): input, obs = self.transform_format(BIOMV100Format, qiime2.Metadata, filename='feature-table_v100.biom') table = _parse_biom_table_v100(input) df = _table_to_metadata(table) self.assertEqual(df, obs) def test_biom_v210_format_to_metadata(self): input, obs = self.transform_format(BIOMV210Format, qiime2.Metadata, filename='feature-table_v210.biom') table = _parse_biom_table_v210(input) df = _table_to_metadata(table) self.assertEqual(df, obs) if __name__ == "__main__": unittest.main() q2-types-2021.8.0/q2_types/feature_table/tests/test_type.py000066400000000000000000000035021412142116700235330ustar00rootroot00000000000000# ---------------------------------------------------------------------------- # Copyright (c) 2016-2021, QIIME 2 development team. # # Distributed under the terms of the Modified BSD License. # # The full license is in the file LICENSE, distributed with this software. # ---------------------------------------------------------------------------- import unittest from q2_types.feature_table import (FeatureTable, Frequency, RelativeFrequency, PercentileNormalized, Composition, Balance, PresenceAbsence, BIOMV210DirFmt, Design) from qiime2.plugin.testing import TestPluginBase class TestTypes(TestPluginBase): package = 'q2_types.feature_table.tests' def test_feature_table_semantic_type_registration(self): self.assertRegisteredSemanticType(FeatureTable) def test_frequency_semantic_type_registration(self): self.assertRegisteredSemanticType(Frequency) def test_relative_frequency_semantic_type_registration(self): self.assertRegisteredSemanticType(RelativeFrequency) def test_presence_absence_semantic_type_registration(self): self.assertRegisteredSemanticType(PresenceAbsence) def test_composition_semantic_type_registration(self): self.assertRegisteredSemanticType(Composition) def test_balance_semantic_type_registration(self): self.assertRegisteredSemanticType(Balance) def test_feature_table_semantic_type_to_v210_format_registration(self): self.assertSemanticTypeRegisteredToFormat( FeatureTable[Frequency | RelativeFrequency | PresenceAbsence | Composition | Balance | PercentileNormalized | Design], BIOMV210DirFmt) if __name__ == "__main__": unittest.main() q2-types-2021.8.0/q2_types/multiplexed_sequences/000077500000000000000000000000001412142116700216045ustar00rootroot00000000000000q2-types-2021.8.0/q2_types/multiplexed_sequences/__init__.py000066400000000000000000000014511412142116700237160ustar00rootroot00000000000000# ---------------------------------------------------------------------------- # Copyright (c) 2016-2021, QIIME 2 development team. # # Distributed under the terms of the Modified BSD License. # # The full license is in the file LICENSE, distributed with this software. 
# ---------------------------------------------------------------------------- from ._format import (MultiplexedSingleEndBarcodeInSequenceDirFmt, MultiplexedPairedEndBarcodeInSequenceDirFmt) from ._type import (MultiplexedSingleEndBarcodeInSequence, MultiplexedPairedEndBarcodeInSequence) __all__ = [ 'MultiplexedSingleEndBarcodeInSequence', 'MultiplexedPairedEndBarcodeInSequence', 'MultiplexedSingleEndBarcodeInSequenceDirFmt', 'MultiplexedPairedEndBarcodeInSequenceDirFmt', ] q2-types-2021.8.0/q2_types/multiplexed_sequences/_format.py000066400000000000000000000023351412142116700236100ustar00rootroot00000000000000# ---------------------------------------------------------------------------- # Copyright (c) 2016-2021, QIIME 2 development team. # # Distributed under the terms of the Modified BSD License. # # The full license is in the file LICENSE, distributed with this software. # ---------------------------------------------------------------------------- import qiime2.plugin.model as model from ..plugin_setup import plugin from q2_types.per_sample_sequences import FastqGzFormat # MultiplexedSingleEndBarcodeInSequenceDirFmt & # MultiplexedPairedEndBarcodeInSequenceDirFmt represent multiplexed sequences # that contain inline barcode information: # AGGACTAGGTAGATC => barcode: AGGA ; biological sequence: CTAGGTAGATC MultiplexedSingleEndBarcodeInSequenceDirFmt = model.SingleFileDirectoryFormat( 'MultiplexedSingleEndBarcodeInSequenceDirFmt', 'forward.fastq.gz', FastqGzFormat) class MultiplexedPairedEndBarcodeInSequenceDirFmt(model.DirectoryFormat): forward_sequences = model.File('forward.fastq.gz', format=FastqGzFormat) reverse_sequences = model.File('reverse.fastq.gz', format=FastqGzFormat) plugin.register_formats( MultiplexedSingleEndBarcodeInSequenceDirFmt, MultiplexedPairedEndBarcodeInSequenceDirFmt, ) q2-types-2021.8.0/q2_types/multiplexed_sequences/_type.py000066400000000000000000000022451412142116700233010ustar00rootroot00000000000000# ---------------------------------------------------------------------------- # Copyright (c) 2016-2021, QIIME 2 development team. # # Distributed under the terms of the Modified BSD License. # # The full license is in the file LICENSE, distributed with this software. # ---------------------------------------------------------------------------- from qiime2.plugin import SemanticType from ..plugin_setup import plugin from . import (MultiplexedSingleEndBarcodeInSequenceDirFmt, MultiplexedPairedEndBarcodeInSequenceDirFmt) MultiplexedSingleEndBarcodeInSequence = \ SemanticType('MultiplexedSingleEndBarcodeInSequence') MultiplexedPairedEndBarcodeInSequence = \ SemanticType('MultiplexedPairedEndBarcodeInSequence') plugin.register_semantic_types(MultiplexedSingleEndBarcodeInSequence, MultiplexedPairedEndBarcodeInSequence) plugin.register_semantic_type_to_format( MultiplexedSingleEndBarcodeInSequence, artifact_format=MultiplexedSingleEndBarcodeInSequenceDirFmt ) plugin.register_semantic_type_to_format( MultiplexedPairedEndBarcodeInSequence, artifact_format=MultiplexedPairedEndBarcodeInSequenceDirFmt, ) q2-types-2021.8.0/q2_types/multiplexed_sequences/tests/000077500000000000000000000000001412142116700227465ustar00rootroot00000000000000q2-types-2021.8.0/q2_types/multiplexed_sequences/tests/__init__.py000066400000000000000000000005351412142116700250620ustar00rootroot00000000000000# ---------------------------------------------------------------------------- # Copyright (c) 2016-2021, QIIME 2 development team. # # Distributed under the terms of the Modified BSD License. 
# # The full license is in the file LICENSE, distributed with this software. # ---------------------------------------------------------------------------- q2-types-2021.8.0/q2_types/multiplexed_sequences/tests/data/000077500000000000000000000000001412142116700236575ustar00rootroot00000000000000q2-types-2021.8.0/q2_types/multiplexed_sequences/tests/data/forward.fastq.gz000066400000000000000000000013731412142116700270060ustar00rootroot00000000000000[binary gzip data: FASTQ fixture, embedded name Human-Kneecap_S1_L001_R1_001.fastq] q2-types-2021.8.0/q2_types/multiplexed_sequences/tests/test_format.py000066400000000000000000000037771412142116700256630ustar00rootroot00000000000000# ---------------------------------------------------------------------------- # Copyright (c) 2016-2021, QIIME 2 development team. # # Distributed under the terms of the Modified BSD License. # # The full license is in the file LICENSE, distributed with this software. # ---------------------------------------------------------------------------- import os import shutil import unittest from q2_types.multiplexed_sequences import ( MultiplexedSingleEndBarcodeInSequenceDirFmt, MultiplexedPairedEndBarcodeInSequenceDirFmt, ) from qiime2.plugin.testing import TestPluginBase class TestMultiplexedSingleEndBarcodeInSequenceDirFmt(TestPluginBase): package = 'q2_types.multiplexed_sequences.tests' def test_format(self): # This test exists mainly to assert that the single-file directory # format is defined and functional. More extensive testing is performed # on its underlying format (FastqGzFormat). filepath = self.get_data_path('forward.fastq.gz') shutil.copy(filepath, os.path.join(self.temp_dir.name, 'forward.fastq.gz')) format = MultiplexedSingleEndBarcodeInSequenceDirFmt( self.temp_dir.name, mode='r') # Should not error. format.validate() class TestMultiplexedPairedEndBarcodeInSequenceDirFmt(TestPluginBase): package = 'q2_types.multiplexed_sequences.tests' def test_format(self): # This test exists mainly to assert that the directory format is # defined and functional. More extensive testing is performed # on its underlying formats (FastqGzFormat). for read in ['forward', 'reverse']: filepath = self.get_data_path('%s.fastq.gz' % read) shutil.copy(filepath, os.path.join(self.temp_dir.name, '%s.fastq.gz' % read)) format = MultiplexedPairedEndBarcodeInSequenceDirFmt( self.temp_dir.name, mode='r') # Should not error. format.validate() if __name__ == '__main__': unittest.main() q2-types-2021.8.0/q2_types/multiplexed_sequences/tests/test_type.py000066400000000000000000000030711412142116700253410ustar00rootroot00000000000000# ---------------------------------------------------------------------------- # Copyright (c) 2016-2021, QIIME 2 development team. # # Distributed under the terms of the Modified BSD License. 
# ---------------------------------------------------------------------------- import unittest from q2_types.multiplexed_sequences import ( MultiplexedSingleEndBarcodeInSequence, MultiplexedPairedEndBarcodeInSequence, MultiplexedSingleEndBarcodeInSequenceDirFmt, MultiplexedPairedEndBarcodeInSequenceDirFmt, ) from qiime2.plugin.testing import TestPluginBase class TestMultiplexedBarcodeInSequenceTypes(TestPluginBase): package = "q2_types.per_sample_sequences.tests" def test_single_end_semantic_type_registration(self): self.assertRegisteredSemanticType( MultiplexedSingleEndBarcodeInSequence) def test_paired_end_semantic_type_registration(self): self.assertRegisteredSemanticType( MultiplexedPairedEndBarcodeInSequence) def test_single_end_semantic_type_to_format_registration(self): self.assertSemanticTypeRegisteredToFormat( MultiplexedSingleEndBarcodeInSequence, MultiplexedSingleEndBarcodeInSequenceDirFmt, ) def test_paired_end_semantic_type_to_format_registration(self): self.assertSemanticTypeRegisteredToFormat( MultiplexedPairedEndBarcodeInSequence, MultiplexedPairedEndBarcodeInSequenceDirFmt, ) if __name__ == '__main__': unittest.main() q2-types-2021.8.0/q2_types/ordination/000077500000000000000000000000001412142116700173435ustar00rootroot00000000000000q2-types-2021.8.0/q2_types/ordination/__init__.py000066400000000000000000000014321412142116700214540ustar00rootroot00000000000000# ---------------------------------------------------------------------------- # Copyright (c) 2016-2021, QIIME 2 development team. # # Distributed under the terms of the Modified BSD License. # # The full license is in the file LICENSE, distributed with this software. # ---------------------------------------------------------------------------- import importlib from ._format import ( OrdinationFormat, OrdinationDirectoryFormat, ProcrustesStatisticsFmt, ProcrustesStatisticsDirFmt, ) from ._type import PCoAResults, ProcrustesStatistics __all__ = ['OrdinationFormat', 'OrdinationDirectoryFormat', 'ProcrustesStatisticsFmt', 'ProcrustesStatisticsDirFmt', 'PCoAResults', 'ProcrustesStatistics'] importlib.import_module('q2_types.ordination._transformer') q2-types-2021.8.0/q2_types/ordination/_format.py000066400000000000000000000032571412142116700213530ustar00rootroot00000000000000# ---------------------------------------------------------------------------- # Copyright (c) 2016-2021, QIIME 2 development team. # # Distributed under the terms of the Modified BSD License. # # The full license is in the file LICENSE, distributed with this software. 
# ---------------------------------------------------------------------------- import qiime2 import skbio.io import qiime2.plugin.model as model from qiime2.plugin import ValidationError from ..plugin_setup import plugin class OrdinationFormat(model.TextFileFormat): def sniff(self): sniffer = skbio.io.io_registry.get_sniffer('ordination') return sniffer(str(self))[0] OrdinationDirectoryFormat = model.SingleFileDirectoryFormat( 'OrdinationDirectoryFormat', 'ordination.txt', OrdinationFormat) class ProcrustesStatisticsFmt(model.TextFileFormat): METADATA_COLUMNS = { 'true M^2 value', 'p-value for true M^2 value', 'number of Monte Carlo permutations', } def validate(self, level): try: md = qiime2.Metadata.load(str(self)) except qiime2.metadata.MetadataFileError as md_exc: raise ValidationError(md_exc) from md_exc for column in sorted(self.METADATA_COLUMNS): try: md.get_column(column) except ValueError as md_exc: raise ValidationError(md_exc) from md_exc ProcrustesStatisticsDirFmt = model.SingleFileDirectoryFormat( 'ProcrustesStatisticsDirFmt', 'ProcrustesStatistics.tsv', ProcrustesStatisticsFmt) plugin.register_formats(OrdinationFormat, OrdinationDirectoryFormat, ProcrustesStatisticsFmt, ProcrustesStatisticsDirFmt) q2-types-2021.8.0/q2_types/ordination/_transformer.py000066400000000000000000000037631412142116700224270ustar00rootroot00000000000000# ---------------------------------------------------------------------------- # Copyright (c) 2016-2021, QIIME 2 development team. # # Distributed under the terms of the Modified BSD License. # # The full license is in the file LICENSE, distributed with this software. # ---------------------------------------------------------------------------- import pandas as pd import skbio import qiime2 from ..plugin_setup import plugin from . import OrdinationFormat, ProcrustesStatisticsFmt def _ordination_format_to_ordination_results(ff): return skbio.OrdinationResults.read(str(ff), format='ordination', verify=False) def _ordination_format_to_dataframe(ff): ordination_results = _ordination_format_to_ordination_results(ff) df = ordination_results.samples df.index.name = 'Sample ID' df.columns = ['Axis %d' % i for i in range(1, len(df.columns) + 1)] return df @plugin.register_transformer def _1(data: skbio.OrdinationResults) -> OrdinationFormat: ff = OrdinationFormat() data.write(str(ff), format='ordination') return ff @plugin.register_transformer def _2(ff: OrdinationFormat) -> skbio.OrdinationResults: return _ordination_format_to_ordination_results(ff) @plugin.register_transformer def _3(ff: OrdinationFormat) -> qiime2.Metadata: df = _ordination_format_to_dataframe(ff) return qiime2.Metadata(df) @plugin.register_transformer def _4(data: pd.DataFrame) -> ProcrustesStatisticsFmt: ff = ProcrustesStatisticsFmt() qiime2.Metadata(data).save(str(ff)) return ff @plugin.register_transformer def _5(ff: ProcrustesStatisticsFmt) -> pd.DataFrame: df = qiime2.Metadata.load(str(ff)).to_dataframe() return df.astype({ 'true M^2 value': float, 'p-value for true M^2 value': float, 'number of Monte Carlo permutations': int, }, copy=True, errors='raise') @plugin.register_transformer def _6(ff: ProcrustesStatisticsFmt) -> qiime2.Metadata: return qiime2.Metadata.load(str(ff)) q2-types-2021.8.0/q2_types/ordination/_type.py000066400000000000000000000015701412142116700210400ustar00rootroot00000000000000# ---------------------------------------------------------------------------- # Copyright (c) 2016-2021, QIIME 2 development team. 
# # Distributed under the terms of the Modified BSD License. # # The full license is in the file LICENSE, distributed with this software. # ---------------------------------------------------------------------------- from qiime2.plugin import SemanticType from ..plugin_setup import plugin from . import OrdinationDirectoryFormat, ProcrustesStatisticsDirFmt PCoAResults = SemanticType('PCoAResults') ProcrustesStatistics = SemanticType('ProcrustesStatistics') plugin.register_semantic_types(PCoAResults, ProcrustesStatistics) plugin.register_semantic_type_to_format( PCoAResults, artifact_format=OrdinationDirectoryFormat ) plugin.register_semantic_type_to_format( ProcrustesStatistics, artifact_format=ProcrustesStatisticsDirFmt ) q2-types-2021.8.0/q2_types/ordination/tests/000077500000000000000000000000001412142116700205055ustar00rootroot00000000000000q2-types-2021.8.0/q2_types/ordination/tests/__init__.py000066400000000000000000000005351412142116700226210ustar00rootroot00000000000000# ---------------------------------------------------------------------------- # Copyright (c) 2016-2021, QIIME 2 development team. # # Distributed under the terms of the Modified BSD License. # # The full license is in the file LICENSE, distributed with this software. # ---------------------------------------------------------------------------- q2-types-2021.8.0/q2_types/ordination/tests/data/000077500000000000000000000000001412142116700214165ustar00rootroot00000000000000q2-types-2021.8.0/q2_types/ordination/tests/data/m2stats-999-permus.tsv000066400000000000000000000002321412142116700254070ustar00rootroot00000000000000id true M^2 value p-value for true M^2 value number of Monte Carlo permutations #q2:types numeric numeric numeric results 0.0789623748362618 0.001 999 q2-types-2021.8.0/q2_types/ordination/tests/data/not-pcoa-results.txt000066400000000000000000000000371412142116700253760ustar00rootroot00000000000000This is not an ordination file q2-types-2021.8.0/q2_types/ordination/tests/data/pcoa-results-1x1.txt000066400000000000000000000001521412142116700252050ustar00rootroot00000000000000Eigvals 1 0.0 Proportion explained 1 nan Species 0 0 Site 1 1 s1 0.0 Biplot 0 0 Site constraints 0 0 q2-types-2021.8.0/q2_types/ordination/tests/data/pcoa-results-2x2.txt000066400000000000000000000002441412142116700252110ustar00rootroot00000000000000Eigvals 2 882.0 0.0 Proportion explained 2 1.0 0.0 Species 0 0 Site 2 2 s1 -20.999999999999996 -0.0 s2 20.999999999999996 -0.0 Biplot 0 0 Site constraints 0 0 q2-types-2021.8.0/q2_types/ordination/tests/data/pcoa-results-NxN.txt000066400000000000000000000021021412142116700252740ustar00rootroot00000000000000Eigvals 8 1.0829945301842285 0.3533606153760929 0.15350101859831256 0.11185635481220381 0.0 0.0 0.0 0.0 Proportion explained 8 0.6364145048654882 0.2076500063534812 0.09020384870362648 0.0657316400774041 0.0 0.0 0.0 0.0 Species 0 0 Site 8 8 f2 -0.30485714903989747 -0.12525562879260999 -0.17581264555363468 -0.09969461658220807 0.0 0.0 0.0 0.0 f1 -0.3690894133377798 0.13859886917433423 -0.03889271532103086 -0.05183070746079718 0.0 -0.0 0.0 0.0 f3 -0.36908941333777984 0.13859886917433414 -0.03889271532103104 -0.05183070746079679 0.0 0.0 -0.0 0.0 f4 -0.37482068632168797 0.031830859315108845 0.24822915350171945 0.1646278415237003 0.0 -0.0 0.0 0.0 p2 0.2533037597244472 -0.12067851655968227 -0.19734955265501047 0.23472931532809366 0.0 -0.0 0.0 0.0 p1 0.18670946676383143 -0.45488927326554546 0.1270305242967276 -0.09736323343886037 0.0 -0.0 0.0 0.0 t1 0.4889217177744332 0.19589741047703058 
0.037843975526129635 -0.04931894595456521 0.0 -0.0 -0.0 0.0 t2 0.4889217177744332 0.19589741047703058 0.03784397552612958 -0.04931894595456517 0.0 0.0 0.0 0.0 Biplot 0 0 Site constraints 0 0 q2-types-2021.8.0/q2_types/ordination/tests/test_format.py000066400000000000000000000043451412142116700234140ustar00rootroot00000000000000# ---------------------------------------------------------------------------- # Copyright (c) 2016-2021, QIIME 2 development team. # # Distributed under the terms of the Modified BSD License. # # The full license is in the file LICENSE, distributed with this software. # ---------------------------------------------------------------------------- import os import shutil import unittest from q2_types.ordination import ( OrdinationFormat, OrdinationDirectoryFormat, ProcrustesStatisticsFmt, ProcrustesStatisticsDirFmt) from qiime2.plugin.testing import TestPluginBase from qiime2.plugin import ValidationError class TestFormats(TestPluginBase): package = 'q2_types.ordination.tests' def test_ordination_format_validate_positive(self): filepath = self.get_data_path('pcoa-results-NxN.txt') format = OrdinationFormat(filepath, mode='r') format.validate() def test_ordination_format_validate_negative(self): filepath = self.get_data_path('not-pcoa-results.txt') format = OrdinationFormat(filepath, mode='r') with self.assertRaisesRegex(ValidationError, 'OrdinationFormat'): format.validate() def test_ordination_dir_format_validate_positive(self): filepath = self.get_data_path('pcoa-results-NxN.txt') shutil.copy(filepath, os.path.join(self.temp_dir.name, 'ordination.txt')) format = OrdinationDirectoryFormat(self.temp_dir.name, mode='r') format.validate() def test_m2_stats_fmt_positive(self): filepath = self.get_data_path('m2stats-999-permus.tsv') format = ProcrustesStatisticsFmt(filepath, mode='r') format.validate(level='max') # If we made it this far, congrats! self.assertTrue(True) def test_m2_stats_dir_fmt_positive(self): filepath = self.get_data_path('m2stats-999-permus.tsv') shutil.copy(filepath, os.path.join(self.temp_dir.name, 'ProcrustesStatistics.tsv')) format = ProcrustesStatisticsDirFmt(self.temp_dir.name, mode='r') format.validate(level='max') # If we made it this far, congrats! self.assertTrue(True) if __name__ == "__main__": unittest.main() q2-types-2021.8.0/q2_types/ordination/tests/test_transformer.py000066400000000000000000000127051412142116700244650ustar00rootroot00000000000000# ---------------------------------------------------------------------------- # Copyright (c) 2016-2021, QIIME 2 development team. # # Distributed under the terms of the Modified BSD License. # # The full license is in the file LICENSE, distributed with this software. 
# ---------------------------------------------------------------------------- import unittest import skbio import pandas as pd import pandas.testing as pdt import qiime2 from q2_types.ordination import OrdinationFormat, ProcrustesStatisticsFmt from qiime2.plugin.testing import TestPluginBase class TestTransformers(TestPluginBase): package = 'q2_types.ordination.tests' def test_skbio_ordination_results_to_ordination_format(self): filenames = ('pcoa-results-1x1.txt', 'pcoa-results-2x2.txt', 'pcoa-results-NxN.txt') for filename in filenames: filepath = self.get_data_path(filename) transformer = self.get_transformer(skbio.OrdinationResults, OrdinationFormat) input = skbio.OrdinationResults.read(filepath) obs = transformer(input) self.assertIsInstance(obs, OrdinationFormat) obs = skbio.OrdinationResults.read(str(obs)) self.assertEqual(str(obs), str(input)) def test_ordination_format_to_skbio_ordination_results(self): filenames = ('pcoa-results-1x1.txt', 'pcoa-results-2x2.txt', 'pcoa-results-NxN.txt') for filename in filenames: input, obs = self.transform_format(OrdinationFormat, skbio.OrdinationResults, filename=filename) exp = skbio.OrdinationResults.read(str(input)) self.assertEqual(str(exp), str(obs)) def test_1x1_ordination_format_to_metadata(self): _, obs = self.transform_format(OrdinationFormat, qiime2.Metadata, 'pcoa-results-1x1.txt') index = pd.Index(['s1'], name='Sample ID', dtype=object) exp_df = pd.DataFrame([0.0], index=index, columns=['Axis 1'], dtype=float) exp = qiime2.Metadata(exp_df) self.assertEqual(exp, obs) def test_2x2_ordination_format_to_metadata(self): _, obs = self.transform_format(OrdinationFormat, qiime2.Metadata, 'pcoa-results-2x2.txt') index = pd.Index(['s1', 's2'], name='Sample ID', dtype=object) exp_df = pd.DataFrame([[-20.999999999999996, -0.0], [20.999999999999996, -0.0]], index=index, columns=['Axis 1', 'Axis 2'], dtype=float) exp = qiime2.Metadata(exp_df) self.assertEqual(exp, obs) def test_NxN_ordination_format_to_metadata(self): # Not creating a reference dataframe here because manually populating # that DataFrame is a pain. Specifically we just want to check the # functionality of the dynamic column naming (e.g. Axis N). 
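        # For illustration (this mirrors the assertion constructed just below,
        # it is not an extra check): the NxN fixture contains eight samples,
        # so the resulting Metadata is expected to expose columns named
        # 'Axis 1' through 'Axis 8'.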
_, obs = self.transform_format(OrdinationFormat, qiime2.Metadata, 'pcoa-results-NxN.txt') columns = ['Axis %d' % i for i in range(1, 9)] self.assertEqual(columns, list(obs.columns)) def test_df_to_procrustes_m2_stats_fmt(self): input_df = pd.DataFrame({'true M^2 value': [1], 'p-value for true M^2 value': [0.2], 'number of Monte Carlo permutations': [300]}, index=pd.Index(['results'], name='id')) exp = ['id\ttrue M^2 value\tp-value for true M^2 value\t' 'number of Monte Carlo permutations\n', '#q2:types\tnumeric\tnumeric\tnumeric\n', 'results\t1\t0.2\t300\n'] transformer = self.get_transformer(pd.DataFrame, ProcrustesStatisticsFmt) fmt = transformer(input_df) with open(str(fmt), 'r') as fh: obs = fh.readlines() self.assertEqual(exp, obs) def test_procrustes_m2_stats_fmt_to_df(self): filepath = self.get_data_path('m2stats-999-permus.tsv') input_fmt = ProcrustesStatisticsFmt(filepath, mode='r') exp = pd.DataFrame({'true M^2 value': [0.0789623748362618], 'p-value for true M^2 value': [0.001], 'number of Monte Carlo permutations': [999]}, index=pd.Index(['results'], name='id')) transformer = self.get_transformer(ProcrustesStatisticsFmt, pd.DataFrame) obs = transformer(input_fmt) pdt.assert_frame_equal(exp, obs) def test_procrustes_m2_stats_fmt_to_md(self): filepath = self.get_data_path('m2stats-999-permus.tsv') input_fmt = ProcrustesStatisticsFmt(filepath, mode='r') df = pd.DataFrame({'true M^2 value': [0.0789623748362618], 'p-value for true M^2 value': [0.001], 'number of Monte Carlo permutations': [999]}, index=pd.Index(['results'], name='id')) exp = qiime2.Metadata(df) transformer = self.get_transformer(ProcrustesStatisticsFmt, qiime2.Metadata) obs = transformer(input_fmt) self.assertEqual(exp, obs) if __name__ == "__main__": unittest.main() q2-types-2021.8.0/q2_types/ordination/tests/test_type.py000066400000000000000000000025211412142116700230770ustar00rootroot00000000000000# ---------------------------------------------------------------------------- # Copyright (c) 2016-2021, QIIME 2 development team. # # Distributed under the terms of the Modified BSD License. # # The full license is in the file LICENSE, distributed with this software. # ---------------------------------------------------------------------------- import unittest from q2_types.ordination import ( PCoAResults, OrdinationDirectoryFormat, ProcrustesStatistics, ProcrustesStatisticsDirFmt, ) from qiime2.plugin.testing import TestPluginBase class TestTypes(TestPluginBase): package = 'q2_types.ordination.tests' def test_pcoa_results_semantic_type_registration(self): self.assertRegisteredSemanticType(PCoAResults) def test_pcoa_results_semantic_type_to_ordination_fmt_registration(self): self.assertSemanticTypeRegisteredToFormat(PCoAResults, OrdinationDirectoryFormat) def test_procrustes_m2_semantic_type_registration(self): self.assertRegisteredSemanticType(ProcrustesStatistics) def test_procrustes_m2_semantic_type_to_m2_fmt_registration(self): self.assertSemanticTypeRegisteredToFormat(ProcrustesStatistics, ProcrustesStatisticsDirFmt) if __name__ == "__main__": unittest.main() q2-types-2021.8.0/q2_types/per_sample_sequences/000077500000000000000000000000001412142116700213775ustar00rootroot00000000000000q2-types-2021.8.0/q2_types/per_sample_sequences/__init__.py000066400000000000000000000043561412142116700235200ustar00rootroot00000000000000# ---------------------------------------------------------------------------- # Copyright (c) 2016-2021, QIIME 2 development team. # # Distributed under the terms of the Modified BSD License. 
# # The full license is in the file LICENSE, distributed with this software. # ---------------------------------------------------------------------------- import importlib from ._format import (CasavaOneEightSingleLanePerSampleDirFmt, CasavaOneEightLanelessPerSampleDirFmt, FastqGzFormat, YamlFormat, FastqManifestFormat, FastqAbsolutePathManifestFormat, SingleLanePerSampleSingleEndFastqDirFmt, SingleLanePerSamplePairedEndFastqDirFmt, SingleEndFastqManifestPhred33, SingleEndFastqManifestPhred64, PairedEndFastqManifestPhred33, PairedEndFastqManifestPhred64, SingleEndFastqManifestPhred33V2, SingleEndFastqManifestPhred64V2, PairedEndFastqManifestPhred33V2, PairedEndFastqManifestPhred64V2, QIIME1DemuxFormat, QIIME1DemuxDirFmt) from ._type import (Sequences, SequencesWithQuality, PairedEndSequencesWithQuality, JoinedSequencesWithQuality) __all__ = ['CasavaOneEightSingleLanePerSampleDirFmt', 'CasavaOneEightLanelessPerSampleDirFmt', 'FastqGzFormat', 'YamlFormat', 'FastqManifestFormat', 'FastqAbsolutePathManifestFormat', 'SingleLanePerSampleSingleEndFastqDirFmt', 'SingleLanePerSamplePairedEndFastqDirFmt', 'Sequences', 'SequencesWithQuality', 'PairedEndSequencesWithQuality', 'JoinedSequencesWithQuality', 'SingleEndFastqManifestPhred33', 'SingleEndFastqManifestPhred64', 'PairedEndFastqManifestPhred33', 'PairedEndFastqManifestPhred64', 'SingleEndFastqManifestPhred33V2', 'SingleEndFastqManifestPhred64V2', 'PairedEndFastqManifestPhred33V2', 'PairedEndFastqManifestPhred64V2', 'QIIME1DemuxFormat', 'QIIME1DemuxDirFmt'] importlib.import_module('q2_types.per_sample_sequences._transformer') q2-types-2021.8.0/q2_types/per_sample_sequences/_format.py000066400000000000000000000445061412142116700234110ustar00rootroot00000000000000# ---------------------------------------------------------------------------- # Copyright (c) 2016-2021, QIIME 2 development team. # # Distributed under the terms of the Modified BSD License. # # The full license is in the file LICENSE, distributed with this software. # ---------------------------------------------------------------------------- import os import gzip import itertools import collections import pathlib import pandas as pd import skbio import skbio.io import yaml import qiime2 import qiime2.plugin.model as model from qiime2.plugin import ValidationError from ..plugin_setup import plugin from ._util import ( _parse_casava_filename, _manifest_to_df, ) class FastqAbsolutePathManifestFormatV2(model.TextFileFormat): """ Base class for mapping of sample identifies to filepaths. This format relies heavily on the qiime2.Metadata on-disk format, as well as the validation rules and behavior. """ METADATA_COLUMNS = None def _validate_(self, level): try: md = qiime2.Metadata.load(str(self)) except qiime2.metadata.MetadataFileError as md_exc: raise ValidationError(md_exc) from md_exc md = md.filter_columns(column_type='categorical') md_cols = dict() for column in self.METADATA_COLUMNS.keys(): try: md_cols[column] = md.get_column(column) except ValueError as md_exc: raise ValidationError(md_exc) from md_exc filepaths = dict() for column_name, column in md_cols.items(): column = column.to_series() for i, (id_, fp) in enumerate(column.iteritems(), start=1): # QIIME 2 represents empty cells as np.nan once normalized if pd.isna(fp): raise ValidationError( 'Missing filepath on line %d and column "%s".' % (i, column_name)) if not os.path.exists(os.path.expandvars(fp)): raise ValidationError( 'Filepath on line %d and column "%s" could not ' 'be found (%s) for sample "%s".' 
% (i, column_name, fp, id_)) if fp in filepaths: old_id, old_col_name, old_row = filepaths[fp] raise ValidationError( 'Filepath on line %d and column "%s" (sample "%s") ' 'has already been registered on line %d and column ' '"%s" (sample "%s").' % (i, column_name, id_, old_row, old_col_name, old_id)) else: filepaths[fp] = (id_, column_name, i) class _SingleEndFastqManifestV2(FastqAbsolutePathManifestFormatV2): METADATA_COLUMNS = {'absolute-filepath': 'forward'} class SingleEndFastqManifestPhred33V2(_SingleEndFastqManifestV2): pass class SingleEndFastqManifestPhred64V2(_SingleEndFastqManifestV2): pass class _PairedEndFastqManifestV2(FastqAbsolutePathManifestFormatV2): METADATA_COLUMNS = {'forward-absolute-filepath': 'forward', 'reverse-absolute-filepath': 'reverse'} class PairedEndFastqManifestPhred33V2(_PairedEndFastqManifestV2): pass class PairedEndFastqManifestPhred64V2(_PairedEndFastqManifestV2): pass class _FastqManifestBase(model.TextFileFormat): """ Base class for mapping of sample identifiers to filepaths and read direction. """ EXPECTED_HEADER = None PATH_HEADER_LABEL = None def _check_n_records(self, root, n=None): with self.open() as fh: header = None records_seen = 0 file_ = enumerate(fh) if n is None else zip(range(n), fh) for i, line in file_: i = i + 1 # For easier reporting if line.lstrip(' ') == '\n': continue # Blank line elif line.startswith('#'): continue # Comment line cells = [c.strip() for c in line.rstrip('\n').split(',')] if header is None: if cells != self.EXPECTED_HEADER: raise ValidationError( 'Found header on line %d with the following ' 'labels: %s, expected: %s' % (i, cells, self.EXPECTED_HEADER)) else: header = cells else: if len(cells) != len(header): raise ValidationError( 'Line %d has %s cells (%s), expected %s.' % (i, len(cells), cells, len(header))) # Structure checks out, so let's make lookup easy cells = dict(zip(header, cells)) # TODO: a bunch of tests in this subpackage aren't well # behaved --- many tests fail on this check because the # test data isn't constructed correctly. As well, there # appear to be framework-related issues preventing us from # making this kind of validation work for the relative # manifest formats at this time. if root == '': fp = os.path.join(root, cells[self.PATH_HEADER_LABEL]) if not os.path.exists(os.path.expandvars(fp)): raise ValidationError( 'File referenced on line %d could not be ' 'found (%s).' % (i, fp)) if cells['direction'] not in ('forward', 'reverse'): raise ValidationError( 'Read direction declared on line %d was %s, ' 'expected `forward` or `reverse`.' % (i, cells['direction'])) records_seen += 1 if header is None: raise ValidationError('No header found, expected: %s.' % self.EXPECTED_HEADER) if records_seen == 0: raise ValidationError('No sample records found in manifest, ' 'only observed comments, blank lines, ' 'and/or a header row.') class FastqManifestFormat(_FastqManifestBase): """ Mapping of sample identifiers to relative filepaths and read direction. """ EXPECTED_HEADER = ['sample-id', 'filename', 'direction'] PATH_HEADER_LABEL = 'filename' def _validate_(self, level): self._check_n_records(root=str(self.path.parent), n={'min': 10, 'max': None}[level]) class FastqAbsolutePathManifestFormat(_FastqManifestBase): """ Mapping of sample identifiers to absolute filepaths and read direction. 
""" EXPECTED_HEADER = ['sample-id', 'absolute-filepath', 'direction'] PATH_HEADER_LABEL = 'absolute-filepath' def _validate_(self, level): # This is effectively only invoked on import, so let's just # validate the whole file! self._check_n_records(root='', n=None) class SingleEndFastqManifestPhred33(FastqAbsolutePathManifestFormat): pass class SingleEndFastqManifestPhred64(FastqAbsolutePathManifestFormat): pass class PairedEndFastqManifestPhred33(FastqAbsolutePathManifestFormat): pass class PairedEndFastqManifestPhred64(FastqAbsolutePathManifestFormat): pass class YamlFormat(model.TextFileFormat): """ Arbitrary yaml-formatted file. """ def sniff(self): with self.open() as fh: try: yaml.safe_load(fh) except yaml.YAMLError: return False return True class FastqGzFormat(model.BinaryFileFormat): """ A gzipped fastq file. """ def _check_n_records(self, n=None): with gzip.open(str(self), mode='rt', encoding='ascii') as fh: zipper = itertools.zip_longest(*[fh] * 4) if n is None: file_ = enumerate(zipper) else: file_ = zip(range(1, n), zipper) for i, record in file_: header, seq, sep, qual = record if not header.startswith('@'): raise ValidationError('Header on line %d is not FASTQ, ' 'records may be misaligned' % (i * 4 + 1)) if seq is None or seq == '\n': raise ValidationError('Missing sequence for record ' 'beginning on line %d' % (i * 4 + 1)) elif not seq.isupper(): raise ValidationError('Lowercase case sequence on line %d' % (i * 4 + 2)) if sep is None: raise ValidationError('Missing separator for record ' 'beginning on line %d' % (i * 4 + 1)) elif not sep.startswith('+'): raise ValidationError('Invalid separator on line %d' % (i * 4 + 3)) if qual is None: raise ValidationError('Missing quality for record ' 'beginning on line %d' % (i * 4 + 1)) elif len(qual) != len(seq): raise ValidationError('Quality score length doesn\'t ' 'match sequence length for record ' 'beginning on line %d' % (i * 4 + 1)) def _validate_(self, level): with self.open() as fh: if fh.peek(2)[:2] != b'\x1f\x8b': raise ValidationError('File is uncompressed') record_count_map = {'min': 5, 'max': None} self._check_n_records(record_count_map[level]) class CasavaOneEightSingleLanePerSampleDirFmt(model.DirectoryFormat): _CHECK_PAIRED = True _REQUIRE_PAIRED = False sequences = model.FileCollection( r'.+_.+_L[0-9][0-9][0-9]_R[12]_001\.fastq\.gz', format=FastqGzFormat) @sequences.set_path_maker def sequences_path_maker(self, sample_id, barcode_id, lane_number, read_number): return '%s_%s_L%03d_R%d_001.fastq.gz' % (sample_id, barcode_id, lane_number, read_number) def _find_duplicates(self, ids): return {x for x, c in collections.Counter(ids).items() if c > 1} @property def manifest(self): tmp_manifest = FastqManifestFormat() with tmp_manifest.open() as fh: fh.write('sample-id,filename,direction\n') for fp, _ in self.sequences.iter_views(FastqGzFormat): sample_id, _, _, _, direction = _parse_casava_filename(fp) fh.write('%s,%s,%s\n' % (sample_id, fp.name, direction)) df = _manifest_to_df(tmp_manifest, self.path.parent) if 'reverse' not in df: df['reverse'] = None if 'forward' not in df: df['forward'] = None def munge_fn_closure(val): if val is not None: return str(self.path / pathlib.Path(val).name) return val for column in {'forward', 'reverse'}: df[column] = df[column].apply(munge_fn_closure) return df def _validate_(self, level): forwards = [] reverse = [] for p in self.path.iterdir(): if p.is_dir(): # This branch happens if you have a filepath that looks roughly # like: Human_Kneecap/S1_L001_R1_001.fastq.gz # This technically 
matches the regex. It's easier to just
                # check that there aren't any directories, than making a very
                # complicated regex. This also produces a nicer error anyways.
                d = p.relative_to(self.path)
                raise ValidationError("Contains a subdirectory: %s" % d)
            else:
                if p.name.endswith('_001.fastq.gz'):
                    sample_id = p.name.rsplit('_', maxsplit=4)[0]
                    if p.name.endswith('R1_001.fastq.gz'):
                        forwards.append(sample_id)
                    else:
                        reverse.append(sample_id)

        set_forwards = set(forwards)
        set_reverse = set(reverse)

        if len(set_forwards) != len(forwards):
            raise ValidationError('Duplicate samples in forward reads: %r'
                                  % self._find_duplicates(forwards))
        if len(set_reverse) != len(reverse):
            raise ValidationError('Duplicate samples in reverse reads: %r'
                                  % self._find_duplicates(reverse))

        if forwards and reverse:
            if not self._CHECK_PAIRED:
                raise ValidationError("Forward and reverse reads found.")
            elif set_forwards ^ set_reverse:
                raise ValidationError(
                    "These samples do not have matching pairs of forward and "
                    "reverse reads: %r" % (set_forwards ^ set_reverse))
        elif self._REQUIRE_PAIRED:
            raise ValidationError("Reads are not paired end.")


class _SingleLanePerSampleFastqDirFmt(CasavaOneEightSingleLanePerSampleDirFmt):
    manifest = model.File('MANIFEST', format=FastqManifestFormat)
    metadata = model.File('metadata.yml', format=YamlFormat)


class SingleLanePerSampleSingleEndFastqDirFmt(_SingleLanePerSampleFastqDirFmt):
    _CHECK_PAIRED = False


class SingleLanePerSamplePairedEndFastqDirFmt(_SingleLanePerSampleFastqDirFmt):
    _REQUIRE_PAIRED = True


class CasavaOneEightLanelessPerSampleDirFmt(model.DirectoryFormat):
    sequences = model.FileCollection(r'.+_.+_R[12]_001\.fastq\.gz',
                                     format=FastqGzFormat)

    @sequences.set_path_maker
    def sequences_path_maker(self, sample_id, barcode_id, read_number):
        return '%s_%s_R%d_001.fastq.gz' % (sample_id, barcode_id, read_number)


class QIIME1DemuxFormat(model.TextFileFormat):
    """QIIME 1 demultiplexed FASTA format.

    The QIIME 1 demultiplexed FASTA format is the default output format of
    ``split_libraries.py`` and ``split_libraries_fastq.py``. The file output
    by QIIME 1 is named ``seqs.fna``; this filename is sometimes associated
    with the file format itself due to its widespread usage in QIIME 1.

    The format is documented here:
    http://qiime.org/documentation/file_formats.html#demultiplexed-sequences

    Format details:

    - FASTA file with exactly two lines per record: header and sequence. Each
      sequence must span exactly one line and cannot be split across multiple
      lines.

    - The ID in each header must follow the format ``<sample-id>_<seq-id>``.
      ``<sample-id>`` is the identifier of the sample the sequence belongs to,
      and ``<seq-id>`` is an identifier for the sequence *within* its sample.
      In QIIME 1, ``<seq-id>`` is typically an incrementing integer starting
      from zero, but any non-empty value can be used here, as long as the
      header IDs remain unique throughout the file. Note: ``<sample-id>`` may
      itself contain underscores; the rightmost underscore will be used to
      delimit the sample and sequence IDs.

    - Descriptions in headers are permitted and ignored.

    - Header IDs must be unique within the file.

    - Each sequence must be DNA and cannot be empty.
    """

    def sniff(self):
        with self.open() as filehandle:
            try:
                self._validate(filehandle, num_records=30)
            except Exception:
                return False
            else:
                return True

    # The code is structured such that `_validate` can be used to validate as
    # much of the file as desired. Users may be able to control levels of
    # validation in the future, and we'll also have the ability to describe
    # *why* a file is invalid.
Sniffers can only offer a boolean response # currently, but the below `Exceptions` could include real error messages # in the future. For now, the `Exceptions` are only used to give a boolean # response to the sniffer. def _validate(self, filehandle, *, num_records): ids = set() for (header, seq), _ in zip(itertools.zip_longest(*[filehandle] * 2), range(num_records)): if header is None or seq is None: # Not exactly two lines per record. raise Exception() header = header.rstrip('\n') seq = seq.rstrip('\n') id = self._parse_id(header) if id in ids: # Duplicate header ID. raise Exception() self._validate_id(id) self._validate_seq(seq) ids.add(id) def _parse_id(self, header): if not header.startswith('>'): raise Exception() header = header[1:] id = '' if header and not header[0].isspace(): id = header.split(maxsplit=1)[0] return id def _validate_id(self, id): pieces = id.rsplit('_', maxsplit=1) if len(pieces) != 2 or not all(pieces): raise Exception() def _validate_seq(self, seq): if seq: # Will raise a `ValueError` on invalid DNA characters. skbio.DNA(seq, validate=True) else: # Empty sequence. raise Exception() QIIME1DemuxDirFmt = model.SingleFileDirectoryFormat( 'QIIME1DemuxDirFmt', 'seqs.fna', QIIME1DemuxFormat) plugin.register_formats( FastqManifestFormat, YamlFormat, FastqGzFormat, CasavaOneEightSingleLanePerSampleDirFmt, CasavaOneEightLanelessPerSampleDirFmt, _SingleLanePerSampleFastqDirFmt, SingleLanePerSampleSingleEndFastqDirFmt, SingleLanePerSamplePairedEndFastqDirFmt, SingleEndFastqManifestPhred33, SingleEndFastqManifestPhred64, PairedEndFastqManifestPhred33, PairedEndFastqManifestPhred64, SingleEndFastqManifestPhred33V2, SingleEndFastqManifestPhred64V2, PairedEndFastqManifestPhred33V2, PairedEndFastqManifestPhred64V2, QIIME1DemuxFormat, QIIME1DemuxDirFmt ) q2-types-2021.8.0/q2_types/per_sample_sequences/_transformer.py000066400000000000000000000206671412142116700244650ustar00rootroot00000000000000# ---------------------------------------------------------------------------- # Copyright (c) 2016-2021, QIIME 2 development team. # # Distributed under the terms of the Modified BSD License. # # The full license is in the file LICENSE, distributed with this software. # ---------------------------------------------------------------------------- import functools import re import warnings import skbio import yaml import pandas as pd import qiime2.util from ..plugin_setup import plugin from . import ( SingleLanePerSampleSingleEndFastqDirFmt, FastqAbsolutePathManifestFormat, FastqManifestFormat, FastqGzFormat, SingleLanePerSamplePairedEndFastqDirFmt, YamlFormat, CasavaOneEightSingleLanePerSampleDirFmt, CasavaOneEightLanelessPerSampleDirFmt, SingleEndFastqManifestPhred33, SingleEndFastqManifestPhred64, PairedEndFastqManifestPhred33, PairedEndFastqManifestPhred64, SingleEndFastqManifestPhred33V2, SingleEndFastqManifestPhred64V2, PairedEndFastqManifestPhred33V2, PairedEndFastqManifestPhred64V2, QIIME1DemuxDirFmt, ) from ._util import ( _single_lane_per_sample_fastq_helper, _dirfmt_to_casava, _parse_and_validate_manifest, _copy_with_compression, _fastq_manifest_helper, _phred64_warning, _write_phred64_to_phred33, _manifest_v2_to_v1, _manifest_to_df, ) # Partially apply the helpers here, to cut down on boilerplate. 
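# A quick sketch of the pattern used below (the names in this sketch are
# illustrative only and are not part of this module): binding the concrete
# format classes once via functools.partial keeps the shared helpers in
# `_util.py` free of format-specific imports (see the dependency-injection
# note at the top of that module).
#
#     helper_partial = functools.partial(_helper, fmt=SomeFormat)
#     helper_partial(dirfmt)  # same as calling _helper(dirfmt, fmt=SomeFormat)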
_fastq_manifest_helper_partial = functools.partial( _fastq_manifest_helper, se_fmt=SingleLanePerSampleSingleEndFastqDirFmt, pe_fmt=SingleLanePerSamplePairedEndFastqDirFmt, abs_manifest_fmt=FastqAbsolutePathManifestFormat, manifest_fmt=FastqManifestFormat, yaml_fmt=YamlFormat, ) _parse_and_validate_manifest_partial = functools.partial( _parse_and_validate_manifest, abs_manifest_fmt=FastqAbsolutePathManifestFormat, manifest_fmt=FastqManifestFormat, ) _single_lane_per_sample_fastq_helper_partial = functools.partial( _single_lane_per_sample_fastq_helper, manifest_fmt=FastqManifestFormat, fastq_fmt=FastqGzFormat, yaml_fmt=YamlFormat, ) _dirfmt_to_casava_partial = functools.partial( _dirfmt_to_casava, manifest_fmt=FastqManifestFormat, abs_manifest_fmt=FastqAbsolutePathManifestFormat, fastq_fmt=FastqGzFormat, casava_fmt=CasavaOneEightSingleLanePerSampleDirFmt, ) @plugin.register_transformer def _3(dirfmt: CasavaOneEightSingleLanePerSampleDirFmt) \ -> SingleLanePerSampleSingleEndFastqDirFmt: return _single_lane_per_sample_fastq_helper_partial( dirfmt, SingleLanePerSampleSingleEndFastqDirFmt) @plugin.register_transformer def _3_and_a_half(dirfmt_in: SingleLanePerSampleSingleEndFastqDirFmt) \ -> CasavaOneEightSingleLanePerSampleDirFmt: return _dirfmt_to_casava_partial(dirfmt_in) @plugin.register_transformer def _4(dirfmt: CasavaOneEightSingleLanePerSampleDirFmt) \ -> SingleLanePerSamplePairedEndFastqDirFmt: return _single_lane_per_sample_fastq_helper_partial( dirfmt, SingleLanePerSamplePairedEndFastqDirFmt) @plugin.register_transformer def _4_and_a_half(dirfmt_in: SingleLanePerSamplePairedEndFastqDirFmt) \ -> CasavaOneEightSingleLanePerSampleDirFmt: return _dirfmt_to_casava_partial(dirfmt_in) @plugin.register_transformer def _10(dirfmt: CasavaOneEightLanelessPerSampleDirFmt) \ -> SingleLanePerSampleSingleEndFastqDirFmt: return _single_lane_per_sample_fastq_helper_partial( dirfmt, SingleLanePerSampleSingleEndFastqDirFmt, parse_lane=False) @plugin.register_transformer def _11(dirfmt: CasavaOneEightLanelessPerSampleDirFmt) \ -> SingleLanePerSamplePairedEndFastqDirFmt: return _single_lane_per_sample_fastq_helper_partial( dirfmt, SingleLanePerSamplePairedEndFastqDirFmt, parse_lane=False) @plugin.register_transformer def _5(dirfmt: SingleLanePerSamplePairedEndFastqDirFmt) \ -> SingleLanePerSampleSingleEndFastqDirFmt: with dirfmt.manifest.view(FastqManifestFormat).open() as fh: input_manifest = _parse_and_validate_manifest_partial( fh, single_end=False, absolute=False) output_manifest = FastqManifestFormat() output_df = input_manifest[input_manifest.direction == 'forward'] with output_manifest.open() as fh: output_df.to_csv(fh, index=False) result = SingleLanePerSampleSingleEndFastqDirFmt() result.manifest.write_data(output_manifest, FastqManifestFormat) for _, _, filename, _ in output_df.itertuples(): qiime2.util.duplicate(str(dirfmt.path / filename), str(result.path / filename)) metadata = YamlFormat() metadata.path.write_text(yaml.dump({'phred-offset': 33})) result.metadata.write_data(metadata, YamlFormat) return result @plugin.register_transformer def _6(fmt: SingleEndFastqManifestPhred33) \ -> SingleLanePerSampleSingleEndFastqDirFmt: return _fastq_manifest_helper_partial(fmt, _copy_with_compression, single_end=True) @plugin.register_transformer def _7(fmt: SingleEndFastqManifestPhred64) \ -> SingleLanePerSampleSingleEndFastqDirFmt: warnings.warn(_phred64_warning) return _fastq_manifest_helper_partial(fmt, _write_phred64_to_phred33, single_end=True) @plugin.register_transformer def _8(fmt: 
PairedEndFastqManifestPhred33) \ -> SingleLanePerSamplePairedEndFastqDirFmt: return _fastq_manifest_helper_partial(fmt, _copy_with_compression, single_end=False) @plugin.register_transformer def _9(fmt: PairedEndFastqManifestPhred64) \ -> SingleLanePerSamplePairedEndFastqDirFmt: warnings.warn(_phred64_warning) return _fastq_manifest_helper_partial(fmt, _write_phred64_to_phred33, single_end=False) @plugin.register_transformer def _12(dirfmt: SingleLanePerSampleSingleEndFastqDirFmt) \ -> QIIME1DemuxDirFmt: with dirfmt.manifest.view(FastqManifestFormat).open() as fh: input_manifest = _parse_and_validate_manifest_partial( fh, single_end=True, absolute=False) result = QIIME1DemuxDirFmt() fp = str(result.path / 'seqs.fna') with open(fp, 'w') as fh: i = 0 for r in input_manifest.iterrows(): sample_id = r[1]['sample-id'] filename = r[1]['filename'] if re.search(r"\s", sample_id) is not None: raise ValueError( "Whitespace was found in the ID for sample %s. Sample " "IDs with whitespace are incompatible with FASTA." % sample_id) fq_reader = skbio.io.read('%s/%s' % (str(dirfmt), filename), format='fastq', constructor=skbio.DNA, phred_offset=33, verify=False) for seq in fq_reader: seq.metadata['id'] = '%s_%d' % (sample_id, i) seq.write(fh) i += 1 return result @plugin.register_transformer def _21(ff: FastqManifestFormat) -> pd.DataFrame: return _manifest_to_df(ff, ff.path.parent) @plugin.register_transformer def _23(fmt: SingleEndFastqManifestPhred33V2) \ -> SingleLanePerSampleSingleEndFastqDirFmt: old_fmt = _manifest_v2_to_v1(fmt, FastqManifestFormat) return _fastq_manifest_helper_partial(old_fmt, _copy_with_compression, single_end=True) @plugin.register_transformer def _24(fmt: SingleEndFastqManifestPhred64V2) \ -> SingleLanePerSampleSingleEndFastqDirFmt: warnings.warn(_phred64_warning) old_fmt = _manifest_v2_to_v1(fmt, FastqManifestFormat) return _fastq_manifest_helper_partial(old_fmt, _write_phred64_to_phred33, single_end=True) @plugin.register_transformer def _25(fmt: PairedEndFastqManifestPhred33V2) \ -> SingleLanePerSamplePairedEndFastqDirFmt: old_fmt = _manifest_v2_to_v1(fmt, FastqManifestFormat) return _fastq_manifest_helper_partial(old_fmt, _copy_with_compression, single_end=False) @plugin.register_transformer def _26(fmt: PairedEndFastqManifestPhred64V2) \ -> SingleLanePerSamplePairedEndFastqDirFmt: warnings.warn(_phred64_warning) old_fmt = _manifest_v2_to_v1(fmt, FastqManifestFormat) return _fastq_manifest_helper_partial(old_fmt, _write_phred64_to_phred33, single_end=False) q2-types-2021.8.0/q2_types/per_sample_sequences/_type.py000066400000000000000000000033121412142116700230700ustar00rootroot00000000000000# ---------------------------------------------------------------------------- # Copyright (c) 2016-2021, QIIME 2 development team. # # Distributed under the terms of the Modified BSD License. # # The full license is in the file LICENSE, distributed with this software. # ---------------------------------------------------------------------------- from qiime2.plugin import SemanticType from ..plugin_setup import plugin from ..sample_data import SampleData from . 
import (QIIME1DemuxDirFmt, SingleLanePerSampleSingleEndFastqDirFmt, SingleLanePerSamplePairedEndFastqDirFmt) Sequences = SemanticType('Sequences', variant_of=SampleData.field['type']) SequencesWithQuality = SemanticType( 'SequencesWithQuality', variant_of=SampleData.field['type']) PairedEndSequencesWithQuality = SemanticType( 'PairedEndSequencesWithQuality', variant_of=SampleData.field['type']) JoinedSequencesWithQuality = SemanticType( 'JoinedSequencesWithQuality', variant_of=SampleData.field['type']) plugin.register_semantic_types(Sequences, SequencesWithQuality, PairedEndSequencesWithQuality, JoinedSequencesWithQuality) plugin.register_semantic_type_to_format( SampleData[Sequences], artifact_format=QIIME1DemuxDirFmt ) plugin.register_semantic_type_to_format( SampleData[SequencesWithQuality], artifact_format=SingleLanePerSampleSingleEndFastqDirFmt ) plugin.register_semantic_type_to_format( SampleData[JoinedSequencesWithQuality], artifact_format=SingleLanePerSampleSingleEndFastqDirFmt ) plugin.register_semantic_type_to_format( SampleData[PairedEndSequencesWithQuality], artifact_format=SingleLanePerSamplePairedEndFastqDirFmt ) q2-types-2021.8.0/q2_types/per_sample_sequences/_util.py000066400000000000000000000315741412142116700230770ustar00rootroot00000000000000# ---------------------------------------------------------------------------- # Copyright (c) 2016-2021, QIIME 2 development team. # # Distributed under the terms of the Modified BSD License. # # The full license is in the file LICENSE, distributed with this software. # ---------------------------------------------------------------------------- import collections import gzip import os import shutil import pandas as pd import qiime2.util import skbio import yaml # Note: we DI all of the formats into these utils so that we don't wind # up in circular import mayhem. That is all. 
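#
# For orientation, a worked example of the CASAVA filename parsing implemented
# just below (the example filename is taken from this package's test data; the
# return value follows directly from the rsplit logic in
# `_parse_casava_filename`):
#
#     _parse_casava_filename('Human-Kneecap_S1_L001_R1_001.fastq.gz')
#     # -> ('Human-Kneecap', 'S1', 1, 1, 'forward')
#
# With parse_lane=False the lane field is absent from the filename and the
# lane number is reported as 1.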
def _parse_casava_filename(path, parse_lane=True): directions = ['forward', 'reverse'] filename = str(path).replace('.fastq.gz', '') if parse_lane: sample_id, barcode_id, lane_number, read_number, _ = \ filename.rsplit('_', maxsplit=4) else: sample_id, barcode_id, read_number, _ = \ filename.rsplit('_', maxsplit=3) read_number = int(read_number[1:]) lane_number = int(lane_number[1:]) if parse_lane else 1 direction = directions[read_number - 1] return sample_id, barcode_id, lane_number, read_number, direction def _single_lane_per_sample_fastq_helper(dirfmt, output_cls, manifest_fmt, fastq_fmt, yaml_fmt, parse_lane=True): result = output_cls() manifest = manifest_fmt() manifest_fh = manifest.open() manifest_fh.write('sample-id,filename,direction\n') for path, view in dirfmt.sequences.iter_views(fastq_fmt): parsed = _parse_casava_filename(path, parse_lane) sample_id, barcode_id, lane_number, read_number, direction = parsed result.sequences.write_data(view, fastq_fmt, sample_id=sample_id, barcode_id=barcode_id, lane_number=lane_number, read_number=read_number) filepath = result.sequences.path_maker(sample_id=sample_id, barcode_id=barcode_id, lane_number=lane_number, read_number=read_number) name = filepath.name manifest_fh.write('%s,%s,%s\n' % (sample_id, name, direction)) manifest_fh.close() result.manifest.write_data(manifest, manifest_fmt) metadata = yaml_fmt() metadata.path.write_text(yaml.dump({'phred-offset': 33})) result.metadata.write_data(metadata, yaml_fmt) return result def _dirfmt_to_casava(dirfmt_in, manifest_fmt, abs_manifest_fmt, fastq_fmt, casava_fmt): dirfmt_out = casava_fmt() for fastq, _ in dirfmt_in.sequences.iter_views(fastq_fmt): from_fp = str(dirfmt_in.path / fastq.name) to_fp = str(dirfmt_out.path / fastq.name) qiime2.util.duplicate(from_fp, to_fp) return dirfmt_out def _parse_and_validate_manifest(manifest_fh, single_end, absolute, abs_manifest_fmt, manifest_fmt): try: manifest = pd.read_csv(manifest_fh, comment='#', header=0, skip_blank_lines=True, dtype=object) except Exception as e: raise ValueError('There was an issue parsing the manifest ' 'file as CSV:\n %s' % e) expected_header = (abs_manifest_fmt.EXPECTED_HEADER if absolute else manifest_fmt.EXPECTED_HEADER) _validate_header(manifest, expected_header) for idx in manifest.index: record = manifest.loc[idx] if record.isnull().any(): raise ValueError('Empty cells are not supported in ' 'manifest files. Found one or more ' 'empty cells in this record: %s' % ','.join(map(str, record))) record[expected_header[1]] = \ os.path.expandvars(record[expected_header[1]]) path = record[expected_header[1]] if absolute: if not os.path.isabs(path): raise ValueError('All paths provided in manifest must be ' 'absolute but found relative path: %s' % path) else: if os.path.isabs(path): raise ValueError('All paths provided in manifest must be ' 'relative but found absolute path: %s' % path) path = os.path.join(os.path.dirname(manifest_fh.name), path) if not os.path.exists(path): raise FileNotFoundError( 'A path specified in the manifest does not exist ' 'or is not accessible: ' '%s' % path) if single_end: _validate_single_end_fastq_manifest_directions(manifest) else: _validate_paired_end_fastq_manifest_directions(manifest) return manifest def _validate_header(manifest, expected_header): header = manifest.columns.tolist() if header != expected_header: raise ValueError('Expected manifest header %r but ' 'found %r.' 
% (','.join(expected_header), ','.join(header))) def _duplicated_ids(sample_ids): counts = collections.Counter(sample_ids).most_common() if len(counts) == 0 or counts[0][1] == 1: # if there were no sample ids provided, or the most frequent sample id # was only observed once, there are no duplicates return [] else: return [e[0] for e in counts if e[1] > 1] def _validate_single_end_fastq_manifest_directions(manifest): directions = set(manifest['direction']) if not directions.issubset({'forward', 'reverse'}): raise ValueError('Directions can only be "forward" or ' '"reverse", but observed: %s' % ', '.join(directions)) if len(directions) > 1: raise ValueError('Manifest for single-end reads can ' 'contain only forward or reverse reads, ' 'but not both. The following directions were ' 'observed: %s' % ', '.join(directions)) duplicated_ids = _duplicated_ids(manifest['sample-id']) if len(duplicated_ids) > 0: raise ValueError('Each sample id can only appear one time in a ' 'manifest for single-end reads, but the following ' 'sample ids were observed more than once: ' '%s' % ', '.join(duplicated_ids)) def _validate_paired_end_fastq_manifest_directions(manifest): forward_direction_sample_ids = [] reverse_direction_sample_ids = [] for _, sample_id, _, direction in manifest.itertuples(): if direction == 'forward': forward_direction_sample_ids.append(sample_id) elif direction == 'reverse': reverse_direction_sample_ids.append(sample_id) else: raise ValueError('Directions can only be "forward" or ' '"reverse", but observed: %s' % direction) duplicated_ids_forward = _duplicated_ids(forward_direction_sample_ids) if len(duplicated_ids_forward) > 0: raise ValueError('Each sample id can have only one forward read ' 'record in a paired-end read manifest, but the ' 'following sample ids were associated with more ' 'than one forward read record: ' '%s' % ', '.join(duplicated_ids_forward)) duplicated_ids_reverse = _duplicated_ids(reverse_direction_sample_ids) if len(duplicated_ids_reverse) > 0: raise ValueError('Each sample id can have only one reverse read ' 'record in a paired-end read manifest, but the ' 'following sample ids were associated with more ' 'than one reverse read record: ' '%s' % ', '.join(duplicated_ids_reverse)) if sorted(forward_direction_sample_ids) != \ sorted(reverse_direction_sample_ids): forward_but_no_reverse = set(forward_direction_sample_ids) - \ set(reverse_direction_sample_ids) if len(forward_but_no_reverse) > 0: raise ValueError('Forward and reverse reads must be provided ' 'exactly one time each for each sample. The ' 'following samples had forward but not ' 'reverse read fastq files: %s' % ', '.join(forward_but_no_reverse)) else: reverse_but_no_forward = set(reverse_direction_sample_ids) - \ set(forward_direction_sample_ids) raise ValueError('Forward and reverse reads must be provided ' 'exactly one time each for each sample. 
The ' 'following samples had reverse but not ' 'forward read fastq files: %s' % ', '.join(reverse_but_no_forward)) def _copy_with_compression(src, dst): with open(src, 'rb') as src_fh: if src_fh.read(2)[:2] != b'\x1f\x8b': src_fh.seek(0) # SO: http://stackoverflow.com/a/27069578/579416 # shutil.copyfileobj will pick a pretty good chunksize for us with gzip.open(dst, 'wb') as dst_fh: shutil.copyfileobj(src_fh, dst_fh) return qiime2.util.duplicate(src, dst) def _fastq_manifest_helper(fmt, fastq_copy_fn, single_end, se_fmt, pe_fmt, abs_manifest_fmt, manifest_fmt, yaml_fmt): direction_to_read_number = {'forward': 1, 'reverse': 2} input_manifest = _parse_and_validate_manifest( fmt.open(), single_end=single_end, absolute=True, abs_manifest_fmt=abs_manifest_fmt, manifest_fmt=manifest_fmt, ) if single_end: result = se_fmt() else: result = pe_fmt() output_manifest_data = [] for idx, sample_id, input_fastq_fp, direction in \ input_manifest.itertuples(): read_number = direction_to_read_number[direction] output_fastq_fp = \ result.sequences.path_maker(sample_id=sample_id, # the remaining values aren't used # internally by QIIME, so their values # aren't very important barcode_id=idx, lane_number=1, read_number=read_number) output_manifest_data.append( [sample_id, output_fastq_fp.name, direction]) fastq_copy_fn(input_fastq_fp, str(output_fastq_fp)) output_manifest = manifest_fmt() output_manifest_df = \ pd.DataFrame(output_manifest_data, columns=output_manifest.EXPECTED_HEADER) output_manifest_df.to_csv(str(output_manifest), index=False) result.manifest.write_data(output_manifest, manifest_fmt) metadata = yaml_fmt() metadata.path.write_text(yaml.dump({'phred-offset': 33})) result.metadata.write_data(metadata, yaml_fmt) return result _phred64_warning = ('Importing of PHRED 64 data is slow as it is converted ' 'internally to PHRED 33. 
Working with the imported data ' 'will not be slower than working with PHRED 33 data.') def _write_phred64_to_phred33(phred64_path, phred33_path): with open(phred64_path, 'rb') as phred64_fh, \ open(phred33_path, 'wb') as phred33_fh: for seq in skbio.io.read(phred64_fh, format='fastq', variant='illumina1.3'): skbio.io.write(seq, into=phred33_fh, format='fastq', variant='illumina1.8', compression='gzip') def _manifest_v2_to_v1(fmt, manifest_fmt): df = qiime2.Metadata.load(str(fmt)).to_dataframe() # Drop unneccessary metadata columns df = df[list(fmt.METADATA_COLUMNS.keys())] denormalized_dfs = [] for column, direction in fmt.METADATA_COLUMNS.items(): denormalized_df = df[[column]] original_index_name = denormalized_df.index.name denormalized_df.reset_index(drop=False, inplace=True) denormalized_df.rename(columns={ original_index_name: 'sample-id', column: 'absolute-filepath' }, inplace=True) denormalized_df['direction'] = direction denormalized_dfs.append(denormalized_df) old_fmt = manifest_fmt() pd.concat(denormalized_dfs, axis=0).to_csv(str(old_fmt), index=False) return old_fmt def _manifest_to_df(ff, base_dir): manifest = pd.read_csv(str(ff), header=0, comment='#') manifest.filename = manifest.filename.apply( lambda f: os.path.join(base_dir, f)) df = manifest.pivot(index='sample-id', columns='direction', values='filename') df.columns.name = None return df q2-types-2021.8.0/q2_types/per_sample_sequences/tests/000077500000000000000000000000001412142116700225415ustar00rootroot00000000000000q2-types-2021.8.0/q2_types/per_sample_sequences/tests/__init__.py000066400000000000000000000005351412142116700246550ustar00rootroot00000000000000# ---------------------------------------------------------------------------- # Copyright (c) 2016-2021, QIIME 2 development team. # # Distributed under the terms of the Modified BSD License. # # The full license is in the file LICENSE, distributed with this software. 
# ---------------------------------------------------------------------------- q2-types-2021.8.0/q2_types/per_sample_sequences/tests/data/000077500000000000000000000000001412142116700234525ustar00rootroot00000000000000q2-types-2021.8.0/q2_types/per_sample_sequences/tests/data/Human-Armpit.fastq000066400000000000000000000036741412142116700270260ustar00rootroot00000000000000@M00899:113:000000000-A5K20:1:1101:18850:2539 1:N:0:2 CGGGGGGCAGCAGTGGGGAATATTGCACAATGGGCGAAAGCCTGATGCAGCAACGCCGCGTGAACGATGAAGGTCTTCGGATCGTAAAGTTCTGTTGCAGGGGAAGATAATGACGGTACCCTGTGAGGAAGCCCCGGCTAACTACGTGCCAGCAGCCGCGGTAATACGTAGGGGGCTAGCGTTATCCGGATTTACTGGGCGTAAAGGGTGCGTAGGTGGTCCTTCAAGTCGGTGGTTAAAGGCTAAGGCTCAACCGTAGTAAGCCGCCGAAACTGGAGGACTTGAGTGAAGGAGAGGAAAA + -8ABCC>=>5811884:<:99=?@EECFFGDFADECFFFEEDDEFEDEDFFFEEFCCBCF>CCB3CFF:BBFFFCCD,8@9C@C:+5@@:A@CFFFDGCCEC?FGGGGGGGGGCFGGFCFGGGGGGGGGGEG7CFFGFFFGGGFG?FACE;:8CCCCEEF9FF8C758CGGG2:7DC>EECEFE9+27CF492/8B7>D)7@F=FFCFF*9F52<2,289<0:44AB<49(3<>F51).69D?D34*44:4<5C>BFFEGFFFFF:FGGF=6=6:AFBGFFFFFFA9AC:0 @M00899:113:000000000-A5K20:1:1101:25177:3605 1:N:0:2 CCTACGGGAGGCAGCAGTGAGGAATATTGGTCAATGGACGGAAGTCTGAACCAGCCAAGTAGCGTGCAGGATGACGGCCCTATGGGTTGTAAACTGCTTTTGTATGGGGATAAAGTTAGGGACGTGTCCCTATTTGCAGGTACCATACGAATAAGGACCGGCTAATTCCGTGCCAGCAGCCGCGGTAATACGGAAGGTCCAGGCGTTATCCGGATTTATTGGGTTTAAAGGGAGCGTAGGCTGGAGATTAAGTGTGTTGTGAAATGTAGACGCTCAACGTCTGAATTGCAGCGCATAGGGG + 88BCCEDAD9018======;;CCFGGGGFGGGFGGGGGGGGGGGGGGGGGGGGGGGFGGGGGGGGGGGGGGGGGGGGGGGGGGGGGGGGGGGFGGGGGGGGGGGGGFGGGGGGGGGGGGGGGGEGDGGGGGGGGGFFGGGGGGGGGFGGGFGGGFGGGFFGCGGGGGGFGGFDGGGGGGGGGGGGG5CBFGCGGGGC?FGGGGGGGGGGGDEGDDDGFGGGGGEGGGGGGA39>BFFDDEF4:D5@CE?CFFF>>ABGFFF9F?DF?02211:DAF7 q2-types-2021.8.0/q2_types/per_sample_sequences/tests/data/Human-Armpit.fastq.gz000066400000000000000000000013771412142116700274430ustar00rootroot00000000000000XHuman-Armpit_S1_L001_R1_001.fastqr! @}l @~8 }hxdg3@o&{cpP%qtprVo,g[{ʧʹ[(@M_8XJY=U.Y͵U!έkaCU1W*y*%Yw8R+x/J4k 8WC/Һ5O AWF~]]sX.1Ncc'1Y޼d&?0Ljʤg2 2yP}?;{8ћ;wCa}P‘{L[?@0U_e¤ 8{ iXx 6]׶o3q2-types-2021.8.0/q2_types/per_sample_sequences/tests/data/Human-Armpit_S2_L001_R1_001.fastq.gz000066400000000000000000000013771412142116700314250ustar00rootroot00000000000000XHuman-Armpit_S1_L001_R1_001.fastqr! @}l @~8 }hxdg3@o&{cpP%qtprVo,g[{ʧʹ[(@M_8XJY=U.Y͵U!έkaCU1W*y*%Yw8R+x/J4k 8WC/Һ5O AWF~]]sX.1Ncc'1Y޼d&?0Ljʤg2 2yP}?;{8ћ;wCa}P‘{L[?@0U_e¤ 8{ iXx 6]׶o3q2-types-2021.8.0/q2_types/per_sample_sequences/tests/data/Human-Armpit_S2_R1_001.fastq.gz000066400000000000000000000013771412142116700307310ustar00rootroot00000000000000XHuman-Armpit_S1_L001_R1_001.fastqr! 
@}l @~8 }hxdg3@o&{cpP%qtprVo,g[{ʧʹ[(@M_8XJY=U.Y͵U!έkaCU1W*y*%Yw8R+x/J4k 8WC/Һ5O AWF~]]sX.1Ncc'1Y޼d&?0Ljʤg2 2yP}?;{8ћ;wCa}P‘{L[?@0U_e¤ 8{ iXx 6]׶o3q2-types-2021.8.0/q2_types/per_sample_sequences/tests/data/Human-Kneecap_S1_L001_R1_001.fastq000066400000000000000000000036741412142116700311210ustar00rootroot00000000000000@M00899:113:000000000-A5K20:1:1101:18850:2539 1:N:0:2 GCTACGGGGGGCAGCAGTGGGGAATATTGCACAATGGGCGAAAGCCTGATGCAGCAACGCCGCGTGAACGATGAAGGTCTTCGGATCGTAAAGTTCTGTTGCAGGGGAAGATAATGACGGTACCCTGTGAGGAAGCCCCGGCTAACTACGTGCCAGCAGCCGCGGTAATACGTAGGGGGCTAGCGTTATCCGGATTTACTGGGCGTAAAGGGTGCGTAGGTGGTCCTTCAAGTCGGTGGTTAAAGGCTAAGGCTCAACCGTAGTAAGCCGCCGAAACTGGAGGACTTGAGTGAAGGAGAGG + -8ABCC>=>5811884:<:99=?@EECFFGDFADECFFFEEDDEFEDEDFFFEEFCCBCF>CCB3CFF:BBFFFCCD,8@9C@C:+5@@:A@CFFFDGCCEC?FGGGGGGGGGCFGGFCFGGGGGGGGGGEG7CFFGFFFGGGFG?FACE;:8CCCCEEF9FF8C758CGGG2:7DC>EECEFE9+27CF492/8B7>D)7@F=FFCFF*9F52<2,289<0:44AB<49(3<>F51).69D?D34*44:4<5C>BFFEGFFFFF:FGGF=6=6:AFBGFFFFFFA9AC:0 @M00899:113:000000000-A5K20:1:1101:25177:3605 1:N:0:2 CCTACGGGAGGCAGCAGTGAGGAATATTGGTCAATGGACGGAAGTCTGAACCAGCCAAGTAGCGTGCAGGATGACGGCCCTATGGGTTGTAAACTGCTTTTGTATGGGGATAAAGTTAGGGACGTGTCCCTATTTGCAGGTACCATACGAATAAGGACCGGCTAATTCCGTGCCAGCAGCCGCGGTAATACGGAAGGTCCAGGCGTTATCCGGATTTATTGGGTTTAAAGGGAGCGTAGGCTGGAGATTAAGTGTGTTGTGAAATGTAGACGCTCAACGTCTGAATTGCAGCGCATACTGG + 88BCCEDAD9018======;;CCFGGGGFGGGFGGGGGGGGGGGGGGGGGGGGGGGFGGGGGGGGGGGGGGGGGGGGGGGGGGGGGGGGGGGFGGGGGGGGGGGGGFGGGGGGGGGGGGGGGGEGDGGGGGGGGGFFGGGGGGGGGFGGGFGGGFGGGFFGCGGGGGGFGGFDGGGGGGGGGGGGG5CBFGCGGGGC?FGGGGGGGGGGGDEGDDDGFGGGGGEGGGGGGA39>BFFDDEF4:D5@CE?CFFF>>ABGFFF9F?DF?02211:DAF7 q2-types-2021.8.0/q2_types/per_sample_sequences/tests/data/Human-Kneecap_S1_L001_R1_001.fastq.gz000066400000000000000000000013731412142116700315320ustar00rootroot00000000000000FEXHuman-Kneecap_S1_L001_R1_001.fastqr0 @'i"aj@N{KR^2IݙcK\5uڶ2^hEZ@gz C _)bƜE$I4b0W? "NQ_)eY<SSEiAD(D)`GBj7&+Z}[J1QmM]|v-i+Tꢅ(kPͦmIhT9<^-nD4sӲH)0p!D!"0F<˳+lh‹]f]Wf1i&2HL794jMH V?TtW)-q@g활_X@@:vϝd\fqOʓ|tl:߽t7?⦅ٴO?N.,7~[{:g372I;xcL|c2LblsAuse_PQRk2,iO(vN1Y=6s;7TuKJA+LjTʅIg)qjTY63=0rV#oTBԩj7Jp'‘]s%zySYiN btEZi&h%:[{F:tгZ`F_STSeXV6G2ZuDrJT+'CUiIyFIA* 6ZPpHS5$z7ԢEumE"IP)$CVltMdqhҶ9=-DsRKeS1ST#WG}g/Q|Q x4!9RSYy:D[.oxfTfkeH V>%Wt>.q 0+Jo\lswY'7}^Zs>l?<  w?_~\\8 &otr gr0`4}g&$3Yr0J fck;\>W5Tҟ Bm"*|R^88Bw9M)6PZ%ESNv5&^QUUrJ Ko֮|vע7ZD7*}u,f/4zu4۰\ `ޛ+C7.|% E\cg/7&_Ĥ5&[0I q2-types-2021.8.0/q2_types/per_sample_sequences/tests/data/incomplete-sep.fastq.gz000066400000000000000000000012171412142116700300560ustar00rootroot00000000000000Yincomplete-sep.fastqr )zl'֠68\d+Y* 'N;Lx1H1wu~=y:qg`0PE*2DX+#bR-X:"~x Q^T!wUتE<# Wtld-z8Lt VHZGDj"Ѻ"$Rm^k+ZQ&8`i[COwuYsNe )Q%f2)Řr)v+#¾(>(<)𤋮<YL-Faya9]6|ӄѯ..nq7~:{939L0پ3Ixcߙ,d9DEYP3]ϱ. 
F*ObqeN>)PVϜw9M)6PZ%ESNv5&^QUUrJ Ko֮|vע7ZD7*}u,f/4zu4۰\ `ޛ+C7.|% E\cg/7&Ĥ5&['0Iɟ{ Qq2-types-2021.8.0/q2_types/per_sample_sequences/tests/data/incomplete-sequence.fastq.gz000066400000000000000000000011731412142116700311000ustar00rootroot00000000000000Yincomplete-sequence.fastqے l]A68\t+Y* 'ta ~Gu9@sgokMݷ~OO*RA(*SĊI<%*Ղqe*R\ Q%ZN++ROrW)dž.NւS!ԋM'`xFM6-U$Bn!HzFԂm sz;}X܉e59-D91$c)F&)yDG̈́HyOebm4.ғceHV>%wt>ny'0FolswU'7|}^yya9_\6|ӄѯW8Niod&lL$>>,1+ `[Po%Wj[T iq1ڄi ŭ]\4TnWcRUUI+wĀviƇY^/yk9?LiJV{ħN]2@nr]yo=[[+A)o4]Jms `d?0^;` ^q2-types-2021.8.0/q2_types/per_sample_sequences/tests/data/invalid-direction-MANIFEST000066400000000000000000000001471412142116700302070ustar00rootroot00000000000000sample-id,absolute-filepath,direction Human-Kneecap,$path/Human-Kneecap_S1_L001_R1_001.fastq.gz,peanut q2-types-2021.8.0/q2_types/per_sample_sequences/tests/data/invalid-quality.fastq.gz000066400000000000000000000013311412142116700302430ustar00rootroot00000000000000bYinvalid-quality.fastqn0 @=6"%ДCўRRr2S @@*xIqg9G Ch;y8g; dzTrz",EDD#V*K['N$,)+BH0GQJqZPhg<\%V,pp'AÙԭR{%dTWSװVx C_oC<4R($I B x5$8$).Sn)-rRcCte9(F`)Mb*'PPs3HZ@7fDDD(ZF|r96I7jԛք P4[rJ(GYh [F(lDpr*2T_X/=V2̾XOa*U—> Y獌6.Ͽa:Og/ڲVojT[;UW k/ߞ;'S$d$)c=s2Rdzp21>"71kBkD^$an*T@?9!8vzdɳ}RSp}5eN)9Iyv޷ZpŅ$U>o4Lf>o㧹[-|Ku{/!y/#'9;U%sҍtk>Ȃٖ`K4uvq2-types-2021.8.0/q2_types/per_sample_sequences/tests/data/no-data-MANIFEST000066400000000000000000000001541412142116700261240ustar00rootroot00000000000000# line number one comment # line number 3 comment # line number 8 comment # line 9 comment # EOF below q2-types-2021.8.0/q2_types/per_sample_sequences/tests/data/not-MANIFEST000066400000000000000000000000351412142116700253770ustar00rootroot00000000000000direction,filename,sample-id q2-types-2021.8.0/q2_types/per_sample_sequences/tests/data/not-fastq.fastq.gz000066400000000000000000000000741412142116700270460ustar00rootroot00000000000000f?Xnot-fastq.fastq ,V,S/QHTHK,.)THIRH6q2-types-2021.8.0/q2_types/per_sample_sequences/tests/data/not-metadata.yml000066400000000000000000000000731412142116700265530ustar00rootroot00000000000000,This isn't metadata YAML: { Or any sort of: metadata, } q2-types-2021.8.0/q2_types/per_sample_sequences/tests/data/paired_end_data/000077500000000000000000000000001412142116700265355ustar00rootroot00000000000000Human-Kneecap_S1_L001_R2_001.fastq.gz000066400000000000000000000014731412142116700345400ustar00rootroot00000000000000q2-types-2021.8.0/q2_types/per_sample_sequences/tests/data/paired_end_dataEXHuman-Kneecap_S1_L001_R2_001.fastqŔ˖ y.1w"vJ8s9H%}0e4a>v/ZϽ|O%8 9#h|G&@4  x/W9.b` ҡp+NP0̥p֦)"@wpFtzy+ j.T4@-zD 9IiGI##~8VW;>[ ;–*VB4S N8!2TH RzQBE5&\؂ 8gR*ga">a:^:[Ka5bZLZ()Ԥza_O89w82&5e9MuUq&Z6V kWMyrVVc:ֽEp |8g&YN󢟘$$7ILқ,b_ 2 te $P9L$$V>A-PCReI2sH(g(fioMO7@9D?vvl]Gd=Ma\3,)@pSPf5y_П Ctv‛Cnjp`vj"j:keG,vR|JRϧgDLD H:.FIGz)tŐc+)Boy{I!8\$22q5VV+W-&kRڌI֥ 9 i/F&j4nPcq2-types-2021.8.0/q2_types/per_sample_sequences/tests/data/paired_end_data/MANIFEST000066400000000000000000000003271412142116700276700ustar00rootroot00000000000000# Produced by Super Duper Sequencing Machine sample-id,filename,direction Human-Kneecap,Human-Kneecap_S1_L001_R1_001.fastq.gz,forward # paired end data. 
Human-Kneecap,Human-Kneecap_S1_L001_R2_001.fastq.gz,reverse q2-types-2021.8.0/q2_types/per_sample_sequences/tests/data/partial-record.fastq.gz000066400000000000000000000001301412142116700300330ustar00rootroot00000000000000̍:Zpartial_record.fastqs502442]GSo#+C05225T02):q2-types-2021.8.0/q2_types/per_sample_sequences/tests/data/qiime1-demux-format/000077500000000000000000000000001412142116700272455ustar00rootroot00000000000000q2-types-2021.8.0/q2_types/per_sample_sequences/tests/data/qiime1-demux-format/blank-line.fna000066400000000000000000000000411412142116700317420ustar00rootroot00000000000000>my_id ACGT >something_else CAT q2-types-2021.8.0/q2_types/per_sample_sequences/tests/data/qiime1-demux-format/description-only.fna000066400000000000000000000000421412142116700332310ustar00rootroot00000000000000> only a description :( CATCATCAT q2-types-2021.8.0/q2_types/per_sample_sequences/tests/data/qiime1-demux-format/duplicate-ids.fna000066400000000000000000000000411412142116700324550ustar00rootroot00000000000000>foo_0 ACGT >bar_0 ACG >foo_0 AC q2-types-2021.8.0/q2_types/per_sample_sequences/tests/data/qiime1-demux-format/empty000066400000000000000000000000001412142116700303140ustar00rootroot00000000000000q2-types-2021.8.0/q2_types/per_sample_sequences/tests/data/qiime1-demux-format/empty-header.fna000066400000000000000000000000071412142116700323140ustar00rootroot00000000000000> ACGT q2-types-2021.8.0/q2_types/per_sample_sequences/tests/data/qiime1-demux-format/empty-seq.fna000066400000000000000000000000331412142116700316530ustar00rootroot00000000000000>id_0 AAA >id_1 >id_2 GGG q2-types-2021.8.0/q2_types/per_sample_sequences/tests/data/qiime1-demux-format/incomplete.fna000066400000000000000000000000061412142116700320660ustar00rootroot00000000000000>id_1 q2-types-2021.8.0/q2_types/per_sample_sequences/tests/data/qiime1-demux-format/invalid-header.fna000066400000000000000000000000131412142116700326010ustar00rootroot00000000000000my_id TGCA q2-types-2021.8.0/q2_types/per_sample_sequences/tests/data/qiime1-demux-format/long.fna000066400000000000000000000006111412142116700306700ustar00rootroot00000000000000>foo_0 ACGT >foo_1 ACGT >foo_2 ACGT >foo_3 ACGT >foo_4 ACGT >foo_5 ACGT >foo_6 ACGT >foo_7 ACGT >foo_8 ACGT >foo_9 ACGT >foo_10 ACGT >foo_11 ACGT >foo_12 ACGT >foo_13 ACGT >foo_14 ACGT >foo_15 ACGT >foo_16 ACGT >foo_17 ACGT >foo_18 ACGT >foo_19 ACGT >foo_20 ACGT >foo_21 ACGT >foo_22 ACGT >foo_23 ACGT >foo_24 ACGT >foo_25 ACGT >foo_26 ACGT >foo_27 ACGT >foo_28 ACGT >foo_29 ACGT >foo_30 ACGT q2-types-2021.8.0/q2_types/per_sample_sequences/tests/data/qiime1-demux-format/no-sample-id.fna000066400000000000000000000000111412142116700322100ustar00rootroot00000000000000>_0 ACGT q2-types-2021.8.0/q2_types/per_sample_sequences/tests/data/qiime1-demux-format/no-secondary-id.fna000066400000000000000000000000121412142116700327170ustar00rootroot00000000000000>id_ ACGT no-underscore-in-id.fna000066400000000000000000000000111412142116700334250ustar00rootroot00000000000000q2-types-2021.8.0/q2_types/per_sample_sequences/tests/data/qiime1-demux-format>id ACGT q2-types-2021.8.0/q2_types/per_sample_sequences/tests/data/qiime1-demux-format/not-dna.fna000066400000000000000000000000141412142116700312660ustar00rootroot00000000000000>my_id ACGU q2-types-2021.8.0/q2_types/per_sample_sequences/tests/data/qiime1-demux-format/short.fna000066400000000000000000000001561412142116700310740ustar00rootroot00000000000000>foo_0 ACGT >bar_0 A >bar_42 GGGGG >foo_1 A >id_with_underscores_yay CCCCCCCCC >__- TTTT >___- TTTTT >1_2 AAA 
q2-types-2021.8.0/q2_types/per_sample_sequences/tests/data/qiime1-demux-format/single-record.fna000066400000000000000000000000251412142116700324650ustar00rootroot00000000000000>id_abc AAAACGGTAGTA split-libraries-output.fna000066400000000000000000000022621412142116700343210ustar00rootroot00000000000000q2-types-2021.8.0/q2_types/per_sample_sequences/tests/data/qiime1-demux-format>PC.634_1 FLP3FBN01ELBSX orig_bc=ACAGAGTCGGCT new_bc=ACAGAGTCGGCT bc_diffs=0 CTGGGCCGTGTCTCAGTCCCAATGTGGCCGTTTACCCTCTCAGGCCGGCTACGCATCATCGCCTTGGTGGGCCGTTACCTCACCAACTAGCTAATGCGCCGCAGGTCCATCCATGTTCACGCCTTGATGGGCGCTTTAATATACTGAGCATGCGCTCTGTATACCTATCCGGTTTTAGCTACCGTTTCCAGCAGTTATCCCGGACACATGGGCTAGG >PC.634_2 FLP3FBN01EG8AX orig_bc=ACAGAGTCGGCT new_bc=ACAGAGTCGGCT bc_diffs=0 TTGGACCGTGTCTCAGTTCCAATGTGGGGGCCTTCCTCTCAGAACCCCTATCCATCGAAGGCTTGGTGGGCCGTTACCCCGCCAACAACCTAATGGAACGCATCCCCATCGATGACCGAAGTTCTTTAATAGTTCTACCATGCGGAAGAACTATGCCATCGGGTATTAATCTTTCTTTCGAAAGGCTATCCCCGAGTCATCGGCAGGTTGGATACGTGTTACTCACCCGTGCGCCGGTCGCCA >PC.354_3 FLP3FBN01EEWKD orig_bc=AGCACGAGCCTA new_bc=AGCACGAGCCTA bc_diffs=0 TTGGGCCGTGTCTCAGTCCCAATGTGGCCGATCAGTCTCTTAACTCGGCTATGCATCATTGCCTTGGTAAGCCGTTACCTTACCAACTAGCTAATGCACCGCAGGTCCATCCAAGAGTGATAGCAGAACCATCTTTCAAACTCTAGACATGCGTCTAGTGTTGTTATCCGGTATTAGCATCTGTTTCCAGGTGTTATCCCAGTCTCTTGGG >PC.481_4 FLP3FBN01DEHK3 orig_bc=ACCAGCGACTAG new_bc=ACCAGCGACTAG bc_diffs=0 CTGGGCCGTGTCTCAGTCCCAATGTGGCCGTTCAACCTCTCAGTCCGGCTACTGATCGTCGACTTGGTGAGCCGTTACCTCACCAACTATCTAATCAGACGCGAGCCCATCTTTCAGCGGATTGCTCCTTTGGTATTCCGGCGATGCCGCCAAAATCATTATGCGGTATTAGCAGTCGTTTCCAACTGTTGTCCCCCTCTGAAAGGCAGGTTGCTCACG q2-types-2021.8.0/q2_types/per_sample_sequences/tests/data/qiime1-demux-format/with-descriptions.fna000066400000000000000000000000751412142116700334140ustar00rootroot00000000000000>my_id my-description ACGT >your_id your description ! ACGT q2-types-2021.8.0/q2_types/per_sample_sequences/tests/data/relative_manifests/000077500000000000000000000000001412142116700273365ustar00rootroot00000000000000q2-types-2021.8.0/q2_types/per_sample_sequences/tests/data/relative_manifests/jagged-MANIFEST000066400000000000000000000006231412142116700317070ustar00rootroot00000000000000sample-id,filename,direction Human-Kneecap,Human-Kneecap_S1_L001_R1_001.fastq.gz,forward Human-Kneecap,Human-Kneecap_S1_L001_R1_001.fastq.gz,reverse # TODO: Find human gut. 
Human-Gut,Human-Gut_S1_L001_R1_001.fastq.gz,forward Human-Gut,Human-Gut_S1_L001_R1_001.fastq.gz,reverse Human-Elbow,Human-Elbow_S1_L001_R1_001.fastq.gz,forward,hotdog,banana Human-Elbow,Human-Elbow_S1_L001_R1_001.fastq.gz,reverse q2-types-2021.8.0/q2_types/per_sample_sequences/tests/data/relative_manifests/long-MANIFEST000066400000000000000000000013271412142116700314270ustar00rootroot00000000000000 sample-id,filename,direction Human-Kneecap,Human-Kneecap_S1_L001_R1_001.fastq.gz,forward Human-Kneecap,Human-Kneecap_S1_L001_R1_001.fastq.gz,reverse Human-Gut,Human-Gut_S1_L001_R1_001.fastq.gz,forward Human-Gut,Human-Gut_S1_L001_R1_001.fastq.gz,reverse Human-Elbow,Human-Elbow_S1_L001_R1_001.fastq.gz,forward Human-Elbow,Human-Elbow_S1_L001_R1_001.fastq.gz,reverse # Comment regarding the relevance of cat guts Human-Hand,Human-Hand_S1_L001_R1_001.fastq.gz,forward Human-Hand,Human-Hand_S1_L001_R1_001.fastq.gz,reverse Human-Nose,Human-Nose_S1_L001_R1_001.fastq.gz,forward Human-Nose,Human-Nose_S1_L001_R1_001.fastq.gz,reverse Cat-Gut,Cat-Gut_S1_L001_R1_001.fastq.gz,forward Cat-Gut,Cat-Gut_S1_L001_R1_001.fastq.gz,reverse q2-types-2021.8.0/q2_types/per_sample_sequences/tests/data/relative_manifests/paired-MANIFEST000066400000000000000000000002371412142116700317330ustar00rootroot00000000000000sample-id,filename,direction Human-Kneecap,Human-Kneecap_S1_L001_R1_001.fastq.gz,forward Human-Kneecap,Human-Kneecap_S1_L001_R2_001.fastq.gz,reverse # bananaq2-types-2021.8.0/q2_types/per_sample_sequences/tests/data/relative_manifests/single-MANIFEST000066400000000000000000000001731412142116700317470ustar00rootroot00000000000000# knee cap sample-id,filename,direction # echo charley golf Human-Kneecap,Human-Kneecap_S1_L001_R1_001.fastq.gz,forward q2-types-2021.8.0/q2_types/per_sample_sequences/tests/data/s1-phred64.fastq000066400000000000000000000020131412142116700263030ustar00rootroot00000000000000@HWI-EAS440_0386:1:23:17547:1423#0/1 TACGNAGGATCCGAGCGTTATCCGGATTTATTGGGTTTAAAGGGAGCGTAGATGGATGTTTAAGTCAGTTGTGAAAGTTTGCGGCTCAACCGTAAAATTGCAGTTGATACTGGATATCTTGAGTGCAGTTGAGGCAGGGGGGGATTGGTGTG + hhhdHddddddddfehhfhhhghggfhhhfhhgggfhhgfgdfcfhehfdgfhggfggfggffgddfgdffdgdaagaaddcbdccc]a^ad__a]_____ba_`a`__^__\]^OWZR\Z\\WYTZ_U^BBBBBBBBBBBBBBBBBBBBBB @HWI-EAS440_0386:1:23:14818:1533#0/1 CCCCNCAGCGGCAAAAATTAAAATTTTTACCGCTTCGGCGTTATAGCCTCACACTCAATCTTTTATCACGAAGTCATGATTGAATCGCGAGTGGTCGGCAGATTGCGATAAACGGGCACATTAAATTTAAACTGATGATTCCACTGCAACAA + US[QCQSZPHNYIa[^aaccaac[]accBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBB @HWI-EAS440_0386:1:23:14401:1629#0/1 TACGNAGGATCCGAGCGTTATCCGGATTTATTGGGTTTAAAGGGAGCGTAGGCGGACGCTTAAGTCAGTTGTGAAAGTTTGCGGCTCAACCGTAAAATTGCAGTTGATACTGGGTGTCTTGAGTACAGTAGAGGCAGGGGGGGGGTTGGGGG + fffbF`bbWZZ]Zggggfgcgggggddggdgggggdbgdddgbgeggg`ffdggeabba`aaad]]]d\]`]`[]]aWaYa\aa`a`_````_^]^^^]]]X]_``____``BBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBB q2-types-2021.8.0/q2_types/per_sample_sequences/tests/data/s1-phred64.fastq.gz000066400000000000000000000007101412142116700267240ustar00rootroot00000000000000Xs1-phred64.fastqn0 yq⶙OՄ%]^eE?A?Jŀ00ڊȏ:ڭ6ۮykloַͪWHZ"M}_\D$BIM+d$[e5H(! QiTJ*+;AT+'vOeY>[,Ke)4岤Z}!GIA̙99y [αqbgun2yN4 ~tU[=^]m+X ;hUx 7Iިfx)E2 8*kU oEciY'5Zv ~9?-ڱr HJ?wc2lJ)|! 
h,gЊ bKw ѡƘ8ot9keKz`b^ q2-types-2021.8.0/q2_types/per_sample_sequences/tests/data/s2-phred64.fastq000066400000000000000000000020121412142116700263030ustar00rootroot00000000000000@HWI-EAS440_0386:1:23:15259:1649#0/1 TACGNAGGATCCGAGCGTTATCCGGATTTATTGGGTTTAAAGGGAGCGTAGGCGGACGCTTAAGTCAGTTGTGAAAGTTTGCGGCTCAACCGTAAAATTGCAGTTGATACTGGGTGTCTTGAGTACAGTAGAGGCAGGGGGGAGTTTGGGGG + hhhdHcdd^bbbahhhhhhfhhhhhghhhhhhhhhghhhgehchfhhhgghghfhhhgehgaffaecfdggdh\bafadddddgddfdbc^]a_\^_a``\X`^_`]`aaabcaYbY_^^X]^ZY^Z^a`_^aBBBBBBBBBBBBBBBBBBB @HWI-EAS440_0386:1:23:13748:2482#0/1 TACGNAGGATCCGAGCGTTATCCGGATTTATTGGGTTTAAAGGGAGCGTAGATGGATGTTTAAGTCAGTTGTGAAAGTTTGCGGCTCAACCGTAAAATTGCAGTTGATACTGGATATCTTGAGTGCAGTTGAGGCAGGCGGGATTCGTGGTG + fffbF^bbS[T[Uggggggg__gffddfggeggggaagfeefbfaaff_cfacfeceddgggegdgdgaggddcddd`a_aad`ddadd`daaWY]]Yddda_]_]]a_Y___X\_]YaY]\]WYNV]\]_BBBBBBBBBBBBBBBBBBBBB @HWI-EAS440_0386:1:23:6532:3028#0/1 TACGNAGGATCCGAGCGTTATCCGGATTTATTGGGTTTAAAGGGAGCGTAGGCGGACGCTTAAGTCAGTTGTGAAAGTTTGCGGCTCAACCGTAAAATTGCAGTTGATACTGGGTGTCTTGAGTACAGTAGAGGCAGGGGGGAGTCTTGGGG + hhhfHdffacdcchhhhghfhhhhfehhhchhhehghhgghhgehghhffghhhgghghddgggfhfgdhhdea`Wf^dddddbgagcdedgcfddbcddaddcefb_addddd_daaaaaaa`ca`aX^Z\X_^^^^BBBBBBBBBBBBBB q2-types-2021.8.0/q2_types/per_sample_sequences/tests/data/s2-phred64.fastq.gz000066400000000000000000000007171412142116700267340ustar00rootroot00000000000000Xs2-phred64.fastq͒Mr0 FNmIvrl֒#@AM3ӓz}x~<x8 ;yQ(QFkk˂t#fs#6Zdc5?X##<)fQi({dɶȉr|+}mۆuw%][F) KmY "ᚼ1.S!ky<{|le8p6*9 6R&,L;}/B(DL,KB9J C"HSf3 ` GͬCp190-0] `#ӱ~ߝX /aJ-5%B:{l+YڈӺax%_2 G[)'/90Z1Aߖ/\ E q2-types-2021.8.0/q2_types/per_sample_sequences/tests/data/single-end-two-sample-data1/000077500000000000000000000000001412142116700305555ustar00rootroot00000000000000q2-types-2021.8.0/q2_types/per_sample_sequences/tests/data/single-end-two-sample-data1/MANIFEST000066400000000000000000000003061412142116700317050ustar00rootroot00000000000000# Quantitative Insights Into Kneecaps and Armpits sample-id,filename,direction Human-Kneecap,Human-Kneecap_S1_L001_R1_001.fastq.gz,forward Human-Armpit,Human-Armpit_S2_L001_R1_001.fastq.gz,forward q2-types-2021.8.0/q2_types/per_sample_sequences/tests/data/single-end-two-sample-data2/000077500000000000000000000000001412142116700305565ustar00rootroot00000000000000q2-types-2021.8.0/q2_types/per_sample_sequences/tests/data/single-end-two-sample-data2/MANIFEST000066400000000000000000000003341412142116700317070ustar00rootroot00000000000000# Quantitative Insights Into Kneecaps and Armpits sample-id,filename,direction # space in sample-id Human-Kne ecap,Human-Kneecap_S1_L001_R1_001.fastq.gz,forward Human-Armpit,Human-Armpit_S2_L001_R1_001.fastq.gz,forward q2-types-2021.8.0/q2_types/per_sample_sequences/tests/data/single-end-two-sample-data3/000077500000000000000000000000001412142116700305575ustar00rootroot00000000000000q2-types-2021.8.0/q2_types/per_sample_sequences/tests/data/single-end-two-sample-data3/MANIFEST000066400000000000000000000003321412142116700317060ustar00rootroot00000000000000# Quantitative Insights Into Kneecaps and Armpits sample-id,filename,direction Human-Kneecap,Human-Kneecap_S1_L001_R1_001.fastq.gz,forward # tab in sample-id Human-Armp it,Human-Armpit_S2_L001_R1_001.fastq.gz,forward q2-types-2021.8.0/q2_types/per_sample_sequences/tests/data/single_end_data/000077500000000000000000000000001412142116700265525ustar00rootroot00000000000000q2-types-2021.8.0/q2_types/per_sample_sequences/tests/data/single_end_data/MANIFEST000066400000000000000000000002101412142116700276740ustar00rootroot00000000000000# Compelling 
observation regarding knee-caps sample-id,filename,direction Human-Kneecap,Human-Kneecap_S1_L001_R1_001.fastq.gz,forward q2-types-2021.8.0/q2_types/per_sample_sequences/tests/data/single_end_data/MANIFEST.txt000066400000000000000000000002071412142116700305200ustar00rootroot00000000000000# Compelling observation regarding knee-caps sample-id,filename,direction Human-Kneecap,Human-Kneecap_S1_L001_R1_001.fastq.gz,forward q2-types-2021.8.0/q2_types/per_sample_sequences/tests/test_format.py000066400000000000000000000567631412142116700254630ustar00rootroot00000000000000# ---------------------------------------------------------------------------- # Copyright (c) 2016-2021, QIIME 2 development team. # # Distributed under the terms of the Modified BSD License. # # The full license is in the file LICENSE, distributed with this software. # ---------------------------------------------------------------------------- import os.path import shutil import unittest import string import pandas as pd from q2_types.per_sample_sequences import ( CasavaOneEightSingleLanePerSampleDirFmt, CasavaOneEightLanelessPerSampleDirFmt, FastqGzFormat, YamlFormat, FastqManifestFormat, FastqAbsolutePathManifestFormat, SingleEndFastqManifestPhred33, SingleEndFastqManifestPhred64, PairedEndFastqManifestPhred33, PairedEndFastqManifestPhred64, SingleEndFastqManifestPhred33V2, SingleEndFastqManifestPhred64V2, PairedEndFastqManifestPhred33V2, PairedEndFastqManifestPhred64V2, SingleLanePerSampleSingleEndFastqDirFmt, SingleLanePerSamplePairedEndFastqDirFmt, QIIME1DemuxFormat, QIIME1DemuxDirFmt ) from qiime2.plugin.testing import TestPluginBase from qiime2.plugin import ValidationError class TestAbsoluteFastqManifestV2Formats(TestPluginBase): package = 'q2_types.per_sample_sequences.tests' def setUp(self): super().setUp() self.se_formats = [SingleEndFastqManifestPhred33V2, SingleEndFastqManifestPhred64V2] self.pe_formats = [PairedEndFastqManifestPhred33V2, PairedEndFastqManifestPhred64V2] def template_manifest(self, filepath, ctx): with open(filepath) as fh: tmpl = string.Template(fh.read()) basename = os.path.basename(filepath) file_ = os.path.join(self.temp_dir.name, basename) with open(file_, 'w') as fh: fh.write(tmpl.substitute(**ctx)) return file_ def test_validate_se_positive(self): s1 = self.get_data_path('Human-Kneecap_S1_L001_R1_001.fastq.gz') s2 = self.get_data_path('Human-Armpit.fastq.gz') fp = self.get_data_path('absolute_manifests_v2/single-MANIFEST') manifest = self.template_manifest(fp, {'s1': s1, 's2': s2}) for fmt in self.se_formats: fmt(manifest, mode='r').validate() def test_validate_pe_positive(self): s1f = self.get_data_path('Human-Kneecap_S1_L001_R1_001.fastq.gz') s1r = self.get_data_path('Human-Armpit.fastq.gz') s2f = self.get_data_path('Human-Armpit_S2_L001_R1_001.fastq.gz') s2r = self.get_data_path('Human-Kneecap_S1_R1_001.fastq.gz') fp = self.get_data_path('absolute_manifests_v2/paired-MANIFEST') manifest = self.template_manifest(fp, {'s1f': s1f, 's1r': s1r, 's2f': s2f, 's2r': s2r}) for fmt in self.pe_formats: fmt(manifest, mode='r').validate() def test_extra_columns(self): s1f = self.get_data_path('Human-Kneecap_S1_L001_R1_001.fastq.gz') s1r = self.get_data_path('Human-Armpit.fastq.gz') s2f = self.get_data_path('Human-Armpit_S2_L001_R1_001.fastq.gz') s2r = self.get_data_path('Human-Kneecap_S1_R1_001.fastq.gz') fp = self.get_data_path('absolute_manifests_v2/multicol-MANIFEST') manifest = self.template_manifest(fp, {'s1f': s1f, 's1r': s1r, 's2f': s2f, 's2r': s2r}) for fmt in self.se_formats: fmt(manifest, 
mode='r').validate() def test_invalid_metadata(self): manifest = self.get_data_path('absolute_manifests/single-MANIFEST') for fmt in self.se_formats: with self.assertRaisesRegex(ValidationError, 'unrecognized ID'): fmt(manifest, mode='r').validate() def test_missing_column_se(self): manifest = self.get_data_path('absolute_manifests_v2/paired-MANIFEST') for fmt in self.se_formats: with self.assertRaisesRegex(ValidationError, 'is not a column'): fmt(manifest, mode='r').validate() def test_missing_columns_pe(self): manifest = self.get_data_path('absolute_manifests_v2/single-MANIFEST') for fmt in self.pe_formats: with self.assertRaisesRegex(ValidationError, 'is not a column'): fmt(manifest, mode='r').validate() def test_invalid_column_type(self): manifest = self.get_data_path('absolute_manifests_v2/numeric-MANIFEST') for fmt in self.se_formats: with self.assertRaisesRegex(ValidationError, 'is not a column'): fmt(manifest, mode='r').validate() def test_missing_files(self): manifest = self.get_data_path('absolute_manifests_v2/missing-MANIFEST') for fmt in self.pe_formats: with self.assertRaisesRegex( ValidationError, 'Missing.*line 1.*absolute-filepath'): fmt(manifest, mode='r').validate() def test_path_not_found(self): # we make sure the file is missing by skipping the templating step manifest = self.get_data_path('absolute_manifests_v2/single-MANIFEST') for fmt in self.se_formats: with self.assertRaisesRegex( ValidationError, 'line 1.*absolute-filepath.*Human-Kneecap'): fmt(manifest, mode='r').validate() def test_duplicate_filepaths(self): s1 = self.get_data_path('Human-Kneecap_S1_L001_R1_001.fastq.gz') fp = self.get_data_path('absolute_manifests_v2/single-MANIFEST') manifest = self.template_manifest(fp, {'s1': s1, 's2': s1}) for fmt in self.se_formats: with self.assertRaisesRegex( ValidationError, 'line 2.*absolute-filepath.*Peanut-Eyeball.*' 'line 1.*absolute-filepath.*Human-Kneecap'): fmt(manifest, mode='r').validate() class TestAbsoluteFastqManifestFormats(TestPluginBase): package = 'q2_types.per_sample_sequences.tests' def setUp(self): super().setUp() self.formats = [FastqAbsolutePathManifestFormat, SingleEndFastqManifestPhred33, SingleEndFastqManifestPhred64, PairedEndFastqManifestPhred33, PairedEndFastqManifestPhred64] def test_validate_positive(self): s1 = self.get_data_path('Human-Kneecap_S1_L001_R1_001.fastq.gz') for file in ['single-MANIFEST', 'paired-MANIFEST', 'long-MANIFEST']: filepath = self.get_data_path('absolute_manifests/%s' % file) with open(filepath) as fh: tmpl = string.Template(fh.read()) file_ = os.path.join(self.temp_dir.name, file) with open(file_, 'w') as fh: fh.write(tmpl.substitute(path=os.path.dirname(s1))) for format in self.formats: format(file_, mode='r').validate() def test_validate_negative_no_data(self): filepath = self.get_data_path('no-data-MANIFEST') for format in self.formats: with self.assertRaisesRegex(ValidationError, 'No header found'): format(filepath, mode='r').validate() def test_validate_negative_empty(self): filepath = self.get_data_path('empty-MANIFEST') for format in self.formats: with self.assertRaisesRegex(ValidationError, 'No header found'): format(filepath, mode='r').validate() def test_validate_negative_header_no_records(self): filepath = self.get_data_path('empty-records-MANIFEST') for format in self.formats: with self.assertRaisesRegex(ValidationError, 'No sample records'): format(filepath, mode='r').validate() def test_validate_negative_not_manifest(self): filepath = self.get_data_path('not-MANIFEST') for format in self.formats: with 
self.assertRaisesRegex(ValidationError, 'line 1.*filename'): format(filepath, mode='r').validate() def test_validate_negative_jagged_manifest(self): filepath = self.get_data_path('absolute_manifests/jagged-MANIFEST') for format in self.formats: with self.assertRaisesRegex(ValidationError, 'line 3.*could not be found'): format(filepath, mode='r').validate() def test_validate_negative_invalid_direction(self): s1 = self.get_data_path('Human-Kneecap_S1_L001_R1_001.fastq.gz') with open(self.get_data_path('invalid-direction-MANIFEST')) as fh: tmpl = string.Template(fh.read()) file_ = os.path.join(self.temp_dir.name, 'invalid-direction-MANIFEST') with open(file_, 'w') as fh: fh.write(tmpl.substitute(path=os.path.dirname(s1))) for format in self.formats: with self.assertRaisesRegex(ValidationError, 'direction.*peanut'): format(file_, mode='r').validate() class TestRelativeFastqManifestFormats(TestPluginBase): package = 'q2_types.per_sample_sequences.tests' def test_validate_positive(self): for file in ['single-MANIFEST', 'paired-MANIFEST', 'long-MANIFEST']: filepath = self.get_data_path('relative_manifests/%s' % file) FastqManifestFormat(filepath, mode='r').validate() def test_validate_negative(self): files = ['no-data-MANIFEST', 'not-MANIFEST', 'relative_manifests/jagged-MANIFEST'] for file in files: filepath = self.get_data_path(file) with self.assertRaisesRegex(ValidationError, 'FastqManifestFormat'): FastqManifestFormat(filepath, mode='r').validate() class TestFastqGzFormat(TestPluginBase): package = 'q2_types.per_sample_sequences.tests' def test_validate_positive(self): filepath = self.get_data_path('Human-Kneecap_S1_L001_R1_001.fastq.gz') format = FastqGzFormat(filepath, mode='r') format.validate() def test_validate_negative(self): filepath = self.get_data_path('not-fastq.fastq.gz') format = FastqGzFormat(filepath, mode='r') with self.assertRaisesRegex(ValidationError, 'Header.*1'): format.validate() def test_validate_mixed_case(self): filepath = self.get_data_path('mixed-case.fastq.gz') format = FastqGzFormat(filepath, mode='r') with self.assertRaisesRegex(ValidationError, 'Lowercase.*2'): format.validate() def test_validate_uncompressed(self): filepath = self.get_data_path('Human-Kneecap_S1_L001_R1_001.fastq') format = FastqGzFormat(filepath, mode='r') with self.assertRaisesRegex(ValidationError, 'uncompressed'): format.validate() def test_incomplete_record_qual(self): filepath = self.get_data_path('incomplete-quality.fastq.gz') format = FastqGzFormat(filepath, mode='r') with self.assertRaisesRegex(ValidationError, 'quality.*9'): format.validate() def test_incomplete_record_sep(self): filepath = self.get_data_path('incomplete-sep.fastq.gz') format = FastqGzFormat(filepath, mode='r') with self.assertRaisesRegex(ValidationError, 'separator.*9'): format.validate() def test_incomplete_record_sequence(self): filepath = self.get_data_path('incomplete-sequence.fastq.gz') format = FastqGzFormat(filepath, mode='r') with self.assertRaisesRegex(ValidationError, 'sequence.*9'): format.validate() def test_invalid_record_sep(self): filepath = self.get_data_path('invalid-sep.fastq.gz') format = FastqGzFormat(filepath, mode='r') with self.assertRaisesRegex(ValidationError, 'separator.*11'): format.validate() def test_invalid_quality_score_length(self): filepath = self.get_data_path('invalid-quality.fastq.gz') format = FastqGzFormat(filepath, mode='r') with self.assertRaisesRegex(ValidationError, 'length.*9'): format.validate() def test_partial_record(self): filepath = 
self.get_data_path('partial-record.fastq.gz') format = FastqGzFormat(filepath, mode='r') with self.assertRaisesRegex(ValidationError, 'sequence.*1'): format.validate() class TestFormats(TestPluginBase): package = 'q2_types.per_sample_sequences.tests' def test_yaml_format_validate_positive(self): filepath = self.get_data_path('metadata.yml') format = YamlFormat(filepath, mode='r') format.validate() def test_yaml_format_validate_negative(self): filepath = self.get_data_path('not-metadata.yml') format = YamlFormat(filepath, mode='r') with self.assertRaisesRegex(ValidationError, 'YamlFormat'): format.validate() def test_casava_one_eight_slanepsample_dir_fmt_validate_positive(self): filepath = self.get_data_path('Human-Kneecap_S1_L001_R1_001.fastq.gz') shutil.copy(filepath, self.temp_dir.name) format = CasavaOneEightSingleLanePerSampleDirFmt( self.temp_dir.name, mode='r') format.validate() def test_casava_one_eight_slanepsample_dir_fmt_manifest_property(self): filepath = self.get_data_path('Human-Kneecap_S1_L001_R1_001.fastq.gz') shutil.copy(filepath, self.temp_dir.name) format = CasavaOneEightSingleLanePerSampleDirFmt( self.temp_dir.name, mode='r') format.validate() self.assertTrue(True) self.assertIsInstance(format.manifest, pd.DataFrame) def test_casava_one_eight_slanepsample_dir_fmt_validate_negative(self): filepath = self.get_data_path('not-fastq.fastq.gz') shutil.copy(filepath, self.temp_dir.name) format = CasavaOneEightSingleLanePerSampleDirFmt( self.temp_dir.name, mode='r') with self.assertRaisesRegex(ValidationError, 'CasavaOneEightSingleLanePer'): format.validate() def test_casava_one_eight_slanepsample_dir_fmt_subdirectories(self): bad_dir = os.path.join(self.temp_dir.name, 'Human_Kneecap') os.mkdir(bad_dir) bad_name = os.path.join(bad_dir, 'S1_L001_R1_001.fastq.gz') fastq = self.get_data_path('Human-Kneecap_S1_L001_R1_001.fastq.gz') shutil.copy(fastq, bad_name) format = CasavaOneEightSingleLanePerSampleDirFmt(self.temp_dir.name, mode='r') with self.assertRaisesRegex(ValidationError, 'subdirectory.*Human_Kneecap'): format.validate() def test_casava_one_eight_slanepsample_dir_fmt_missing_directions(self): f = self.get_data_path('Human-Kneecap_S1_L001_R1_001.fastq.gz') r = self.get_data_path( 'paired_end_data/Human-Kneecap_S1_L001_R2_001.fastq.gz') shutil.copy(f, self.temp_dir.name) shutil.copy(r, self.temp_dir.name) shutil.copy( f, os.path.join(self.temp_dir.name, 'Human-Other_S1_L001_R1_001.fastq.gz')) format = CasavaOneEightSingleLanePerSampleDirFmt(self.temp_dir.name, mode='r') with self.assertRaisesRegex(ValidationError, 'matching.*Human-Other'): format.validate() def test_casava_one_eight_slanepsample_dir_fmt_duplicate_forwards(self): f = self.get_data_path('Human-Kneecap_S1_L001_R1_001.fastq.gz') shutil.copy(f, self.temp_dir.name) shutil.copy( f, os.path.join(self.temp_dir.name, 'Human-Kneecap_S2_L001_R1_001.fastq.gz')) format = CasavaOneEightSingleLanePerSampleDirFmt(self.temp_dir.name, mode='r') with self.assertRaisesRegex(ValidationError, 'Duplicate.*Human-Kneecap'): format.validate() def test_casava_one_eight_slanepsample_dir_fmt_duplicate_reverse(self): r = self.get_data_path( 'paired_end_data/Human-Kneecap_S1_L001_R2_001.fastq.gz') shutil.copy(r, self.temp_dir.name) shutil.copy( r, os.path.join(self.temp_dir.name, 'Human-Kneecap_S2_L001_R2_001.fastq.gz')) format = CasavaOneEightSingleLanePerSampleDirFmt(self.temp_dir.name, mode='r') with self.assertRaisesRegex(ValidationError, 'Duplicate.*Human-Kneecap'): format.validate() def 
test_miseq_demux_dir_fmt_validate_positive(self): filepath = self.get_data_path('Human-Kneecap_S1_R1_001.fastq.gz') shutil.copy(filepath, self.temp_dir.name) format = CasavaOneEightLanelessPerSampleDirFmt(self.temp_dir.name, mode='r') format.validate() def test_miseq_demux_dir_fmt_validate_negative(self): filepath = self.get_data_path('not-fastq.fastq.gz') shutil.copy(filepath, self.temp_dir.name) format = CasavaOneEightLanelessPerSampleDirFmt(self.temp_dir.name, mode='r') with self.assertRaisesRegex(ValidationError, 'CasavaOneEightLanelessPerSampleDirFmt'): format.validate() def test_slanepsample_single_end_fastq_dir_fmt_validate_positive(self): filenames = ('single_end_data/MANIFEST', 'metadata.yml', 'Human-Kneecap_S1_L001_R1_001.fastq.gz') for filename in filenames: filepath = self.get_data_path(filename) shutil.copy(filepath, self.temp_dir.name) format = SingleLanePerSampleSingleEndFastqDirFmt( self.temp_dir.name, mode='r') format.validate() def test_slanepsample_single_end_fastq_dir_fmt_validate_negative(self): filenames = ('single_end_data/MANIFEST', 'metadata.yml', 'not-fastq.fastq.gz') for filename in filenames: filepath = self.get_data_path(filename) shutil.copy(filepath, self.temp_dir.name) format = SingleLanePerSampleSingleEndFastqDirFmt( self.temp_dir.name, mode='r') with self.assertRaisesRegex(ValidationError, 'SingleLanePerSampleSingle'): format.validate() def test_slanepsample_single_end_fastq_dir_fmt_validate_bad_paired(self): filenames = ('paired_end_data/MANIFEST', 'metadata.yml', 'Human-Kneecap_S1_L001_R1_001.fastq.gz', 'paired_end_data/Human-Kneecap_S1_L001_R2_001.fastq.gz') for filename in filenames: filepath = self.get_data_path(filename) shutil.copy(filepath, self.temp_dir.name) format = SingleLanePerSampleSingleEndFastqDirFmt( self.temp_dir.name, mode='r') with self.assertRaisesRegex(ValidationError, 'Forward and reverse'): format.validate() def test_slanepsample_paired_end_fastq_dir_fmt_validate_positive(self): filenames = ('paired_end_data/MANIFEST', 'metadata.yml', 'Human-Kneecap_S1_L001_R1_001.fastq.gz', 'paired_end_data/Human-Kneecap_S1_L001_R2_001.fastq.gz') for filename in filenames: filepath = self.get_data_path(filename) shutil.copy(filepath, self.temp_dir.name) format = SingleLanePerSamplePairedEndFastqDirFmt( self.temp_dir.name, mode='r') format.validate() def test_slanepsample_paired_end_fastq_dir_fmt_validate_negative(self): filenames = ('paired_end_data/MANIFEST', 'metadata.yml', 'not-fastq.fastq.gz') for filename in filenames: filepath = self.get_data_path(filename) shutil.copy(filepath, self.temp_dir.name) format = SingleLanePerSamplePairedEndFastqDirFmt( self.temp_dir.name, mode='r') with self.assertRaisesRegex(ValidationError, 'SingleLanePerSamplePaired'): format.validate() def test_slanepsample_paired_end_fastq_dir_fmt_validate_missing_pair(self): filenames = ('single_end_data/MANIFEST', 'metadata.yml', 'Human-Kneecap_S1_L001_R1_001.fastq.gz') for filename in filenames: filepath = self.get_data_path(filename) shutil.copy(filepath, self.temp_dir.name) format = SingleLanePerSamplePairedEndFastqDirFmt( self.temp_dir.name, mode='r') with self.assertRaisesRegex(ValidationError, 'paired'): format.validate() def test_slanepsample_paired_end_fastq_dir_fmt_incorrect_filenames(self): filenames = ('single_end_data/MANIFEST.txt', 'metadata.yml.txt', 'Human-Kneecap_S1_L001_R1_001.fastq.gz', 'paired_end_data/Human-Kneecap_S1_L001_R2_001.fastq.gz') for filename in filenames: filepath = self.get_data_path(filename) shutil.copy(filepath, self.temp_dir.name) format 
= SingleLanePerSamplePairedEndFastqDirFmt( self.temp_dir.name, mode='r') with self.assertRaisesRegex(ValidationError, 'Missing one or more files.*MANIFEST'): format.validate() class TestQIIME1DemuxFormat(TestPluginBase): package = 'q2_types.per_sample_sequences.tests' def setUp(self): super().setUp() self.positives = [ 'empty', 'short.fna', 'long.fna', 'single-record.fna', 'with-descriptions.fna', 'split-libraries-output.fna' ] self.negatives = [ 'incomplete.fna', 'empty-header.fna', 'invalid-header.fna', 'description-only.fna', 'blank-line.fna', 'no-underscore-in-id.fna', 'no-sample-id.fna', 'no-secondary-id.fna', 'duplicate-ids.fna', 'empty-seq.fna', 'not-dna.fna' ] def test_file_format_validate_positive(self): for file in self.positives: filepath = self.get_data_path('qiime1-demux-format/%s' % file) QIIME1DemuxFormat(filepath, mode='r').validate() def test_file_format_validate_negative(self): for file in self.negatives: filepath = self.get_data_path('qiime1-demux-format/%s' % file) with self.assertRaisesRegex(ValidationError, 'QIIME1DemuxFormat'): QIIME1DemuxFormat(filepath, mode='r').validate() def test_directory_format_validate_positive(self): for file in self.positives: filepath = self.get_data_path('qiime1-demux-format/%s' % file) shutil.copy(filepath, os.path.join(self.temp_dir.name, 'seqs.fna')) QIIME1DemuxDirFmt(self.temp_dir.name, mode='r').validate() def test_directory_format_validate_negative(self): for file in self.negatives: filepath = self.get_data_path('qiime1-demux-format/%s' % file) shutil.copy(filepath, os.path.join(self.temp_dir.name, 'seqs.fna')) with self.assertRaisesRegex(ValidationError, 'QIIME1DemuxFormat'): QIIME1DemuxDirFmt(self.temp_dir.name, mode='r').validate() def test_directory_format_wrong_filename(self): filepath = self.get_data_path('qiime1-demux-format/short.fna') shutil.copy(filepath, self.temp_dir.name) with self.assertRaisesRegex(ValidationError, r'QIIME1DemuxDirFmt.*seqs\.fna'): QIIME1DemuxDirFmt(self.temp_dir.name, mode='r').validate() if __name__ == "__main__": unittest.main() q2-types-2021.8.0/q2_types/per_sample_sequences/tests/test_transformer.py000066400000000000000000001373701412142116700265270ustar00rootroot00000000000000# ---------------------------------------------------------------------------- # Copyright (c) 2016-2021, QIIME 2 development team. # # Distributed under the terms of the Modified BSD License. # # The full license is in the file LICENSE, distributed with this software. 
# ---------------------------------------------------------------------------- import functools import unittest import os import shutil import io import string import skbio import yaml import pandas as pd from q2_types.per_sample_sequences import ( SingleLanePerSampleSingleEndFastqDirFmt, SingleLanePerSamplePairedEndFastqDirFmt, CasavaOneEightSingleLanePerSampleDirFmt, CasavaOneEightLanelessPerSampleDirFmt, SingleEndFastqManifestPhred33, SingleEndFastqManifestPhred64, PairedEndFastqManifestPhred33, PairedEndFastqManifestPhred64, FastqAbsolutePathManifestFormat, FastqManifestFormat, SingleEndFastqManifestPhred33V2, SingleEndFastqManifestPhred64V2, PairedEndFastqManifestPhred33V2, PairedEndFastqManifestPhred64V2, QIIME1DemuxDirFmt, FastqGzFormat) from q2_types.per_sample_sequences._util import ( _validate_header, _validate_single_end_fastq_manifest_directions, _validate_paired_end_fastq_manifest_directions, _parse_and_validate_manifest ) from qiime2.plugin.testing import TestPluginBase _parse_and_validate_manifest_partial = functools.partial( _parse_and_validate_manifest, abs_manifest_fmt=FastqAbsolutePathManifestFormat, manifest_fmt=FastqManifestFormat, ) class TestTransformers(TestPluginBase): package = "q2_types.per_sample_sequences.tests" def test_slpspefdf_to_slpssefdf(self): filenames = ('paired_end_data/MANIFEST', 'metadata.yml', 'Human-Kneecap_S1_L001_R1_001.fastq.gz', 'paired_end_data/Human-Kneecap_S1_L001_R2_001.fastq.gz') input, obs = self.transform_format( SingleLanePerSamplePairedEndFastqDirFmt, SingleLanePerSampleSingleEndFastqDirFmt, filenames=filenames ) expected = skbio.io.read( '%s/Human-Kneecap_S1_L001_R1_001.fastq.gz' % str(input), format='fastq', constructor=skbio.DNA ) obs = skbio.io.read( '%s/Human-Kneecap_S1_L001_R1_001.fastq.gz' % str(obs), format='fastq', constructor=skbio.DNA ) for act, exp in zip(obs, expected): self.assertEqual(act, exp) def test_slpssefdf_to_qiime1demuxdf(self): filenames = ('single-end-two-sample-data1/MANIFEST', 'metadata.yml', 'Human-Kneecap_S1_L001_R1_001.fastq.gz', 'Human-Armpit_S2_L001_R1_001.fastq.gz') input, observed = self.transform_format( SingleLanePerSampleSingleEndFastqDirFmt, QIIME1DemuxDirFmt, filenames=filenames ) expected1 = list(skbio.io.read( '%s/Human-Kneecap_S1_L001_R1_001.fastq.gz' % str(input), format='fastq', constructor=skbio.DNA )) expected2 = list(skbio.io.read( '%s/Human-Armpit_S2_L001_R1_001.fastq.gz' % str(input), format='fastq', constructor=skbio.DNA )) expected = \ list(zip(expected1, ['Human-Kneecap'] * len(expected1))) + \ list(zip(expected2, ['Human-Armpit'] * len(expected2))) observed = skbio.io.read( '%s/seqs.fna' % str(observed), format='fasta', constructor=skbio.DNA ) observed = list(observed) self.assertEqual(len(observed), len(expected)) for i, obs in enumerate(observed): # identifiers are as expected self.assertEqual(obs.metadata['id'], '%s_%d' % (expected[i][1], i)) # sequences are as expected self.assertEqual(str(obs), str(expected[i][0])) def test_slpssefdf_to_qiime1demuxdf_bad_sample_ids(self): filenames = ('single-end-two-sample-data2/MANIFEST', 'metadata.yml', 'Human-Kneecap_S1_L001_R1_001.fastq.gz', 'Human-Armpit_S2_L001_R1_001.fastq.gz') with self.assertRaisesRegex(ValueError, expected_regex='space.*Human-K'): self.transform_format( SingleLanePerSampleSingleEndFastqDirFmt, QIIME1DemuxDirFmt, filenames=filenames) filenames = ('single-end-two-sample-data3/MANIFEST', 'metadata.yml', 'Human-Kneecap_S1_L001_R1_001.fastq.gz', 'Human-Armpit_S2_L001_R1_001.fastq.gz') with 
self.assertRaisesRegex(ValueError, expected_regex='space.*Human-A'): self.transform_format( SingleLanePerSampleSingleEndFastqDirFmt, QIIME1DemuxDirFmt, filenames=filenames) def test_casava_one_eight_laneless_per_sample_dirfmt_to_slpspefd(self): filenames = ('Human-Kneecap_S1_R1_001.fastq.gz', 'Human-Armpit_S2_R1_001.fastq.gz') input, dirfmt = self.transform_format( CasavaOneEightLanelessPerSampleDirFmt, SingleLanePerSamplePairedEndFastqDirFmt, filenames=filenames ) expected_filepaths = ['Human-Kneecap_S1_L001_R1_001.fastq.gz', 'Human-Armpit_S2_L001_R1_001.fastq.gz'] for path, view in dirfmt.sequences.iter_views(FastqGzFormat): self.assertIn(path.name, expected_filepaths) df = dirfmt.manifest.view(pd.DataFrame) for name in df['forward']: self.assertTrue((dirfmt.path / name).exists()) def test_casava_one_eight_single_lane_per_sample_dirfmt_to_slpssefdf(self): filenames = ('Human-Kneecap_S1_L001_R1_001.fastq.gz',) input, obs = self.transform_format( CasavaOneEightSingleLanePerSampleDirFmt, SingleLanePerSampleSingleEndFastqDirFmt, filenames=filenames ) input = skbio.io.read( '%s/Human-Kneecap_S1_L001_R1_001.fastq.gz' % str(input), format='fastq', constructor=skbio.DNA ) obs = skbio.io.read( '%s/Human-Kneecap_S1_L001_R1_001.fastq.gz' % str(obs), format='fastq', constructor=skbio.DNA ) for act, exp in zip(obs, input): self.assertEqual(act, exp) def test_casava_one_eight_single_lane_per_sample_dirfmt_to_slpspefdf(self): filenames = ('Human-Kneecap_S1_L001_R1_001.fastq.gz',) input, obs = self.transform_format( CasavaOneEightSingleLanePerSampleDirFmt, SingleLanePerSamplePairedEndFastqDirFmt, filenames=filenames ) input = skbio.io.read( '%s/Human-Kneecap_S1_L001_R1_001.fastq.gz' % str(input), format='fastq', constructor=skbio.DNA ) obs = skbio.io.read( '%s/Human-Kneecap_S1_L001_R1_001.fastq.gz' % str(obs), format='fastq', constructor=skbio.DNA ) for act, exp in zip(obs, input): self.assertEqual(act, exp) def test_miseq_demux_dirfmt_to_slpssefdf(self): input, obs = self.transform_format( CasavaOneEightLanelessPerSampleDirFmt, SingleLanePerSampleSingleEndFastqDirFmt, filenames=('Human-Kneecap_S1_R1_001.fastq.gz',), ) input = skbio.io.read( '%s/Human-Kneecap_S1_R1_001.fastq.gz' % str(input), format='fastq', constructor=skbio.DNA ) obs = skbio.io.read( '%s/Human-Kneecap_S1_L001_R1_001.fastq.gz' % str(obs), format='fastq', constructor=skbio.DNA ) for act, exp in zip(obs, input): self.assertEqual(act, exp) def test_miseq_demux_dirfmt_to_slpspefdf(self): input, obs = self.transform_format( CasavaOneEightLanelessPerSampleDirFmt, SingleLanePerSamplePairedEndFastqDirFmt, filenames=('Human-Kneecap_S1_R1_001.fastq.gz',), ) input = skbio.io.read( '%s/Human-Kneecap_S1_R1_001.fastq.gz' % str(input), format='fastq', constructor=skbio.DNA ) obs = skbio.io.read( '%s/Human-Kneecap_S1_L001_R1_001.fastq.gz' % str(obs), format='fastq', constructor=skbio.DNA ) for act, exp in zip(obs, input): self.assertEqual(act, exp) def test_fastqmanifest_single(self): _, dirfmt = self.transform_format( CasavaOneEightSingleLanePerSampleDirFmt, SingleLanePerSampleSingleEndFastqDirFmt, filenames=('Human-Kneecap_S1_L001_R1_001.fastq.gz', 'Human-Armpit_S2_L001_R1_001.fastq.gz'), ) df = dirfmt.manifest.view(pd.DataFrame) self.assertEqual(set(df.index), {'Human-Kneecap', 'Human-Armpit'}) self.assertEqual(set(df.columns), {'forward'}) self.assertTrue(os.path.exists(df['forward'].loc['Human-Kneecap'])) self.assertTrue(os.path.exists(df['forward'].loc['Human-Armpit'])) def test_fastqmanifest_paired(self): _, dirfmt = self.transform_format( 
CasavaOneEightSingleLanePerSampleDirFmt, SingleLanePerSamplePairedEndFastqDirFmt, filenames=( 'Human-Kneecap_S1_L001_R1_001.fastq.gz', 'paired_end_data/Human-Kneecap_S1_L001_R2_001.fastq.gz'), ) df = dirfmt.manifest.view(pd.DataFrame) self.assertEqual(set(df.index), {'Human-Kneecap'}) self.assertEqual(set(df.columns), {'forward', 'reverse'}) self.assertTrue(os.path.exists(df['forward'].loc['Human-Kneecap'])) self.assertTrue(os.path.exists(df['reverse'].loc['Human-Kneecap'])) def test_slpssefdf_to_casava_one_eight_single_lane_per_sample_dirfmt(self): filenames = ('single-end-two-sample-data1/MANIFEST', 'metadata.yml', 'Human-Kneecap_S1_L001_R1_001.fastq.gz', 'Human-Armpit_S2_L001_R1_001.fastq.gz') input, obs = self.transform_format( SingleLanePerSampleSingleEndFastqDirFmt, CasavaOneEightSingleLanePerSampleDirFmt, filenames=filenames ) self.assertEqual(input.validate(), None) exp_fp = ['Human-Armpit_S2_L001_R1_001.fastq.gz', 'Human-Kneecap_S1_L001_R1_001.fastq.gz'] obs_fp = [str(fp) for fp, _ in obs.sequences.iter_views(FastqGzFormat)] self.assertEqual(obs_fp, exp_fp) def test_slpspefdf_to_casava_one_eight_single_lane_per_sample_dirfmt(self): filenames = ('Human-Kneecap_S1_L001_R1_001.fastq.gz', 'paired_end_data/Human-Kneecap_S1_L001_R2_001.fastq.gz', 'paired_end_data/MANIFEST', 'metadata.yml') input, obs = self.transform_format( SingleLanePerSamplePairedEndFastqDirFmt, CasavaOneEightSingleLanePerSampleDirFmt, filenames=filenames ) self.assertEqual(input.validate(), None) exp_fp = ['Human-Kneecap_S1_L001_R1_001.fastq.gz', 'Human-Kneecap_S1_L001_R2_001.fastq.gz'] obs_fp = [str(fp) for fp, _ in obs.sequences.iter_views(FastqGzFormat)] self.assertEqual(obs_fp, exp_fp) class TestFastqManifestTransformers(TestPluginBase): package = "q2_types.per_sample_sequences.tests" def test_single_end_fastq_manifest_phred33_to_slpssefdf(self): format_ = SingleEndFastqManifestPhred33 transformer = self.get_transformer( format_, SingleLanePerSampleSingleEndFastqDirFmt) shutil.copy( self.get_data_path('Human-Kneecap_S1_L001_R1_001.fastq.gz'), os.path.join(self.temp_dir.name, 'Human-Kneecap_S1_L001_R1_001.fastq.gz')) shutil.copy( self.get_data_path('Human-Armpit.fastq.gz'), os.path.join(self.temp_dir.name, 'Human-Armpit.fastq.gz')) manifest_fp = os.path.join(self.temp_dir.name, 'manifest') with open(manifest_fp, 'w') as fh: fh.write("sample-id,absolute-filepath,direction\n") fh.write("sampleABC,%s/Human-Kneecap_S1_L001_R1_001.fastq.gz," "forward\n" % self.temp_dir.name) fh.write("sampleXYZ,%s/Human-Armpit.fastq.gz,forward\n" % self.temp_dir.name) obs = transformer(format_(manifest_fp, 'r')) fastq_pairs = [('Human-Kneecap_S1_L001_R1_001.fastq.gz', 'sampleABC_0_L001_R1_001.fastq.gz'), ('Human-Armpit.fastq.gz', 'sampleXYZ_1_L001_R1_001.fastq.gz')] for input_fastq, obs_fastq in fastq_pairs: obs_fh = skbio.io.read( os.path.join(str(obs), obs_fastq), compression='gzip', format='fastq', constructor=skbio.DNA, variant='illumina1.8' ) exp_fh = skbio.io.read( self.get_data_path(input_fastq), format='fastq', constructor=skbio.DNA, variant='illumina1.8' ) for o, e in zip(obs_fh, exp_fh): self.assertEqual(o, e) obs_metadata = yaml.load(open('%s/metadata.yml' % str(obs)), Loader=yaml.SafeLoader) exp_metadata = yaml.load("{'phred-offset': 33}", Loader=yaml.SafeLoader) self.assertEqual(obs_metadata, exp_metadata) obs_manifest = open('%s/MANIFEST' % (str(obs))).read() exp_manifest = ("sample-id,filename,direction\n" "sampleABC,sampleABC_0_L001_R1_001.fastq.gz,forward\n" "sampleXYZ,sampleXYZ_1_L001_R1_001.fastq.gz,forward\n") 
self.assertEqual(obs_manifest, exp_manifest) def test_single_end_fastq_manifest_phred33_to_slpssefdf_uncompressed(self): format_ = SingleEndFastqManifestPhred33 transformer = self.get_transformer( format_, SingleLanePerSampleSingleEndFastqDirFmt) shutil.copy( self.get_data_path('Human-Kneecap_S1_L001_R1_001.fastq'), os.path.join(self.temp_dir.name, 'Human-Kneecap_S1_L001_R1_001.fastq')) shutil.copy( self.get_data_path('Human-Armpit.fastq'), os.path.join(self.temp_dir.name, 'Human-Armpit.fastq')) manifest_fp = os.path.join(self.temp_dir.name, 'manifest') with open(manifest_fp, 'w') as fh: fh.write("sample-id,absolute-filepath,direction\n") fh.write("sampleABC,%s/Human-Kneecap_S1_L001_R1_001.fastq," "forward\n" % self.temp_dir.name) fh.write("sampleXYZ,%s/Human-Armpit.fastq,forward\n" % self.temp_dir.name) obs = transformer(format_(manifest_fp, 'r')) fastq_pairs = [('Human-Kneecap_S1_L001_R1_001.fastq', 'sampleABC_0_L001_R1_001.fastq.gz'), ('Human-Armpit.fastq', 'sampleXYZ_1_L001_R1_001.fastq.gz')] for input_fastq, obs_fastq in fastq_pairs: obs_fh = skbio.io.read( os.path.join(str(obs), obs_fastq), compression='gzip', format='fastq', constructor=skbio.DNA, variant='illumina1.8' ) exp_fh = skbio.io.read( self.get_data_path(input_fastq), format='fastq', constructor=skbio.DNA, variant='illumina1.8' ) for o, e in zip(obs_fh, exp_fh): self.assertEqual(o, e) obs_metadata = yaml.load(open('%s/metadata.yml' % str(obs)), Loader=yaml.SafeLoader) exp_metadata = yaml.load("{'phred-offset': 33}", Loader=yaml.SafeLoader) self.assertEqual(obs_metadata, exp_metadata) obs_manifest = open('%s/MANIFEST' % (str(obs))).read() exp_manifest = ("sample-id,filename,direction\n" "sampleABC,sampleABC_0_L001_R1_001.fastq.gz,forward\n" "sampleXYZ,sampleXYZ_1_L001_R1_001.fastq.gz,forward\n") self.assertEqual(obs_manifest, exp_manifest) def test_single_end_fastq_manifest_phred64_to_slpssefdf(self): format_ = SingleEndFastqManifestPhred64 transformer = self.get_transformer( format_, SingleLanePerSampleSingleEndFastqDirFmt) shutil.copy( self.get_data_path('s1-phred64.fastq.gz'), os.path.join(self.temp_dir.name, 's1-phred64.fastq.gz')) shutil.copy( self.get_data_path('s2-phred64.fastq.gz'), os.path.join(self.temp_dir.name, 's2-phred64.fastq.gz')) manifest_fp = os.path.join(self.temp_dir.name, 'manifest') with open(manifest_fp, 'w') as fh: fh.write("sample-id,absolute-filepath,direction\n") fh.write("sampleABC,%s/s1-phred64.fastq.gz," "forward\n" % self.temp_dir.name) fh.write("sampleXYZ,%s/s2-phred64.fastq.gz,forward\n" % self.temp_dir.name) obs = transformer(format_(manifest_fp, 'r')) fastq_pairs = [('s1-phred64.fastq.gz', 'sampleABC_0_L001_R1_001.fastq.gz'), ('s2-phred64.fastq.gz', 'sampleXYZ_1_L001_R1_001.fastq.gz')] for input_fastq, obs_fastq in fastq_pairs: obs_fh = skbio.io.read( os.path.join(str(obs), obs_fastq), compression='gzip', format='fastq', constructor=skbio.DNA, variant='illumina1.8' ) exp_fh = skbio.io.read( self.get_data_path(input_fastq), format='fastq', constructor=skbio.DNA, variant='illumina1.3' ) for o, e in zip(obs_fh, exp_fh): self.assertEqual(o, e) obs_metadata = yaml.load(open('%s/metadata.yml' % str(obs)), Loader=yaml.SafeLoader) exp_metadata = yaml.load("{'phred-offset': 33}", Loader=yaml.SafeLoader) self.assertEqual(obs_metadata, exp_metadata) obs_manifest = open('%s/MANIFEST' % (str(obs))).read() exp_manifest = ("sample-id,filename,direction\n" "sampleABC,sampleABC_0_L001_R1_001.fastq.gz,forward\n" "sampleXYZ,sampleXYZ_1_L001_R1_001.fastq.gz,forward\n") self.assertEqual(obs_manifest, 
exp_manifest) def test_single_end_fastq_manifest_phred64_to_slpssefdf_uncompressed(self): format_ = SingleEndFastqManifestPhred64 transformer = self.get_transformer( format_, SingleLanePerSampleSingleEndFastqDirFmt) shutil.copy( self.get_data_path('s1-phred64.fastq'), os.path.join(self.temp_dir.name, 's1-phred64.fastq')) shutil.copy( self.get_data_path('s2-phred64.fastq'), os.path.join(self.temp_dir.name, 's2-phred64.fastq')) manifest_fp = os.path.join(self.temp_dir.name, 'manifest') with open(manifest_fp, 'w') as fh: fh.write("sample-id,absolute-filepath,direction\n") fh.write("sampleABC,%s/s1-phred64.fastq," "forward\n" % self.temp_dir.name) fh.write("sampleXYZ,%s/s2-phred64.fastq,forward\n" % self.temp_dir.name) obs = transformer(format_(manifest_fp, 'r')) fastq_pairs = [('s1-phred64.fastq', 'sampleABC_0_L001_R1_001.fastq.gz'), ('s2-phred64.fastq', 'sampleXYZ_1_L001_R1_001.fastq.gz')] for input_fastq, obs_fastq in fastq_pairs: obs_fh = skbio.io.read( os.path.join(str(obs), obs_fastq), compression='gzip', format='fastq', constructor=skbio.DNA, variant='illumina1.8' ) exp_fh = skbio.io.read( self.get_data_path(input_fastq), format='fastq', constructor=skbio.DNA, variant='illumina1.3' ) for o, e in zip(obs_fh, exp_fh): self.assertEqual(o, e) obs_metadata = yaml.load(open('%s/metadata.yml' % str(obs)), Loader=yaml.SafeLoader) exp_metadata = yaml.load("{'phred-offset': 33}", Loader=yaml.SafeLoader) self.assertEqual(obs_metadata, exp_metadata) obs_manifest = open('%s/MANIFEST' % (str(obs))).read() exp_manifest = ("sample-id,filename,direction\n" "sampleABC,sampleABC_0_L001_R1_001.fastq.gz,forward\n" "sampleXYZ,sampleXYZ_1_L001_R1_001.fastq.gz,forward\n") self.assertEqual(obs_manifest, exp_manifest) def test_paired_end_fastq_manifest_phred33_to_slpspefdf(self): format_ = PairedEndFastqManifestPhred33 transformer = self.get_transformer( format_, SingleLanePerSamplePairedEndFastqDirFmt) shutil.copy( self.get_data_path('Human-Kneecap_S1_L001_R1_001.fastq.gz'), os.path.join(self.temp_dir.name, 'Human-Kneecap_S1_L001_R1_001.fastq.gz')) shutil.copy( self.get_data_path('Human-Armpit.fastq.gz'), os.path.join(self.temp_dir.name, 'Human-Armpit.fastq.gz')) manifest_fp = os.path.join(self.temp_dir.name, 'manifest') with open(manifest_fp, 'w') as fh: fh.write("sample-id,absolute-filepath,direction\n") fh.write("sampleABC,%s/Human-Kneecap_S1_L001_R1_001.fastq.gz," "forward\n" % self.temp_dir.name) fh.write("sampleABC,%s/Human-Armpit.fastq.gz,reverse\n" % self.temp_dir.name) obs = transformer(format_(manifest_fp, 'r')) fastq_pairs = [('Human-Kneecap_S1_L001_R1_001.fastq.gz', 'sampleABC_0_L001_R1_001.fastq.gz'), ('Human-Armpit.fastq.gz', 'sampleABC_1_L001_R2_001.fastq.gz')] for input_fastq, obs_fastq in fastq_pairs: obs_fh = skbio.io.read( os.path.join(str(obs), obs_fastq), compression='gzip', format='fastq', constructor=skbio.DNA, variant='illumina1.8' ) exp_fh = skbio.io.read( self.get_data_path(input_fastq), format='fastq', constructor=skbio.DNA, variant='illumina1.8' ) for o, e in zip(obs_fh, exp_fh): self.assertEqual(o, e) obs_metadata = yaml.load(open('%s/metadata.yml' % str(obs)), Loader=yaml.SafeLoader) exp_metadata = yaml.load("{'phred-offset': 33}", Loader=yaml.SafeLoader) self.assertEqual(obs_metadata, exp_metadata) obs_manifest = open('%s/MANIFEST' % (str(obs))).read() exp_manifest = ("sample-id,filename,direction\n" "sampleABC,sampleABC_0_L001_R1_001.fastq.gz,forward\n" "sampleABC,sampleABC_1_L001_R2_001.fastq.gz,reverse\n") self.assertEqual(obs_manifest, exp_manifest) def 
test_paired_end_fastq_manifest_phred33_to_slpspefdf_uncompressed(self): format_ = PairedEndFastqManifestPhred33 transformer = self.get_transformer( format_, SingleLanePerSamplePairedEndFastqDirFmt) shutil.copy( self.get_data_path('Human-Kneecap_S1_L001_R1_001.fastq'), os.path.join(self.temp_dir.name, 'Human-Kneecap_S1_L001_R1_001.fastq')) shutil.copy( self.get_data_path('Human-Armpit.fastq'), os.path.join(self.temp_dir.name, 'Human-Armpit.fastq')) manifest_fp = os.path.join(self.temp_dir.name, 'manifest') with open(manifest_fp, 'w') as fh: fh.write("sample-id,absolute-filepath,direction\n") fh.write("sampleABC,%s/Human-Kneecap_S1_L001_R1_001.fastq," "forward\n" % self.temp_dir.name) fh.write("sampleABC,%s/Human-Armpit.fastq,reverse\n" % self.temp_dir.name) obs = transformer(format_(manifest_fp, 'r')) fastq_pairs = [('Human-Kneecap_S1_L001_R1_001.fastq', 'sampleABC_0_L001_R1_001.fastq.gz'), ('Human-Armpit.fastq', 'sampleABC_1_L001_R2_001.fastq.gz')] for input_fastq, obs_fastq in fastq_pairs: obs_fh = skbio.io.read( os.path.join(str(obs), obs_fastq), compression='gzip', format='fastq', constructor=skbio.DNA, variant='illumina1.8' ) exp_fh = skbio.io.read( self.get_data_path(input_fastq), format='fastq', constructor=skbio.DNA, variant='illumina1.8' ) for o, e in zip(obs_fh, exp_fh): self.assertEqual(o, e) obs_metadata = yaml.load(open('%s/metadata.yml' % str(obs)), Loader=yaml.SafeLoader) exp_metadata = yaml.load("{'phred-offset': 33}", Loader=yaml.SafeLoader) self.assertEqual(obs_metadata, exp_metadata) obs_manifest = open('%s/MANIFEST' % (str(obs))).read() exp_manifest = ("sample-id,filename,direction\n" "sampleABC,sampleABC_0_L001_R1_001.fastq.gz,forward\n" "sampleABC,sampleABC_1_L001_R2_001.fastq.gz,reverse\n") self.assertEqual(obs_manifest, exp_manifest) def test_paired_end_fastq_manifest_phred64_to_slpspefdf(self): format_ = PairedEndFastqManifestPhred64 transformer = self.get_transformer( format_, SingleLanePerSamplePairedEndFastqDirFmt) shutil.copy( self.get_data_path('s1-phred64.fastq.gz'), os.path.join(self.temp_dir.name, 's1-phred64.fastq.gz')) shutil.copy( self.get_data_path('s2-phred64.fastq.gz'), os.path.join(self.temp_dir.name, 's2-phred64.fastq.gz')) manifest_fp = os.path.join(self.temp_dir.name, 'manifest') with open(manifest_fp, 'w') as fh: fh.write("sample-id,absolute-filepath,direction\n") fh.write("sampleABC,%s/s1-phred64.fastq.gz," "forward\n" % self.temp_dir.name) fh.write("sampleABC,%s/s2-phred64.fastq.gz,reverse\n" % self.temp_dir.name) obs = transformer(format_(manifest_fp, 'r')) fastq_pairs = [('s1-phred64.fastq.gz', 'sampleABC_0_L001_R1_001.fastq.gz'), ('s2-phred64.fastq.gz', 'sampleABC_1_L001_R2_001.fastq.gz')] for input_fastq, obs_fastq in fastq_pairs: obs_fh = skbio.io.read( os.path.join(str(obs), obs_fastq), compression='gzip', format='fastq', constructor=skbio.DNA, variant='illumina1.8' ) exp_fh = skbio.io.read( self.get_data_path(input_fastq), format='fastq', constructor=skbio.DNA, variant='illumina1.3' ) for o, e in zip(obs_fh, exp_fh): self.assertEqual(o, e) obs_metadata = yaml.load(open('%s/metadata.yml' % str(obs)), Loader=yaml.SafeLoader) exp_metadata = yaml.load("{'phred-offset': 33}", Loader=yaml.SafeLoader) self.assertEqual(obs_metadata, exp_metadata) obs_manifest = open('%s/MANIFEST' % (str(obs))).read() exp_manifest = ("sample-id,filename,direction\n" "sampleABC,sampleABC_0_L001_R1_001.fastq.gz,forward\n" "sampleABC,sampleABC_1_L001_R2_001.fastq.gz,reverse\n") self.assertEqual(obs_manifest, exp_manifest) def 
test_paired_end_fastq_manifest_phred64_to_slpspefdf_uncompressed(self): format_ = PairedEndFastqManifestPhred64 transformer = self.get_transformer( format_, SingleLanePerSamplePairedEndFastqDirFmt) shutil.copy( self.get_data_path('s1-phred64.fastq'), os.path.join(self.temp_dir.name, 's1-phred64.fastq')) shutil.copy( self.get_data_path('s2-phred64.fastq.gz'), os.path.join(self.temp_dir.name, 's2-phred64.fastq')) manifest_fp = os.path.join(self.temp_dir.name, 'manifest') with open(manifest_fp, 'w') as fh: fh.write("sample-id,absolute-filepath,direction\n") fh.write("sampleABC,%s/s1-phred64.fastq," "forward\n" % self.temp_dir.name) fh.write("sampleABC,%s/s2-phred64.fastq,reverse\n" % self.temp_dir.name) obs = transformer(format_(manifest_fp, 'r')) fastq_pairs = [('s1-phred64.fastq', 'sampleABC_0_L001_R1_001.fastq.gz'), ('s2-phred64.fastq', 'sampleABC_1_L001_R2_001.fastq.gz')] for input_fastq, obs_fastq in fastq_pairs: obs_fh = skbio.io.read( os.path.join(str(obs), obs_fastq), compression='gzip', format='fastq', constructor=skbio.DNA, variant='illumina1.8' ) exp_fh = skbio.io.read( self.get_data_path(input_fastq), format='fastq', constructor=skbio.DNA, variant='illumina1.3' ) for o, e in zip(obs_fh, exp_fh): self.assertEqual(o, e) obs_metadata = yaml.load(open('%s/metadata.yml' % str(obs)), Loader=yaml.SafeLoader) exp_metadata = yaml.load("{'phred-offset': 33}", Loader=yaml.SafeLoader) self.assertEqual(obs_metadata, exp_metadata) obs_manifest = open('%s/MANIFEST' % (str(obs))).read() exp_manifest = ("sample-id,filename,direction\n" "sampleABC,sampleABC_0_L001_R1_001.fastq.gz,forward\n" "sampleABC,sampleABC_1_L001_R2_001.fastq.gz,reverse\n") self.assertEqual(obs_manifest, exp_manifest) def test_single_end_fastq_manifest_missing_fastq(self): format_ = SingleEndFastqManifestPhred64 transformer = self.get_transformer( format_, SingleLanePerSampleSingleEndFastqDirFmt) shutil.copy( self.get_data_path('s1-phred64.fastq.gz'), os.path.join(self.temp_dir.name, 's1-phred64.fastq.gz')) manifest_fp = os.path.join(self.temp_dir.name, 'manifest') with open(manifest_fp, 'w') as fh: fh.write("sample-id,absolute-filepath,direction\n") fh.write("sampleABC,%s/s1-phred64.fastq.gz," "forward\n" % self.temp_dir.name) fh.write("sampleXYZ,%s/s2-phred64.fastq.gz,forward\n" % self.temp_dir.name) with self.assertRaisesRegex(FileNotFoundError, "s2-phred64.fastq.gz"): transformer(format_(manifest_fp, 'r')) def test_single_end_fastq_manifest_invalid_direction(self): format_ = SingleEndFastqManifestPhred64 transformer = self.get_transformer( format_, SingleLanePerSampleSingleEndFastqDirFmt) shutil.copy( self.get_data_path('s1-phred64.fastq.gz'), os.path.join(self.temp_dir.name, 's1-phred64.fastq.gz')) shutil.copy( self.get_data_path('s2-phred64.fastq.gz'), os.path.join(self.temp_dir.name, 's2-phred64.fastq.gz')) manifest_fp = os.path.join(self.temp_dir.name, 'manifest') with open(manifest_fp, 'w') as fh: fh.write("sample-id,absolute-filepath,direction\n") fh.write("sampleABC,%s/s1-phred64.fastq.gz," "middle-out\n" % self.temp_dir.name) fh.write("sampleXYZ,%s/s2-phred64.fastq.gz,forward\n" % self.temp_dir.name) with self.assertRaisesRegex(ValueError, 'middle-out'): transformer(format_(manifest_fp, 'r')) def test_single_end_fastq_manifest_too_many_directions(self): format_ = SingleEndFastqManifestPhred64 transformer = self.get_transformer( format_, SingleLanePerSampleSingleEndFastqDirFmt) shutil.copy( self.get_data_path('s1-phred64.fastq.gz'), os.path.join(self.temp_dir.name, 's1-phred64.fastq.gz')) shutil.copy( 
self.get_data_path('s2-phred64.fastq.gz'), os.path.join(self.temp_dir.name, 's2-phred64.fastq.gz')) manifest_fp = os.path.join(self.temp_dir.name, 'manifest') with open(manifest_fp, 'w') as fh: fh.write("sample-id,absolute-filepath,direction\n") fh.write("sampleABC,%s/s1-phred64.fastq.gz," "forward\n" % self.temp_dir.name) fh.write("sampleXYZ,%s/s2-phred64.fastq.gz,reverse\n" % self.temp_dir.name) with self.assertRaisesRegex(ValueError, "only forward or reverse"): transformer(format_(manifest_fp, 'r')) def test_paired_end_fastq_manifest_missing_fastq(self): format_ = PairedEndFastqManifestPhred64 transformer = self.get_transformer( format_, SingleLanePerSamplePairedEndFastqDirFmt) shutil.copy( self.get_data_path('s1-phred64.fastq.gz'), os.path.join(self.temp_dir.name, 's1-phred64.fastq.gz')) manifest_fp = os.path.join(self.temp_dir.name, 'manifest') with open(manifest_fp, 'w') as fh: fh.write("sample-id,absolute-filepath,direction\n") fh.write("sampleABC,%s/s1-phred64.fastq.gz," "forward\n" % self.temp_dir.name) fh.write("sampleABC,%s/s2-phred64.fastq.gz,reverse\n" % self.temp_dir.name) with self.assertRaisesRegex(FileNotFoundError, "s2-phred64.fastq.gz"): transformer(format_(manifest_fp, 'r')) def test_paired_end_fastq_manifest_invalid_direction(self): format_ = PairedEndFastqManifestPhred64 transformer = self.get_transformer( format_, SingleLanePerSamplePairedEndFastqDirFmt) shutil.copy( self.get_data_path('s1-phred64.fastq.gz'), os.path.join(self.temp_dir.name, 's1-phred64.fastq.gz')) shutil.copy( self.get_data_path('s2-phred64.fastq.gz'), os.path.join(self.temp_dir.name, 's2-phred64.fastq.gz')) manifest_fp = os.path.join(self.temp_dir.name, 'manifest') with open(manifest_fp, 'w') as fh: fh.write("sample-id,absolute-filepath,direction\n") fh.write("sampleABC,%s/s1-phred64.fastq.gz," "middle-out\n" % self.temp_dir.name) fh.write("sampleABC,%s/s2-phred64.fastq.gz,reverse\n" % self.temp_dir.name) with self.assertRaisesRegex(ValueError, 'middle-out'): transformer(format_(manifest_fp, 'r')) def test_paired_end_fastq_manifest_missing_directions(self): format_ = PairedEndFastqManifestPhred64 transformer = self.get_transformer( format_, SingleLanePerSamplePairedEndFastqDirFmt) shutil.copy( self.get_data_path('s1-phred64.fastq.gz'), os.path.join(self.temp_dir.name, 's1-phred64.fastq.gz')) manifest_fp = os.path.join(self.temp_dir.name, 'manifest') with open(manifest_fp, 'w') as fh: fh.write("sample-id,absolute-filepath,direction\n") fh.write("sampleABC,%s/s1-phred64.fastq.gz," "forward\n" % self.temp_dir.name) with self.assertRaisesRegex(ValueError, "one time each for each sample"): transformer(format_(manifest_fp, 'r')) def test_parse_and_validate_manifest_invalid(self): manifest = io.StringIO( 'sample-id,absolute-filepath\n' 'abc,/hello/world,forward\n') with self.assertRaisesRegex( ValueError, "Expected.*absolute-filepath.*found " "'sample-id,absolute-filepath'.$"): _parse_and_validate_manifest_partial( manifest, single_end=True, absolute=True) manifest = io.StringIO( 'sample-id,absolute-filepath,direction\n' 'abc,/hello/world\n' 'abc,/hello/world,forward\n') with self.assertRaisesRegex(ValueError, 'Empty cells'): _parse_and_validate_manifest_partial( manifest, single_end=True, absolute=True) manifest = io.StringIO( 'sample-id,absolute-filepath,direction\n' 'abc,/hello/world,forward\n' 'xyz,/hello/world,forward,extra-field') with self.assertRaisesRegex(ValueError, 'issue parsing the manifest'): _parse_and_validate_manifest_partial( manifest, single_end=True, absolute=True) manifest = 
io.StringIO( 'sample-id,absolute-filepath,direction\n' 'abc,world,forward\n' 'xyz,world,forward') with self.assertRaisesRegex(ValueError, 'absolute but found relative path'): _parse_and_validate_manifest_partial( manifest, single_end=True, absolute=True) manifest = io.StringIO( 'sample-id,absolute-filepath,direction\n' 'abc,world,forward\n' 'abc,world,reverse') with self.assertRaisesRegex(ValueError, 'absolute but found relative path'): _parse_and_validate_manifest_partial( manifest, single_end=False, absolute=True) manifest = io.StringIO( 'sample-id,filename,direction\n' 'abc,/snap/crackle/pop/world,forward\n' 'xyz,/snap/crackle/pop/world,forward') with self.assertRaisesRegex(ValueError, 'relative but found absolute path'): _parse_and_validate_manifest_partial( manifest, single_end=True, absolute=False) manifest = io.StringIO( 'sample-id,filename,direction\n' 'abc,/snap/crackle/pop/world,forward\n' 'abc,/snap/crackle/pop/world,reverse') with self.assertRaisesRegex(ValueError, 'relative but found absolute path'): _parse_and_validate_manifest_partial( manifest, single_end=False, absolute=False) def test_parse_and_validate_manifest_expand_vars(self): expected_fp = os.path.join(self.temp_dir.name, 'manifest.txt') # touch the file - the validator will fail if it doesn't exist open(expected_fp, 'w') os.environ['TESTENVGWAR'] = self.temp_dir.name manifest = io.StringIO( 'sample-id,absolute-filepath,direction\n' 'abc,$TESTENVGWAR/manifest.txt,forward') manifest = _parse_and_validate_manifest_partial( manifest, single_end=True, absolute=True) del os.environ['TESTENVGWAR'] self.assertEqual(manifest.iloc[0]['absolute-filepath'], expected_fp) def test_validate_header_valid(self): columns = ['sample-id', 'absolute-filepath', 'direction'] manifest = pd.DataFrame( [['abc', '/hello/world', 'forward'], ['xyz', '/hello/world', 'forward']], columns=columns) # should not raise an error _validate_header(manifest, expected_header=columns) def test_validate_header_invalid(self): columns = ['sample-id', 'absolute-filepath', 'direction'] manifest = pd.DataFrame( [['abc', '/hello/world'], ['xyz', '/hello/world']], columns=['xyz', 'absolute-filepath']) with self.assertRaisesRegex(ValueError, 'Expected manifest.*absolute' '-filepath.*but'): _validate_header(manifest, expected_header=columns) manifest = pd.DataFrame( [['abc', '/hello/world', 'forward'], ['xyz', '/hello/world', 'forward']], columns=['xyz', 'absolute-filepath', 'direction']) with self.assertRaisesRegex(ValueError, 'sample-id.*xyz'): _validate_header(manifest, expected_header=columns) manifest = pd.DataFrame( [['abc', '/hello/world', 'forward'], ['xyz', '/hello/world', 'forward']], columns=['sample-id', 'xyz', 'direction']) with self.assertRaisesRegex(ValueError, 'absolute-filepath.*xyz'): _validate_header(manifest, expected_header=columns) manifest = pd.DataFrame( [['abc', '/hello/world', 'forward'], ['xyz', '/hello/world', 'forward']], columns=['sample-id', 'absolute-filepath', 'xyz']) with self.assertRaisesRegex(ValueError, 'direction.*xyz'): _validate_header(manifest, expected_header=columns) def test_validate_single_end_fastq_manifest_directions(self): manifest = pd.DataFrame( [['abc', '/hello/world', 'forward'], ['xyz', '/hello/world', 'forward']], columns=['sample-id', 'absolute-filepath', 'direction']) _validate_single_end_fastq_manifest_directions(manifest) manifest = pd.DataFrame( [['abc', '/hello/world', 'reverse'], ['xyz', '/hello/world', 'reverse']], columns=['sample-id', 'absolute-filepath', 'direction'])
_validate_single_end_fastq_manifest_directions(manifest) def test_validate_single_end_fastq_manifest_directions_invalid(self): manifest = pd.DataFrame( [['abc', '/hello/world', 'forward'], ['xyz', '/hello/world', 'reverse']], columns=['sample-id', 'absolute-filepath', 'direction']) with self.assertRaisesRegex(ValueError, 'can contain only'): _validate_single_end_fastq_manifest_directions(manifest) manifest = pd.DataFrame( [['abc', '/hello/world', 'forward'], ['abc', '/hello/world2', 'forward']], columns=['sample-id', 'absolute-filepath', 'direction']) with self.assertRaisesRegex(ValueError, 'more than once'): _validate_single_end_fastq_manifest_directions(manifest) def test_validate_paired_end_fastq_manifest_directions(self): manifest = pd.DataFrame( [['abc', '/hello/world', 'forward'], ['abc', '/hello/world', 'reverse'], ['xyz', '/hello/world2', 'forward'], ['xyz', '/hello/world2', 'reverse']], columns=['sample-id', 'absolute-filepath', 'direction']) _validate_paired_end_fastq_manifest_directions(manifest) def test_validate_paired_end_fastq_manifest_directions_invalid(self): manifest = pd.DataFrame( [['abc', '/hello/world', 'forward'], ['abc', '/hello/world', 'reverse'], ['xyz', '/hello/world2', 'reverse']], columns=['sample-id', 'absolute-filepath', 'direction']) with self.assertRaisesRegex(ValueError, 'reverse but not.*xyz'): _validate_paired_end_fastq_manifest_directions(manifest) manifest = pd.DataFrame( [['abc', '/hello/world', 'forward'], ['abc', '/hello/world', 'reverse'], ['xyz', '/hello/world2', 'forward']], columns=['sample-id', 'absolute-filepath', 'direction']) with self.assertRaisesRegex(ValueError, 'forward but not.*xyz'): _validate_paired_end_fastq_manifest_directions(manifest) manifest = pd.DataFrame( [['abc', '/hello/world', 'forward'], ['abc', '/hello/world', 'reverse'], ['abc', '/hello/world2', 'forward']], columns=['sample-id', 'absolute-filepath', 'direction']) with self.assertRaisesRegex(ValueError, 'forward read record: abc'): _validate_paired_end_fastq_manifest_directions(manifest) manifest = pd.DataFrame( [['xyz', '/hello/world', 'forward'], ['xyz', '/hello/world', 'reverse'], ['xyz', '/hello/world2', 'reverse']], columns=['sample-id', 'absolute-filepath', 'direction']) with self.assertRaisesRegex(ValueError, 'reverse read record: xyz'): _validate_paired_end_fastq_manifest_directions(manifest) # NOTE: we are really only interested in the manifest, since these transformers # primarily transform the V2 TSV manifests to the (older) CSV manifests. The # only things asserted here are facts about the manifest and not the actual # data assets, themselves. 
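# The tests above exercise the legacy (V1) fastq manifest layout consumed by
# the Phred64 transformers: a 'sample-id,absolute-filepath,direction' header
# plus one row per fastq file, and a paired-end manifest must list each
# sample-id exactly once as 'forward' and exactly once as 'reverse'. (The V2
# transformers tested below emit the same legacy CSV MANIFEST internally.)
# The snippet that follows is only an illustrative sketch of that direction
# rule -- it is not q2-types code, and `check_paired_directions` plus the
# example file paths are hypothetical.
import io
import pandas as pd


def check_paired_directions(manifest: pd.DataFrame) -> None:
    # Raise if any sample lacks exactly one forward and one reverse record.
    for sample_id, group in manifest.groupby('sample-id'):
        directions = sorted(group['direction'])
        if directions != ['forward', 'reverse']:
            raise ValueError('Expected one forward and one reverse record '
                             'for sample %r, found %r'
                             % (sample_id, directions))


example = pd.read_csv(io.StringIO(
    'sample-id,absolute-filepath,direction\n'
    'sampleABC,/data/s1-phred64.fastq.gz,forward\n'
    'sampleABC,/data/s2-phred64.fastq.gz,reverse\n'))
check_paired_directions(example)  # passes silently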
class TestFastqManifestV2Transformers(TestPluginBase): package = "q2_types.per_sample_sequences.tests" def setUp(self): super().setUp() self.se_formats = [SingleEndFastqManifestPhred33V2, SingleEndFastqManifestPhred64V2] self.pe_formats = [PairedEndFastqManifestPhred33V2, PairedEndFastqManifestPhred64V2] self.exp_se_manifest = ( "sample-id,filename,direction\n" "Human-Kneecap,Human-Kneecap_0_L001_R1_001.fastq.gz,forward\n" "Peanut-Eyeball,Peanut-Eyeball_1_L001_R1_001.fastq.gz,forward\n") self.exp_pe_manifest = ( "sample-id,filename,direction\n" "Human-Kneecap,Human-Kneecap_0_L001_R1_001.fastq.gz,forward\n" "Peanut-Eyeball,Peanut-Eyeball_1_L001_R1_001.fastq.gz,forward\n" "Human-Kneecap,Human-Kneecap_2_L001_R2_001.fastq.gz,reverse\n" "Peanut-Eyeball,Peanut-Eyeball_3_L001_R2_001.fastq.gz,reverse\n") def template_manifest(self, filepath, ctx): with open(filepath) as fh: tmpl = string.Template(fh.read()) basename = os.path.basename(filepath) file_ = os.path.join(self.temp_dir.name, basename) with open(file_, 'w') as fh: fh.write(tmpl.substitute(**ctx)) return file_ def apply_transformation(self, from_fmt, to_fmt, datafile_fp, manifest_fp): transformer = self.get_transformer(from_fmt, to_fmt) fp = self.get_data_path(datafile_fp) manifest = self.template_manifest( self.get_data_path(manifest_fp), {k: fp for k in ['s1', 's2', 's1f', 's1r', 's2f', 's2r']}) return transformer(from_fmt(manifest, 'r')) def test_single_end_fastq_manifest_phred33_to_slpssefdf(self): obs = self.apply_transformation( SingleEndFastqManifestPhred33V2, SingleLanePerSampleSingleEndFastqDirFmt, 'Human-Kneecap_S1_L001_R1_001.fastq.gz', 'absolute_manifests_v2/single-MANIFEST') with obs.manifest.view(FastqManifestFormat).open() as obs_manifest: self.assertEqual(obs_manifest.read(), self.exp_se_manifest) def test_single_end_fastq_manifest_phred64_to_slpssefdf(self): obs = self.apply_transformation( SingleEndFastqManifestPhred64V2, SingleLanePerSampleSingleEndFastqDirFmt, 's1-phred64.fastq.gz', 'absolute_manifests_v2/single-MANIFEST') with obs.manifest.view(FastqManifestFormat).open() as obs_manifest: self.assertEqual(obs_manifest.read(), self.exp_se_manifest) def test_paired_end_fastq_manifest_phred33_to_slpspefdf(self): obs = self.apply_transformation( PairedEndFastqManifestPhred33V2, SingleLanePerSamplePairedEndFastqDirFmt, 'Human-Kneecap_S1_L001_R1_001.fastq.gz', 'absolute_manifests_v2/paired-MANIFEST') with obs.manifest.view(FastqManifestFormat).open() as obs_manifest: self.assertEqual(obs_manifest.read(), self.exp_pe_manifest) def test_paired_end_fastq_manifest_phred64_to_slpspefdf(self): obs = self.apply_transformation( PairedEndFastqManifestPhred64V2, SingleLanePerSamplePairedEndFastqDirFmt, 's1-phred64.fastq.gz', 'absolute_manifests_v2/paired-MANIFEST') with obs.manifest.view(FastqManifestFormat).open() as obs_manifest: self.assertEqual(obs_manifest.read(), self.exp_pe_manifest) if __name__ == '__main__': unittest.main() q2-types-2021.8.0/q2_types/per_sample_sequences/tests/test_type.py000066400000000000000000000044161412142116700251400ustar00rootroot00000000000000# ---------------------------------------------------------------------------- # Copyright (c) 2016-2021, QIIME 2 development team. # # Distributed under the terms of the Modified BSD License. # # The full license is in the file LICENSE, distributed with this software. 
# ---------------------------------------------------------------------------- import unittest from q2_types.sample_data import SampleData from q2_types.per_sample_sequences import ( Sequences, SequencesWithQuality, PairedEndSequencesWithQuality, JoinedSequencesWithQuality, QIIME1DemuxDirFmt, SingleLanePerSampleSingleEndFastqDirFmt, SingleLanePerSamplePairedEndFastqDirFmt ) from qiime2.plugin.testing import TestPluginBase class TestTypes(TestPluginBase): package = "q2_types.per_sample_sequences.tests" def test_sequences_semantic_type_registration(self): self.assertRegisteredSemanticType(Sequences) def test_sequences_with_quality_semantic_type_registration(self): self.assertRegisteredSemanticType(SequencesWithQuality) def test_paired_end_sequences_with_qual_semantic_type_registration(self): self.assertRegisteredSemanticType(PairedEndSequencesWithQuality) def test_joined_sequences_with_qual_semantic_type_registration(self): self.assertRegisteredSemanticType(JoinedSequencesWithQuality) def test_sequences_semantic_type_to_format_registration(self): self.assertSemanticTypeRegisteredToFormat( SampleData[Sequences], QIIME1DemuxDirFmt ) def test_sequences_with_quality_semantic_type_to_format_registration(self): self.assertSemanticTypeRegisteredToFormat( SampleData[SequencesWithQuality], SingleLanePerSampleSingleEndFastqDirFmt ) def test_paired_end_sequences_semantic_type_to_format_registration(self): self.assertSemanticTypeRegisteredToFormat( SampleData[PairedEndSequencesWithQuality], SingleLanePerSamplePairedEndFastqDirFmt ) def test_joined_sequences_with_quality_semantic_type_to_format_reg(self): self.assertSemanticTypeRegisteredToFormat( SampleData[JoinedSequencesWithQuality], SingleLanePerSampleSingleEndFastqDirFmt ) if __name__ == '__main__': unittest.main() q2-types-2021.8.0/q2_types/plugin_setup.py000066400000000000000000000026661412142116700202770ustar00rootroot00000000000000# ---------------------------------------------------------------------------- # Copyright (c) 2016-2021, QIIME 2 development team. # # Distributed under the terms of the Modified BSD License. # # The full license is in the file LICENSE, distributed with this software. # ---------------------------------------------------------------------------- import importlib import pandas as pd import qiime2.plugin import qiime2.sdk from q2_types import __version__ citations = qiime2.plugin.Citations.load('citations.bib', package='q2_types') plugin = qiime2.plugin.Plugin( name='types', version=__version__, website='https://github.com/qiime2/q2-types', package='q2_types', description=('This QIIME 2 plugin defines semantic types and ' 'transformers supporting microbiome analysis.'), short_description='Plugin defining types for microbiome analysis.' 
) plugin.register_views(pd.Series, pd.DataFrame, citations=[citations['mckinney-proc-scipy-2010']]) importlib.import_module('q2_types.feature_table') importlib.import_module('q2_types.distance_matrix') importlib.import_module('q2_types.tree') importlib.import_module('q2_types.ordination') importlib.import_module('q2_types.sample_data') importlib.import_module('q2_types.feature_data') importlib.import_module('q2_types.per_sample_sequences') importlib.import_module('q2_types.multiplexed_sequences') importlib.import_module('q2_types.bowtie2') q2-types-2021.8.0/q2_types/sample_data/000077500000000000000000000000001412142116700174475ustar00rootroot00000000000000q2-types-2021.8.0/q2_types/sample_data/__init__.py000066400000000000000000000012251412142116700215600ustar00rootroot00000000000000# ---------------------------------------------------------------------------- # Copyright (c) 2016-2021, QIIME 2 development team. # # Distributed under the terms of the Modified BSD License. # # The full license is in the file LICENSE, distributed with this software. # ---------------------------------------------------------------------------- import importlib from ._format import AlphaDiversityFormat, AlphaDiversityDirectoryFormat from ._type import SampleData, AlphaDiversity __all__ = ['AlphaDiversityFormat', 'AlphaDiversityDirectoryFormat', 'SampleData', 'AlphaDiversity'] importlib.import_module('q2_types.sample_data._transformer') q2-types-2021.8.0/q2_types/sample_data/_format.py000066400000000000000000000043751412142116700214610ustar00rootroot00000000000000# ---------------------------------------------------------------------------- # Copyright (c) 2016-2021, QIIME 2 development team. # # Distributed under the terms of the Modified BSD License. # # The full license is in the file LICENSE, distributed with this software. # ---------------------------------------------------------------------------- import csv import qiime2.plugin.model as model from qiime2.plugin import ValidationError from ..plugin_setup import plugin class AlphaDiversityFormat(model.TextFileFormat): def _validate_(self, level): with self.open() as fh: header, records_seen, is_min = None, 0, level == 'min' fh_ = csv.reader(fh, delimiter='\t') file_ = enumerate(fh_, 1) if is_min else zip(range(1, 11), fh_) for i, cells in file_: if header is None: if len(cells) < 2: raise ValidationError( 'Found header on line %d with the following ' 'columns: %s (length: %d), expected at least 2 ' 'columns.' % (i, cells, len(cells))) else: header = cells else: if len(cells) != len(header): raise ValidationError( 'Line %d has %s cells (%s), expected %s.' % (i, len(cells), cells, len(header))) records_seen += 1 # The first non-comment and non-blank row observed will always be # the header row, and since we have no requirement on the field # names (because they are dynamically defined), so no need to check # for the presence (or validity) of a header row at this point. 
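# For orientation at this point in the validation: a minimal file that
# passes has an empty leading header cell, at least one metric column, and
# data rows whose cell count matches the header -- e.g. the shipped
# alpha-diversity.tsv fixture (tabs shown as <TAB> for illustration only):
#
#     <TAB>shannon
#     Sample1<TAB>0.9709505944546688
#     Sample4<TAB>0.7219280948873623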
if records_seen == 0: raise ValidationError('No records found in file, only ' 'observed comments, blank lines, and/or ' 'a header row.') AlphaDiversityDirectoryFormat = model.SingleFileDirectoryFormat( 'AlphaDiversityDirectoryFormat', 'alpha-diversity.tsv', AlphaDiversityFormat) plugin.register_formats(AlphaDiversityFormat, AlphaDiversityDirectoryFormat) q2-types-2021.8.0/q2_types/sample_data/_transformer.py000066400000000000000000000033321412142116700225230ustar00rootroot00000000000000# ---------------------------------------------------------------------------- # Copyright (c) 2016-2021, QIIME 2 development team. # # Distributed under the terms of the Modified BSD License. # # The full license is in the file LICENSE, distributed with this software. # ---------------------------------------------------------------------------- import pandas as pd import numpy as np import qiime2 from ..plugin_setup import plugin from . import AlphaDiversityFormat def _read_alpha_diversity(fh): # Using `dtype=object` and `set_index` to avoid type casting/inference # of any columns or the index. df = pd.read_csv(fh, sep='\t', header=0, dtype=object) df.set_index(df.columns[0], drop=True, append=False, inplace=True) df.index.name = None # casting of columns adapted from SO post: # https://stackoverflow.com/a/36814203/3424666 cols = df.columns df[cols] = df[cols].apply(pd.to_numeric, errors='ignore') return df @plugin.register_transformer def _1(data: pd.Series) -> AlphaDiversityFormat: ff = AlphaDiversityFormat() with ff.open() as fh: data.to_csv(fh, sep='\t', header=True) return ff @plugin.register_transformer def _2(ff: AlphaDiversityFormat) -> pd.Series: with ff.open() as fh: df = _read_alpha_diversity(fh) series = df.iloc[:, 0] if not np.issubdtype(series, np.number): raise ValueError('Non-numeric values detected in alpha diversity ' 'estimates.') return series @plugin.register_transformer def _3(ff: AlphaDiversityFormat) -> qiime2.Metadata: with ff.open() as fh: df = _read_alpha_diversity(fh) df.index.name = 'Sample ID' return qiime2.Metadata(df) q2-types-2021.8.0/q2_types/sample_data/_type.py000066400000000000000000000015001412142116700211350ustar00rootroot00000000000000# ---------------------------------------------------------------------------- # Copyright (c) 2016-2021, QIIME 2 development team. # # Distributed under the terms of the Modified BSD License. # # The full license is in the file LICENSE, distributed with this software. # ---------------------------------------------------------------------------- from qiime2.plugin import SemanticType from ..plugin_setup import plugin from . import AlphaDiversityDirectoryFormat SampleData = SemanticType('SampleData', field_names='type') AlphaDiversity = SemanticType('AlphaDiversity', variant_of=SampleData.field['type']) plugin.register_semantic_types(SampleData, AlphaDiversity) plugin.register_semantic_type_to_format( SampleData[AlphaDiversity], artifact_format=AlphaDiversityDirectoryFormat ) q2-types-2021.8.0/q2_types/sample_data/tests/000077500000000000000000000000001412142116700206115ustar00rootroot00000000000000q2-types-2021.8.0/q2_types/sample_data/tests/__init__.py000066400000000000000000000005351412142116700227250ustar00rootroot00000000000000# ---------------------------------------------------------------------------- # Copyright (c) 2016-2021, QIIME 2 development team. # # Distributed under the terms of the Modified BSD License. # # The full license is in the file LICENSE, distributed with this software. 
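# A hedged end-to-end sketch of what the registrations above enable. This is
# not plugin code: it assumes a QIIME 2 environment with q2-types installed
# and an alpha-diversity.tsv file (like the test fixture) in the working
# directory.
import pandas as pd
from qiime2 import Artifact

art = Artifact.import_data('SampleData[AlphaDiversity]',
                           'alpha-diversity.tsv')
shannon = art.view(pd.Series)  # goes through the AlphaDiversityFormat ->
                               # pd.Series transformer registered above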
# ---------------------------------------------------------------------------- q2-types-2021.8.0/q2_types/sample_data/tests/data/000077500000000000000000000000001412142116700215225ustar00rootroot00000000000000q2-types-2021.8.0/q2_types/sample_data/tests/data/alpha-diversity-int-indices.tsv000066400000000000000000000000231412142116700275640ustar00rootroot00000000000000 foo 1 0.97 4 0.72 q2-types-2021.8.0/q2_types/sample_data/tests/data/alpha-diversity-jagged-rows.tsv000066400000000000000000000001111412142116700275650ustar00rootroot00000000000000 shannon extra Sample1 0.9709505944546688 foo Sample4 0.7219280948873623 q2-types-2021.8.0/q2_types/sample_data/tests/data/alpha-diversity-missing-records.tsv000066400000000000000000000000341412142116700304700ustar00rootroot00000000000000this is not alpha diversity q2-types-2021.8.0/q2_types/sample_data/tests/data/alpha-diversity-one-column.tsv000066400000000000000000000000211412142116700274300ustar00rootroot00000000000000 Sample1 Sample4 q2-types-2021.8.0/q2_types/sample_data/tests/data/alpha-diversity-one-sample.tsv000066400000000000000000000000441412142116700274210ustar00rootroot00000000000000 shannon Sample1 0.9709505944546688 q2-types-2021.8.0/q2_types/sample_data/tests/data/alpha-diversity-with-metadata.tsv000066400000000000000000000001151412142116700301110ustar00rootroot00000000000000 shannon extra Sample1 0.9709505944546688 foo Sample4 0.7219280948873623 bar q2-types-2021.8.0/q2_types/sample_data/tests/data/alpha-diversity.tsv000066400000000000000000000000771412142116700253710ustar00rootroot00000000000000 shannon Sample1 0.9709505944546688 Sample4 0.7219280948873623 q2-types-2021.8.0/q2_types/sample_data/tests/data/also-not-alpha-diversity.tsv000066400000000000000000000000461412142116700271170ustar00rootroot00000000000000 shannon Sample1 just Sample4 kidding q2-types-2021.8.0/q2_types/sample_data/tests/test_format.py000066400000000000000000000052231412142116700235140ustar00rootroot00000000000000# ---------------------------------------------------------------------------- # Copyright (c) 2016-2021, QIIME 2 development team. # # Distributed under the terms of the Modified BSD License. # # The full license is in the file LICENSE, distributed with this software. 
# ---------------------------------------------------------------------------- import shutil import unittest from q2_types.sample_data import (AlphaDiversityDirectoryFormat, AlphaDiversityFormat) from qiime2.plugin.testing import TestPluginBase from qiime2.plugin import ValidationError class TestFormats(TestPluginBase): package = "q2_types.sample_data.tests" def test_alpha_diversity_format_validate_positive(self): filepath = self.get_data_path('alpha-diversity.tsv') format = AlphaDiversityFormat(filepath, mode='r') # Should succeed format.validate() def test_alpha_diversity_dir_fmt_validate_positive(self): filepath = self.get_data_path('alpha-diversity.tsv') shutil.copy(filepath, self.temp_dir.name) format = AlphaDiversityDirectoryFormat(self.temp_dir.name, mode='r') # Should succeed format.validate() def test_alpha_diversity_format_validate_positive_one_sample(self): filepath = self.get_data_path('alpha-diversity-one-sample.tsv') format = AlphaDiversityFormat(filepath, mode='r') # Should succeed format.validate() def test_alpha_diversity_format_validate_positive_md_columns(self): filepath = self.get_data_path('alpha-diversity-with-metadata.tsv') format = AlphaDiversityFormat(filepath, mode='r') # Should succeed format.validate() def test_alpha_diversity_format_validate_negative_no_records(self): filepath = self.get_data_path('alpha-diversity-missing-records.tsv') format = AlphaDiversityFormat(filepath, mode='r') with self.assertRaisesRegex(ValidationError, 'No records found'): format.validate() def test_alpha_diversity_format_validate_negative_too_few_cols(self): filepath = self.get_data_path('alpha-diversity-one-column.tsv') format = AlphaDiversityFormat(filepath, mode='r') with self.assertRaisesRegex(ValidationError, 'line 1.*2 columns'): format.validate() def test_alpha_diversity_format_validate_negative_jagged_rows(self): filepath = self.get_data_path('alpha-diversity-jagged-rows.tsv') format = AlphaDiversityFormat(filepath, mode='r') with self.assertRaisesRegex(ValidationError, 'Line 3.*expected 3'): format.validate() if __name__ == '__main__': unittest.main() q2-types-2021.8.0/q2_types/sample_data/tests/test_transformer.py000066400000000000000000000064431412142116700245730ustar00rootroot00000000000000# ---------------------------------------------------------------------------- # Copyright (c) 2016-2021, QIIME 2 development team. # # Distributed under the terms of the Modified BSD License. # # The full license is in the file LICENSE, distributed with this software. 
# ---------------------------------------------------------------------------- import unittest import pandas as pd import qiime2 from pandas.testing import assert_series_equal from q2_types.sample_data import AlphaDiversityFormat from qiime2.plugin.testing import TestPluginBase class TestTransformers(TestPluginBase): package = "q2_types.sample_data.tests" def test_pd_series_to_alpha_diversity_format(self): transformer = self.get_transformer(pd.Series, AlphaDiversityFormat) exp_index = pd.Index(['Sample1', 'Sample4'], dtype=object) exp = pd.Series([0.970950594455, 0.721928094887], name='shannon', index=exp_index) obs = transformer(exp) # Squeeze equals true to return series instead of dataframe obs = pd.read_csv(str(obs), sep='\t', header=0, index_col=0, squeeze=True) assert_series_equal(exp, obs) def test_alpha_diversity_format_to_pd_series(self): filename = 'alpha-diversity.tsv' _, obs = self.transform_format(AlphaDiversityFormat, pd.Series, filename) exp_index = pd.Index(['Sample1', 'Sample4'], dtype=object) exp = pd.Series([0.970950594455, 0.721928094887], name='shannon', index=exp_index) assert_series_equal(exp, obs) def test_alpha_diversity_format_with_metadata_to_pd_series(self): filename = 'alpha-diversity-with-metadata.tsv' _, obs = self.transform_format(AlphaDiversityFormat, pd.Series, filename) exp_index = pd.Index(['Sample1', 'Sample4'], dtype=object) exp = pd.Series([0.970950594455, 0.721928094887], name='shannon', index=exp_index) assert_series_equal(exp, obs) def test_alpha_diversity_format_to_pd_series_int_indices(self): filename = 'alpha-diversity-int-indices.tsv' _, obs = self.transform_format(AlphaDiversityFormat, pd.Series, filename) exp_index = pd.Index(['1', '4'], dtype=object) exp = pd.Series([0.97, 0.72], name='foo', index=exp_index) assert_series_equal(exp, obs) def test_alpha_diversity_format_to_metadata(self): filename = 'alpha-diversity.tsv' _, obs = self.transform_format(AlphaDiversityFormat, qiime2.Metadata, filename) exp_index = pd.Index(['Sample1', 'Sample4'], name='Sample ID', dtype=object) exp_df = pd.DataFrame([[0.9709505944546688], [0.7219280948873623]], columns=['shannon'], index=exp_index) exp_md = qiime2.Metadata(exp_df) self.assertEqual(obs, exp_md) def test_non_alpha_diversity(self): filename = 'also-not-alpha-diversity.tsv' with self.assertRaisesRegex(ValueError, 'Non-numeric values '): self.transform_format(AlphaDiversityFormat, pd.Series, filename) if __name__ == '__main__': unittest.main() q2-types-2021.8.0/q2_types/sample_data/tests/test_type.py000066400000000000000000000021211412142116700231770ustar00rootroot00000000000000# ---------------------------------------------------------------------------- # Copyright (c) 2016-2021, QIIME 2 development team. # # Distributed under the terms of the Modified BSD License. # # The full license is in the file LICENSE, distributed with this software. 
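# A simplified, pandas-only sketch of the round trip exercised by the
# transformer tests above (illustrative, not plugin code; the buffer and
# variable names are invented for this example):
import io
import pandas as pd

series = pd.Series([0.970950594455, 0.721928094887], name='shannon',
                   index=pd.Index(['Sample1', 'Sample4']))
buf = io.StringIO()
series.to_csv(buf, sep='\t', header=True)   # mirrors what transformer _1 writes
buf.seek(0)
roundtrip = pd.read_csv(buf, sep='\t', index_col=0).iloc[:, 0]  # simplified
                                                                # stand-in for _2
assert list(roundtrip.index) == ['Sample1', 'Sample4']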
# ---------------------------------------------------------------------------- import unittest from q2_types.sample_data import (AlphaDiversityDirectoryFormat, SampleData, AlphaDiversity) from qiime2.plugin.testing import TestPluginBase class TestTypes(TestPluginBase): package = "q2_types.sample_data.tests" def test_sample_data_semantic_type_registration(self): self.assertRegisteredSemanticType(SampleData) def test_alpha_diversity_semantic_type_registration(self): self.assertRegisteredSemanticType(AlphaDiversity) def test_sample_data_alpha_div_to_alpha_div_dir_fmt_registration(self): self.assertSemanticTypeRegisteredToFormat( SampleData[AlphaDiversity], AlphaDiversityDirectoryFormat) if __name__ == '__main__': unittest.main() q2-types-2021.8.0/q2_types/tree/000077500000000000000000000000001412142116700161345ustar00rootroot00000000000000q2-types-2021.8.0/q2_types/tree/__init__.py000066400000000000000000000012101412142116700202370ustar00rootroot00000000000000# ---------------------------------------------------------------------------- # Copyright (c) 2016-2021, QIIME 2 development team. # # Distributed under the terms of the Modified BSD License. # # The full license is in the file LICENSE, distributed with this software. # ---------------------------------------------------------------------------- import importlib from ._format import NewickFormat, NewickDirectoryFormat from ._type import Phylogeny, Rooted, Unrooted, Hierarchy __all__ = [ 'NewickFormat', 'NewickDirectoryFormat', 'Phylogeny', 'Rooted', 'Unrooted', 'Hierarchy'] importlib.import_module('q2_types.tree._transformer') q2-types-2021.8.0/q2_types/tree/_format.py000066400000000000000000000014111412142116700201320ustar00rootroot00000000000000# ---------------------------------------------------------------------------- # Copyright (c) 2016-2021, QIIME 2 development team. # # Distributed under the terms of the Modified BSD License. # # The full license is in the file LICENSE, distributed with this software. # ---------------------------------------------------------------------------- import skbio.io import qiime2.plugin.model as model from ..plugin_setup import plugin class NewickFormat(model.TextFileFormat): def sniff(self): sniffer = skbio.io.io_registry.get_sniffer('newick') return sniffer(str(self))[0] NewickDirectoryFormat = model.SingleFileDirectoryFormat( 'NewickDirectoryFormat', 'tree.nwk', NewickFormat) plugin.register_formats(NewickFormat, NewickDirectoryFormat) q2-types-2021.8.0/q2_types/tree/_transformer.py000066400000000000000000000014111412142116700212040ustar00rootroot00000000000000# ---------------------------------------------------------------------------- # Copyright (c) 2016-2021, QIIME 2 development team. # # Distributed under the terms of the Modified BSD License. # # The full license is in the file LICENSE, distributed with this software. # ---------------------------------------------------------------------------- import skbio from ..plugin_setup import plugin from . 
import NewickFormat @plugin.register_transformer def _1(data: skbio.TreeNode) -> NewickFormat: ff = NewickFormat() with ff.open() as fh: data.write(fh, format='newick') return ff @plugin.register_transformer def _2(ff: NewickFormat) -> skbio.TreeNode: with ff.open() as fh: return skbio.TreeNode.read(fh, format='newick', verify=False) q2-types-2021.8.0/q2_types/tree/_type.py000066400000000000000000000020401412142116700176220ustar00rootroot00000000000000# ---------------------------------------------------------------------------- # Copyright (c) 2016-2021, QIIME 2 development team. # # Distributed under the terms of the Modified BSD License. # # The full license is in the file LICENSE, distributed with this software. # ---------------------------------------------------------------------------- from qiime2.plugin import SemanticType from ..plugin_setup import plugin from . import NewickDirectoryFormat Phylogeny = SemanticType('Phylogeny', field_names=['type']) Rooted = SemanticType('Rooted', variant_of=Phylogeny.field['type']) Unrooted = SemanticType('Unrooted', variant_of=Phylogeny.field['type']) Hierarchy = SemanticType('Hierarchy') plugin.register_semantic_types(Phylogeny, Rooted, Unrooted, Hierarchy) plugin.register_semantic_type_to_format(Phylogeny[Rooted | Unrooted], artifact_format=NewickDirectoryFormat) plugin.register_semantic_type_to_format(Hierarchy, artifact_format=NewickDirectoryFormat) q2-types-2021.8.0/q2_types/tree/tests/000077500000000000000000000000001412142116700172765ustar00rootroot00000000000000q2-types-2021.8.0/q2_types/tree/tests/__init__.py000066400000000000000000000005351412142116700214120ustar00rootroot00000000000000# ---------------------------------------------------------------------------- # Copyright (c) 2016-2021, QIIME 2 development team. # # Distributed under the terms of the Modified BSD License. # # The full license is in the file LICENSE, distributed with this software. # ---------------------------------------------------------------------------- q2-types-2021.8.0/q2_types/tree/tests/data/000077500000000000000000000000001412142116700202075ustar00rootroot00000000000000q2-types-2021.8.0/q2_types/tree/tests/data/not-tree.nwk000066400000000000000000000000311412142116700224570ustar00rootroot00000000000000This isn't a Newick Tree q2-types-2021.8.0/q2_types/tree/tests/data/tree.nwk000066400000000000000000000000571412142116700216710ustar00rootroot00000000000000(SEQUENCE1:0.000000003,SEQUENCE2:0.000000003); q2-types-2021.8.0/q2_types/tree/tests/test_format.py000066400000000000000000000025111412142116700221760ustar00rootroot00000000000000# ---------------------------------------------------------------------------- # Copyright (c) 2016-2021, QIIME 2 development team. # # Distributed under the terms of the Modified BSD License. # # The full license is in the file LICENSE, distributed with this software. 
# ---------------------------------------------------------------------------- import shutil import unittest from q2_types.tree import NewickFormat, NewickDirectoryFormat from qiime2.plugin.testing import TestPluginBase from qiime2.plugin import ValidationError class TestFormats(TestPluginBase): package = "q2_types.tree.tests" def test_newick_format_validate_positive(self): filepath = self.get_data_path('tree.nwk') format = NewickFormat(filepath, mode='r') format.validate() def test_newick_format_validate_negative(self): filepath = self.get_data_path('not-tree.nwk') format = NewickFormat(filepath, mode='r') with self.assertRaisesRegex(ValidationError, 'NewickFormat'): format.validate() def test_newick_directory_format_validate_positive(self): filepath = self.get_data_path('tree.nwk') shutil.copy(filepath, self.temp_dir.name) format = NewickDirectoryFormat(self.temp_dir.name, mode='r') format.validate() if __name__ == '__main__': unittest.main() q2-types-2021.8.0/q2_types/tree/tests/test_transformer.py000066400000000000000000000023401412142116700232500ustar00rootroot00000000000000# ---------------------------------------------------------------------------- # Copyright (c) 2016-2021, QIIME 2 development team. # # Distributed under the terms of the Modified BSD License. # # The full license is in the file LICENSE, distributed with this software. # ---------------------------------------------------------------------------- import unittest import skbio from q2_types.tree import NewickFormat from qiime2.plugin.testing import TestPluginBase class TestTransformers(TestPluginBase): package = "q2_types.tree.tests" def test_skbio_tree_node_to_newick_format(self): filepath = self.get_data_path('tree.nwk') transformer = self.get_transformer(skbio.TreeNode, NewickFormat) input = skbio.TreeNode.read(filepath) obs = transformer(input) obs = skbio.TreeNode.read(str(obs)) self.assertEqual(str(input), str(obs)) def test_newick_format_to_skbio_tree_node(self): filename = 'tree.nwk' input, obs = self.transform_format(NewickFormat, skbio.TreeNode, filename) exp = skbio.TreeNode.read(str(input)) self.assertEqual(str(exp), str(obs)) if __name__ == '__main__': unittest.main() q2-types-2021.8.0/q2_types/tree/tests/test_type.py000066400000000000000000000026421412142116700216740ustar00rootroot00000000000000# ---------------------------------------------------------------------------- # Copyright (c) 2016-2021, QIIME 2 development team. # # Distributed under the terms of the Modified BSD License. # # The full license is in the file LICENSE, distributed with this software.
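# A minimal sketch of the Newick round trip exercised above (illustrative
# only, not plugin code; the tree string is the shipped tree.nwk fixture):
import io
import skbio

tree = skbio.TreeNode.read(
    io.StringIO('(SEQUENCE1:0.000000003,SEQUENCE2:0.000000003);'),
    format='newick')
out = io.StringIO()
tree.write(out, format='newick')  # the same call the TreeNode ->
                                  # NewickFormat transformer makes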
# ---------------------------------------------------------------------------- import unittest from q2_types.tree import (Phylogeny, Rooted, Unrooted, Hierarchy, NewickDirectoryFormat) from qiime2.plugin.testing import TestPluginBase class TestTypes(TestPluginBase): package = "q2_types.tree.tests" def test_phylogeny_semantic_type_registration(self): self.assertRegisteredSemanticType(Phylogeny) def test_rooted_semantic_type_registration(self): self.assertRegisteredSemanticType(Rooted) def test_unrooted_semantic_type_registration(self): self.assertRegisteredSemanticType(Unrooted) def test_hierarchy_semantic_type_registration(self): self.assertRegisteredSemanticType(Hierarchy) def test_phylogeny_rooted_unrooted_to_newick_dir_fmt_registration(self): self.assertSemanticTypeRegisteredToFormat( Phylogeny[Rooted | Unrooted], NewickDirectoryFormat) def test_hierarchy_to_newick_dir_fmt_registration(self): self.assertSemanticTypeRegisteredToFormat( Hierarchy, NewickDirectoryFormat) if __name__ == '__main__': unittest.main() q2-types-2021.8.0/setup.cfg000066400000000000000000000002371412142116700152520ustar00rootroot00000000000000[versioneer] VCS=git style=pep440 versionfile_source = q2_types/_version.py versionfile_build = q2_types/_version.py tag_prefix = parentdir_prefix = q2-types- q2-types-2021.8.0/setup.py000066400000000000000000000034171412142116700151460ustar00rootroot00000000000000# ---------------------------------------------------------------------------- # Copyright (c) 2016-2021, QIIME 2 development team. # # Distributed under the terms of the Modified BSD License. # # The full license is in the file LICENSE, distributed with this software. # ---------------------------------------------------------------------------- from setuptools import setup, find_packages import versioneer setup( name="q2-types", version=versioneer.get_version(), cmdclass=versioneer.get_cmdclass(), packages=find_packages(), author="Greg Caporaso", author_email="gregcaporaso@gmail.com", description="Common QIIME 2 semantic types.", license='BSD-3-Clause', url="https://qiime2.org", entry_points={ 'qiime2.plugins': ['q2-types=q2_types.plugin_setup:plugin'] }, package_data={ 'q2_types': ['citations.bib'], 'q2_types.tests': ['data/*'], 'q2_types.distance_matrix.tests': ['data/*'], 'q2_types.feature_data.tests': ['data/*', 'data/taxonomy/*'], 'q2_types.feature_table.tests': ['data/*'], 'q2_types.multiplexed_sequences.tests': ['data/*'], 'q2_types.ordination.tests': ['data/*'], 'q2_types.per_sample_sequences.tests': ['data/*', 'data/paired_end_data/*', 'data/single_end_data/*', 'data/absolute_manifests/*', 'data/absolute_manifests_v2/*', 'data/relative_manifests/*', 'data/qiime1-demux-format/*', 'data/single-end-two-sample-data1/*', 'data/single-end-two-sample-data2/*', 'data/single-end-two-sample-data3/*'], 'q2_types.sample_data.tests': ['data/*'], 'q2_types.tree.tests': ['data/*'] }, zip_safe=False, ) q2-types-2021.8.0/versioneer.py000066400000000000000000002060221412142116700161640ustar00rootroot00000000000000 # Version: 0.18 # flake8: noqa """The Versioneer - like a rocketeer, but for versions. The Versioneer ============== * like a rocketeer, but for versions! 
* https://github.com/warner/python-versioneer * Brian Warner * License: Public Domain * Compatible With: python2.6, 2.7, 3.2, 3.3, 3.4, 3.5, 3.6, and pypy * [![Latest Version] (https://pypip.in/version/versioneer/badge.svg?style=flat) ](https://pypi.python.org/pypi/versioneer/) * [![Build Status] (https://travis-ci.org/warner/python-versioneer.png?branch=master) ](https://travis-ci.org/warner/python-versioneer) This is a tool for managing a recorded version number in distutils-based python projects. The goal is to remove the tedious and error-prone "update the embedded version string" step from your release process. Making a new release should be as easy as recording a new tag in your version-control system, and maybe making new tarballs. ## Quick Install * `pip install versioneer` to somewhere to your $PATH * add a `[versioneer]` section to your setup.cfg (see below) * run `versioneer install` in your source tree, commit the results ## Version Identifiers Source trees come from a variety of places: * a version-control system checkout (mostly used by developers) * a nightly tarball, produced by build automation * a snapshot tarball, produced by a web-based VCS browser, like github's "tarball from tag" feature * a release tarball, produced by "setup.py sdist", distributed through PyPI Within each source tree, the version identifier (either a string or a number, this tool is format-agnostic) can come from a variety of places: * ask the VCS tool itself, e.g. "git describe" (for checkouts), which knows about recent "tags" and an absolute revision-id * the name of the directory into which the tarball was unpacked * an expanded VCS keyword ($Id$, etc) * a `_version.py` created by some earlier build step For released software, the version identifier is closely related to a VCS tag. Some projects use tag names that include more than just the version string (e.g. "myproject-1.2" instead of just "1.2"), in which case the tool needs to strip the tag prefix to extract the version identifier. For unreleased software (between tags), the version identifier should provide enough information to help developers recreate the same tree, while also giving them an idea of roughly how old the tree is (after version 1.2, before version 1.3). Many VCS systems can report a description that captures this, for example `git describe --tags --dirty --always` reports things like "0.7-1-g574ab98-dirty" to indicate that the checkout is one revision past the 0.7 tag, has a unique revision id of "574ab98", and is "dirty" (it has uncommitted changes. The version identifier is used for multiple purposes: * to allow the module to self-identify its version: `myproject.__version__` * to choose a name and prefix for a 'setup.py sdist' tarball ## Theory of Operation Versioneer works by adding a special `_version.py` file into your source tree, where your `__init__.py` can import it. This `_version.py` knows how to dynamically ask the VCS tool for version information at import time. `_version.py` also contains `$Revision$` markers, and the installation process marks `_version.py` to have this marker rewritten with a tag name during the `git archive` command. As a result, generated tarballs will contain enough information to get the proper version. To allow `setup.py` to compute a version too, a `versioneer.py` is added to the top level of your source tree, next to `setup.py` and the `setup.cfg` that configures it. 
This overrides several distutils/setuptools commands to compute the version when invoked, and changes `setup.py build` and `setup.py sdist` to replace `_version.py` with a small static file that contains just the generated version data. ## Installation See [INSTALL.md](./INSTALL.md) for detailed installation instructions. ## Version-String Flavors Code which uses Versioneer can learn about its version string at runtime by importing `_version` from your main `__init__.py` file and running the `get_versions()` function. From the "outside" (e.g. in `setup.py`), you can import the top-level `versioneer.py` and run `get_versions()`. Both functions return a dictionary with different flavors of version information: * `['version']`: A condensed version string, rendered using the selected style. This is the most commonly used value for the project's version string. The default "pep440" style yields strings like `0.11`, `0.11+2.g1076c97`, or `0.11+2.g1076c97.dirty`. See the "Styles" section below for alternative styles. * `['full-revisionid']`: detailed revision identifier. For Git, this is the full SHA1 commit id, e.g. "1076c978a8d3cfc70f408fe5974aa6c092c949ac". * `['date']`: Date and time of the latest `HEAD` commit. For Git, it is the commit date in ISO 8601 format. This will be None if the date is not available. * `['dirty']`: a boolean, True if the tree has uncommitted changes. Note that this is only accurate if run in a VCS checkout, otherwise it is likely to be False or None * `['error']`: if the version string could not be computed, this will be set to a string describing the problem, otherwise it will be None. It may be useful to throw an exception in setup.py if this is set, to avoid e.g. creating tarballs with a version string of "unknown". Some variants are more useful than others. Including `full-revisionid` in a bug report should allow developers to reconstruct the exact code being tested (or indicate the presence of local changes that should be shared with the developers). `version` is suitable for display in an "about" box or a CLI `--version` output: it can be easily compared against release notes and lists of bugs fixed in various releases. The installer adds the following text to your `__init__.py` to place a basic version in `YOURPROJECT.__version__`: from ._version import get_versions __version__ = get_versions()['version'] del get_versions ## Styles The setup.cfg `style=` configuration controls how the VCS information is rendered into a version string. The default style, "pep440", produces a PEP440-compliant string, equal to the un-prefixed tag name for actual releases, and containing an additional "local version" section with more detail for in-between builds. For Git, this is TAG[+DISTANCE.gHEX[.dirty]] , using information from `git describe --tags --dirty --always`. For example "0.11+2.g1076c97.dirty" indicates that the tree is like the "1076c97" commit but has uncommitted changes (".dirty"), and that this commit is two revisions ("+2") beyond the "0.11" tag. For released software (exactly equal to a known tag), the identifier will only contain the stripped tag, e.g. "0.11". Other styles are available. See [details.md](details.md) in the Versioneer source tree for descriptions. ## Debugging Versioneer tries to avoid fatal errors: if something goes wrong, it will tend to return a version of "0+unknown". 
To investigate the problem, run `setup.py version`, which will run the version-lookup code in a verbose mode, and will display the full contents of `get_versions()` (including the `error` string, which may help identify what went wrong). ## Known Limitations Some situations are known to cause problems for Versioneer. This details the most significant ones. More can be found on Github [issues page](https://github.com/warner/python-versioneer/issues). ### Subprojects Versioneer has limited support for source trees in which `setup.py` is not in the root directory (e.g. `setup.py` and `.git/` are *not* siblings). The are two common reasons why `setup.py` might not be in the root: * Source trees which contain multiple subprojects, such as [Buildbot](https://github.com/buildbot/buildbot), which contains both "master" and "slave" subprojects, each with their own `setup.py`, `setup.cfg`, and `tox.ini`. Projects like these produce multiple PyPI distributions (and upload multiple independently-installable tarballs). * Source trees whose main purpose is to contain a C library, but which also provide bindings to Python (and perhaps other langauges) in subdirectories. Versioneer will look for `.git` in parent directories, and most operations should get the right version string. However `pip` and `setuptools` have bugs and implementation details which frequently cause `pip install .` from a subproject directory to fail to find a correct version string (so it usually defaults to `0+unknown`). `pip install --editable .` should work correctly. `setup.py install` might work too. Pip-8.1.1 is known to have this problem, but hopefully it will get fixed in some later version. [Bug #38](https://github.com/warner/python-versioneer/issues/38) is tracking this issue. The discussion in [PR #61](https://github.com/warner/python-versioneer/pull/61) describes the issue from the Versioneer side in more detail. [pip PR#3176](https://github.com/pypa/pip/pull/3176) and [pip PR#3615](https://github.com/pypa/pip/pull/3615) contain work to improve pip to let Versioneer work correctly. Versioneer-0.16 and earlier only looked for a `.git` directory next to the `setup.cfg`, so subprojects were completely unsupported with those releases. ### Editable installs with setuptools <= 18.5 `setup.py develop` and `pip install --editable .` allow you to install a project into a virtualenv once, then continue editing the source code (and test) without re-installing after every change. "Entry-point scripts" (`setup(entry_points={"console_scripts": ..})`) are a convenient way to specify executable scripts that should be installed along with the python package. These both work as expected when using modern setuptools. When using setuptools-18.5 or earlier, however, certain operations will cause `pkg_resources.DistributionNotFound` errors when running the entrypoint script, which must be resolved by re-installing the package. This happens when the install happens with one version, then the egg_info data is regenerated while a different version is checked out. Many setup.py commands cause egg_info to be rebuilt (including `sdist`, `wheel`, and installing into a different virtualenv), so this can be surprising. [Bug #83](https://github.com/warner/python-versioneer/issues/83) describes this one, but upgrading to a newer version of setuptools should probably resolve it. ### Unicode version strings While Versioneer works (and is continually tested) with both Python 2 and Python 3, it is not entirely consistent with bytes-vs-unicode distinctions. 
Newer releases probably generate unicode version strings on py2. It's not clear that this is wrong, but it may be surprising for applications when then write these strings to a network connection or include them in bytes-oriented APIs like cryptographic checksums. [Bug #71](https://github.com/warner/python-versioneer/issues/71) investigates this question. ## Updating Versioneer To upgrade your project to a new release of Versioneer, do the following: * install the new Versioneer (`pip install -U versioneer` or equivalent) * edit `setup.cfg`, if necessary, to include any new configuration settings indicated by the release notes. See [UPGRADING](./UPGRADING.md) for details. * re-run `versioneer install` in your source tree, to replace `SRC/_version.py` * commit any changed files ## Future Directions This tool is designed to make it easily extended to other version-control systems: all VCS-specific components are in separate directories like src/git/ . The top-level `versioneer.py` script is assembled from these components by running make-versioneer.py . In the future, make-versioneer.py will take a VCS name as an argument, and will construct a version of `versioneer.py` that is specific to the given VCS. It might also take the configuration arguments that are currently provided manually during installation by editing setup.py . Alternatively, it might go the other direction and include code from all supported VCS systems, reducing the number of intermediate scripts. ## License To make Versioneer easier to embed, all its code is dedicated to the public domain. The `_version.py` that it creates is also in the public domain. Specifically, both are released under the Creative Commons "Public Domain Dedication" license (CC0-1.0), as described in https://creativecommons.org/publicdomain/zero/1.0/ . """ from __future__ import print_function try: import configparser except ImportError: import ConfigParser as configparser import errno import json import os import re import subprocess import sys class VersioneerConfig: """Container for Versioneer configuration parameters.""" def get_root(): """Get the project root directory. We require that all commands are run from the project root, i.e. the directory that contains setup.py, setup.cfg, and versioneer.py . """ root = os.path.realpath(os.path.abspath(os.getcwd())) setup_py = os.path.join(root, "setup.py") versioneer_py = os.path.join(root, "versioneer.py") if not (os.path.exists(setup_py) or os.path.exists(versioneer_py)): # allow 'python path/to/setup.py COMMAND' root = os.path.dirname(os.path.realpath(os.path.abspath(sys.argv[0]))) setup_py = os.path.join(root, "setup.py") versioneer_py = os.path.join(root, "versioneer.py") if not (os.path.exists(setup_py) or os.path.exists(versioneer_py)): err = ("Versioneer was unable to run the project root directory. " "Versioneer requires setup.py to be executed from " "its immediate directory (like 'python setup.py COMMAND'), " "or in a way that lets it use sys.argv[0] to find the root " "(like 'python path/to/setup.py COMMAND').") raise VersioneerBadRootError(err) try: # Certain runtime workflows (setup.py install/develop in a setuptools # tree) execute all dependencies in a single python process, so # "versioneer" may be imported multiple times, and python's shared # module-import table will cache the first one. So we can't use # os.path.dirname(__file__), as that will find whichever # versioneer.py was first imported, even in later projects. 
me = os.path.realpath(os.path.abspath(__file__)) me_dir = os.path.normcase(os.path.splitext(me)[0]) vsr_dir = os.path.normcase(os.path.splitext(versioneer_py)[0]) if me_dir != vsr_dir: print("Warning: build in %s is using versioneer.py from %s" % (os.path.dirname(me), versioneer_py)) except NameError: pass return root def get_config_from_root(root): """Read the project setup.cfg file to determine Versioneer config.""" # This might raise EnvironmentError (if setup.cfg is missing), or # configparser.NoSectionError (if it lacks a [versioneer] section), or # configparser.NoOptionError (if it lacks "VCS="). See the docstring at # the top of versioneer.py for instructions on writing your setup.cfg . setup_cfg = os.path.join(root, "setup.cfg") parser = configparser.SafeConfigParser() with open(setup_cfg, "r") as f: parser.readfp(f) VCS = parser.get("versioneer", "VCS") # mandatory def get(parser, name): if parser.has_option("versioneer", name): return parser.get("versioneer", name) return None cfg = VersioneerConfig() cfg.VCS = VCS cfg.style = get(parser, "style") or "" cfg.versionfile_source = get(parser, "versionfile_source") cfg.versionfile_build = get(parser, "versionfile_build") cfg.tag_prefix = get(parser, "tag_prefix") if cfg.tag_prefix in ("''", '""'): cfg.tag_prefix = "" cfg.parentdir_prefix = get(parser, "parentdir_prefix") cfg.verbose = get(parser, "verbose") return cfg class NotThisMethod(Exception): """Exception raised if a method is not valid for the current scenario.""" # these dictionaries contain VCS-specific tools LONG_VERSION_PY = {} HANDLERS = {} def register_vcs_handler(vcs, method): # decorator """Decorator to mark a method as the handler for a particular VCS.""" def decorate(f): """Store f in HANDLERS[vcs][method].""" if vcs not in HANDLERS: HANDLERS[vcs] = {} HANDLERS[vcs][method] = f return f return decorate def run_command(commands, args, cwd=None, verbose=False, hide_stderr=False, env=None): """Call the given command(s).""" assert isinstance(commands, list) p = None for c in commands: try: dispcmd = str([c] + args) # remember shell=False, so use git.cmd on windows, not just git p = subprocess.Popen([c] + args, cwd=cwd, env=env, stdout=subprocess.PIPE, stderr=(subprocess.PIPE if hide_stderr else None)) break except EnvironmentError: e = sys.exc_info()[1] if e.errno == errno.ENOENT: continue if verbose: print("unable to run %s" % dispcmd) print(e) return None, None else: if verbose: print("unable to find command, tried %s" % (commands,)) return None, None stdout = p.communicate()[0].strip() if sys.version_info[0] >= 3: stdout = stdout.decode() if p.returncode != 0: if verbose: print("unable to run %s (error)" % dispcmd) print("stdout was %s" % stdout) return None, p.returncode return stdout, p.returncode LONG_VERSION_PY['git'] = ''' # This file helps to compute a version number in source trees obtained from # git-archive tarball (such as those provided by githubs download-from-tag # feature). Distribution tarballs (built by setup.py sdist) and build # directories (produced by setup.py build) will contain a much shorter file # that just contains the computed version number. # This file is released into the public domain. Generated by # versioneer-0.18 (https://github.com/warner/python-versioneer) """Git implementation of _version.py.""" import errno import os import re import subprocess import sys def get_keywords(): """Get the keywords needed to look up the version information.""" # these strings will be replaced by git during git-archive. 
# setup.py/versioneer.py will grep for the variable names, so they must # each be defined on a line of their own. _version.py will just call # get_keywords(). git_refnames = "%(DOLLAR)sFormat:%%d%(DOLLAR)s" git_full = "%(DOLLAR)sFormat:%%H%(DOLLAR)s" git_date = "%(DOLLAR)sFormat:%%ci%(DOLLAR)s" keywords = {"refnames": git_refnames, "full": git_full, "date": git_date} return keywords class VersioneerConfig: """Container for Versioneer configuration parameters.""" def get_config(): """Create, populate and return the VersioneerConfig() object.""" # these strings are filled in when 'setup.py versioneer' creates # _version.py cfg = VersioneerConfig() cfg.VCS = "git" cfg.style = "%(STYLE)s" cfg.tag_prefix = "%(TAG_PREFIX)s" cfg.parentdir_prefix = "%(PARENTDIR_PREFIX)s" cfg.versionfile_source = "%(VERSIONFILE_SOURCE)s" cfg.verbose = False return cfg class NotThisMethod(Exception): """Exception raised if a method is not valid for the current scenario.""" LONG_VERSION_PY = {} HANDLERS = {} def register_vcs_handler(vcs, method): # decorator """Decorator to mark a method as the handler for a particular VCS.""" def decorate(f): """Store f in HANDLERS[vcs][method].""" if vcs not in HANDLERS: HANDLERS[vcs] = {} HANDLERS[vcs][method] = f return f return decorate def run_command(commands, args, cwd=None, verbose=False, hide_stderr=False, env=None): """Call the given command(s).""" assert isinstance(commands, list) p = None for c in commands: try: dispcmd = str([c] + args) # remember shell=False, so use git.cmd on windows, not just git p = subprocess.Popen([c] + args, cwd=cwd, env=env, stdout=subprocess.PIPE, stderr=(subprocess.PIPE if hide_stderr else None)) break except EnvironmentError: e = sys.exc_info()[1] if e.errno == errno.ENOENT: continue if verbose: print("unable to run %%s" %% dispcmd) print(e) return None, None else: if verbose: print("unable to find command, tried %%s" %% (commands,)) return None, None stdout = p.communicate()[0].strip() if sys.version_info[0] >= 3: stdout = stdout.decode() if p.returncode != 0: if verbose: print("unable to run %%s (error)" %% dispcmd) print("stdout was %%s" %% stdout) return None, p.returncode return stdout, p.returncode def versions_from_parentdir(parentdir_prefix, root, verbose): """Try to determine the version from the parent directory name. Source tarballs conventionally unpack into a directory that includes both the project name and a version string. We will also support searching up two directory levels for an appropriately named parent directory """ rootdirs = [] for i in range(3): dirname = os.path.basename(root) if dirname.startswith(parentdir_prefix): return {"version": dirname[len(parentdir_prefix):], "full-revisionid": None, "dirty": False, "error": None, "date": None} else: rootdirs.append(root) root = os.path.dirname(root) # up a level if verbose: print("Tried directories %%s but none started with prefix %%s" %% (str(rootdirs), parentdir_prefix)) raise NotThisMethod("rootdir doesn't start with parentdir_prefix") @register_vcs_handler("git", "get_keywords") def git_get_keywords(versionfile_abs): """Extract version information from the given file.""" # the code embedded in _version.py can just fetch the value of these # keywords. When used from setup.py, we don't want to import _version.py, # so we do it with a regexp instead. This function is not used from # _version.py. 
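# As an illustration only (the sha and tag echo the examples in the
# Versioneer docstring above; the date is invented): after 'git archive'
# keyword expansion, the lines scraped here look like
#   git_refnames = " (HEAD -> master, tag: 0.11)"
#   git_full = "1076c978a8d3cfc70f408fe5974aa6c092c949ac"
#   git_date = "2021-08-01 12:00:00 -0700"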
keywords = {} try: f = open(versionfile_abs, "r") for line in f.readlines(): if line.strip().startswith("git_refnames ="): mo = re.search(r'=\s*"(.*)"', line) if mo: keywords["refnames"] = mo.group(1) if line.strip().startswith("git_full ="): mo = re.search(r'=\s*"(.*)"', line) if mo: keywords["full"] = mo.group(1) if line.strip().startswith("git_date ="): mo = re.search(r'=\s*"(.*)"', line) if mo: keywords["date"] = mo.group(1) f.close() except EnvironmentError: pass return keywords @register_vcs_handler("git", "keywords") def git_versions_from_keywords(keywords, tag_prefix, verbose): """Get version information from git keywords.""" if not keywords: raise NotThisMethod("no keywords at all, weird") date = keywords.get("date") if date is not None: # git-2.2.0 added "%%cI", which expands to an ISO-8601 -compliant # datestamp. However we prefer "%%ci" (which expands to an "ISO-8601 # -like" string, which we must then edit to make compliant), because # it's been around since git-1.5.3, and it's too difficult to # discover which version we're using, or to work around using an # older one. date = date.strip().replace(" ", "T", 1).replace(" ", "", 1) refnames = keywords["refnames"].strip() if refnames.startswith("$Format"): if verbose: print("keywords are unexpanded, not using") raise NotThisMethod("unexpanded keywords, not a git-archive tarball") refs = set([r.strip() for r in refnames.strip("()").split(",")]) # starting in git-1.8.3, tags are listed as "tag: foo-1.0" instead of # just "foo-1.0". If we see a "tag: " prefix, prefer those. TAG = "tag: " tags = set([r[len(TAG):] for r in refs if r.startswith(TAG)]) if not tags: # Either we're using git < 1.8.3, or there really are no tags. We use # a heuristic: assume all version tags have a digit. The old git %%d # expansion behaves like git log --decorate=short and strips out the # refs/heads/ and refs/tags/ prefixes that would let us distinguish # between branches and tags. By ignoring refnames without digits, we # filter out many common branch names like "release" and # "stabilization", as well as "HEAD" and "master". tags = set([r for r in refs if re.search(r'\d', r)]) if verbose: print("discarding '%%s', no digits" %% ",".join(refs - tags)) if verbose: print("likely tags: %%s" %% ",".join(sorted(tags))) for ref in sorted(tags): # sorting will prefer e.g. "2.0" over "2.0rc1" if ref.startswith(tag_prefix): r = ref[len(tag_prefix):] if verbose: print("picking %%s" %% r) return {"version": r, "full-revisionid": keywords["full"].strip(), "dirty": False, "error": None, "date": date} # no suitable tags, so version is "0+unknown", but full hex is still there if verbose: print("no suitable tags, using unknown + full revision id") return {"version": "0+unknown", "full-revisionid": keywords["full"].strip(), "dirty": False, "error": "no suitable tags", "date": None} @register_vcs_handler("git", "pieces_from_vcs") def git_pieces_from_vcs(tag_prefix, root, verbose, run_command=run_command): """Get version from 'git describe' in the root of the source tree. This only gets called if the git-archive 'subst' keywords were *not* expanded, and _version.py hasn't already been rewritten with a short version string, meaning we're inside a checked out source tree. 
""" GITS = ["git"] if sys.platform == "win32": GITS = ["git.cmd", "git.exe"] out, rc = run_command(GITS, ["rev-parse", "--git-dir"], cwd=root, hide_stderr=True) if rc != 0: if verbose: print("Directory %%s not under git control" %% root) raise NotThisMethod("'git rev-parse --git-dir' returned error") # if there is a tag matching tag_prefix, this yields TAG-NUM-gHEX[-dirty] # if there isn't one, this yields HEX[-dirty] (no NUM) describe_out, rc = run_command(GITS, ["describe", "--tags", "--dirty", "--always", "--long", "--match", "%%s*" %% tag_prefix], cwd=root) # --long was added in git-1.5.5 if describe_out is None: raise NotThisMethod("'git describe' failed") describe_out = describe_out.strip() full_out, rc = run_command(GITS, ["rev-parse", "HEAD"], cwd=root) if full_out is None: raise NotThisMethod("'git rev-parse' failed") full_out = full_out.strip() pieces = {} pieces["long"] = full_out pieces["short"] = full_out[:7] # maybe improved later pieces["error"] = None # parse describe_out. It will be like TAG-NUM-gHEX[-dirty] or HEX[-dirty] # TAG might have hyphens. git_describe = describe_out # look for -dirty suffix dirty = git_describe.endswith("-dirty") pieces["dirty"] = dirty if dirty: git_describe = git_describe[:git_describe.rindex("-dirty")] # now we have TAG-NUM-gHEX or HEX if "-" in git_describe: # TAG-NUM-gHEX mo = re.search(r'^(.+)-(\d+)-g([0-9a-f]+)$', git_describe) if not mo: # unparseable. Maybe git-describe is misbehaving? pieces["error"] = ("unable to parse git-describe output: '%%s'" %% describe_out) return pieces # tag full_tag = mo.group(1) if not full_tag.startswith(tag_prefix): if verbose: fmt = "tag '%%s' doesn't start with prefix '%%s'" print(fmt %% (full_tag, tag_prefix)) pieces["error"] = ("tag '%%s' doesn't start with prefix '%%s'" %% (full_tag, tag_prefix)) return pieces pieces["closest-tag"] = full_tag[len(tag_prefix):] # distance: number of commits since tag pieces["distance"] = int(mo.group(2)) # commit: short hex revision ID pieces["short"] = mo.group(3) else: # HEX: no tags pieces["closest-tag"] = None count_out, rc = run_command(GITS, ["rev-list", "HEAD", "--count"], cwd=root) pieces["distance"] = int(count_out) # total number of commits # commit date: see ISO-8601 comment in git_versions_from_keywords() date = run_command(GITS, ["show", "-s", "--format=%%ci", "HEAD"], cwd=root)[0].strip() pieces["date"] = date.strip().replace(" ", "T", 1).replace(" ", "", 1) return pieces def plus_or_dot(pieces): """Return a + if we don't already have one, else return a .""" if "+" in pieces.get("closest-tag", ""): return "." return "+" def render_pep440(pieces): """Build up version string, with post-release "local version identifier". Our goal: TAG[+DISTANCE.gHEX[.dirty]] . Note that if you get a tagged build and then dirty it, you'll get TAG+0.gHEX.dirty Exceptions: 1: no tags. git_describe was just HEX. 0+untagged.DISTANCE.gHEX[.dirty] """ if pieces["closest-tag"]: rendered = pieces["closest-tag"] if pieces["distance"] or pieces["dirty"]: rendered += plus_or_dot(pieces) rendered += "%%d.g%%s" %% (pieces["distance"], pieces["short"]) if pieces["dirty"]: rendered += ".dirty" else: # exception #1 rendered = "0+untagged.%%d.g%%s" %% (pieces["distance"], pieces["short"]) if pieces["dirty"]: rendered += ".dirty" return rendered def render_pep440_pre(pieces): """TAG[.post.devDISTANCE] -- No -dirty. Exceptions: 1: no tags. 
0.post.devDISTANCE """ if pieces["closest-tag"]: rendered = pieces["closest-tag"] if pieces["distance"]: rendered += ".post.dev%%d" %% pieces["distance"] else: # exception #1 rendered = "0.post.dev%%d" %% pieces["distance"] return rendered def render_pep440_post(pieces): """TAG[.postDISTANCE[.dev0]+gHEX] . The ".dev0" means dirty. Note that .dev0 sorts backwards (a dirty tree will appear "older" than the corresponding clean one), but you shouldn't be releasing software with -dirty anyways. Exceptions: 1: no tags. 0.postDISTANCE[.dev0] """ if pieces["closest-tag"]: rendered = pieces["closest-tag"] if pieces["distance"] or pieces["dirty"]: rendered += ".post%%d" %% pieces["distance"] if pieces["dirty"]: rendered += ".dev0" rendered += plus_or_dot(pieces) rendered += "g%%s" %% pieces["short"] else: # exception #1 rendered = "0.post%%d" %% pieces["distance"] if pieces["dirty"]: rendered += ".dev0" rendered += "+g%%s" %% pieces["short"] return rendered def render_pep440_old(pieces): """TAG[.postDISTANCE[.dev0]] . The ".dev0" means dirty. Eexceptions: 1: no tags. 0.postDISTANCE[.dev0] """ if pieces["closest-tag"]: rendered = pieces["closest-tag"] if pieces["distance"] or pieces["dirty"]: rendered += ".post%%d" %% pieces["distance"] if pieces["dirty"]: rendered += ".dev0" else: # exception #1 rendered = "0.post%%d" %% pieces["distance"] if pieces["dirty"]: rendered += ".dev0" return rendered def render_git_describe(pieces): """TAG[-DISTANCE-gHEX][-dirty]. Like 'git describe --tags --dirty --always'. Exceptions: 1: no tags. HEX[-dirty] (note: no 'g' prefix) """ if pieces["closest-tag"]: rendered = pieces["closest-tag"] if pieces["distance"]: rendered += "-%%d-g%%s" %% (pieces["distance"], pieces["short"]) else: # exception #1 rendered = pieces["short"] if pieces["dirty"]: rendered += "-dirty" return rendered def render_git_describe_long(pieces): """TAG-DISTANCE-gHEX[-dirty]. Like 'git describe --tags --dirty --always -long'. The distance/hash is unconditional. Exceptions: 1: no tags. HEX[-dirty] (note: no 'g' prefix) """ if pieces["closest-tag"]: rendered = pieces["closest-tag"] rendered += "-%%d-g%%s" %% (pieces["distance"], pieces["short"]) else: # exception #1 rendered = pieces["short"] if pieces["dirty"]: rendered += "-dirty" return rendered def render(pieces, style): """Render the given version pieces into the requested style.""" if pieces["error"]: return {"version": "unknown", "full-revisionid": pieces.get("long"), "dirty": None, "error": pieces["error"], "date": None} if not style or style == "default": style = "pep440" # the default if style == "pep440": rendered = render_pep440(pieces) elif style == "pep440-pre": rendered = render_pep440_pre(pieces) elif style == "pep440-post": rendered = render_pep440_post(pieces) elif style == "pep440-old": rendered = render_pep440_old(pieces) elif style == "git-describe": rendered = render_git_describe(pieces) elif style == "git-describe-long": rendered = render_git_describe_long(pieces) else: raise ValueError("unknown style '%%s'" %% style) return {"version": rendered, "full-revisionid": pieces["long"], "dirty": pieces["dirty"], "error": None, "date": pieces.get("date")} def get_versions(): """Get version information or return default if unable to do so.""" # I am in _version.py, which lives at ROOT/VERSIONFILE_SOURCE. If we have # __file__, we can work backwards from there to the root. Some # py2exe/bbfreeze/non-CPython implementations don't do __file__, in which # case we can only use expanded keywords. 
cfg = get_config() verbose = cfg.verbose try: return git_versions_from_keywords(get_keywords(), cfg.tag_prefix, verbose) except NotThisMethod: pass try: root = os.path.realpath(__file__) # versionfile_source is the relative path from the top of the source # tree (where the .git directory might live) to this file. Invert # this to find the root from __file__. for i in cfg.versionfile_source.split('/'): root = os.path.dirname(root) except NameError: return {"version": "0+unknown", "full-revisionid": None, "dirty": None, "error": "unable to find root of source tree", "date": None} try: pieces = git_pieces_from_vcs(cfg.tag_prefix, root, verbose) return render(pieces, cfg.style) except NotThisMethod: pass try: if cfg.parentdir_prefix: return versions_from_parentdir(cfg.parentdir_prefix, root, verbose) except NotThisMethod: pass return {"version": "0+unknown", "full-revisionid": None, "dirty": None, "error": "unable to compute version", "date": None} ''' @register_vcs_handler("git", "get_keywords") def git_get_keywords(versionfile_abs): """Extract version information from the given file.""" # the code embedded in _version.py can just fetch the value of these # keywords. When used from setup.py, we don't want to import _version.py, # so we do it with a regexp instead. This function is not used from # _version.py. keywords = {} try: f = open(versionfile_abs, "r") for line in f.readlines(): if line.strip().startswith("git_refnames ="): mo = re.search(r'=\s*"(.*)"', line) if mo: keywords["refnames"] = mo.group(1) if line.strip().startswith("git_full ="): mo = re.search(r'=\s*"(.*)"', line) if mo: keywords["full"] = mo.group(1) if line.strip().startswith("git_date ="): mo = re.search(r'=\s*"(.*)"', line) if mo: keywords["date"] = mo.group(1) f.close() except EnvironmentError: pass return keywords @register_vcs_handler("git", "keywords") def git_versions_from_keywords(keywords, tag_prefix, verbose): """Get version information from git keywords.""" if not keywords: raise NotThisMethod("no keywords at all, weird") date = keywords.get("date") if date is not None: # git-2.2.0 added "%cI", which expands to an ISO-8601 -compliant # datestamp. However we prefer "%ci" (which expands to an "ISO-8601 # -like" string, which we must then edit to make compliant), because # it's been around since git-1.5.3, and it's too difficult to # discover which version we're using, or to work around using an # older one. date = date.strip().replace(" ", "T", 1).replace(" ", "", 1) refnames = keywords["refnames"].strip() if refnames.startswith("$Format"): if verbose: print("keywords are unexpanded, not using") raise NotThisMethod("unexpanded keywords, not a git-archive tarball") refs = set([r.strip() for r in refnames.strip("()").split(",")]) # starting in git-1.8.3, tags are listed as "tag: foo-1.0" instead of # just "foo-1.0". If we see a "tag: " prefix, prefer those. TAG = "tag: " tags = set([r[len(TAG):] for r in refs if r.startswith(TAG)]) if not tags: # Either we're using git < 1.8.3, or there really are no tags. We use # a heuristic: assume all version tags have a digit. The old git %d # expansion behaves like git log --decorate=short and strips out the # refs/heads/ and refs/tags/ prefixes that would let us distinguish # between branches and tags. By ignoring refnames without digits, we # filter out many common branch names like "release" and # "stabilization", as well as "HEAD" and "master". 
tags = set([r for r in refs if re.search(r'\d', r)]) if verbose: print("discarding '%s', no digits" % ",".join(refs - tags)) if verbose: print("likely tags: %s" % ",".join(sorted(tags))) for ref in sorted(tags): # sorting will prefer e.g. "2.0" over "2.0rc1" if ref.startswith(tag_prefix): r = ref[len(tag_prefix):] if verbose: print("picking %s" % r) return {"version": r, "full-revisionid": keywords["full"].strip(), "dirty": False, "error": None, "date": date} # no suitable tags, so version is "0+unknown", but full hex is still there if verbose: print("no suitable tags, using unknown + full revision id") return {"version": "0+unknown", "full-revisionid": keywords["full"].strip(), "dirty": False, "error": "no suitable tags", "date": None} @register_vcs_handler("git", "pieces_from_vcs") def git_pieces_from_vcs(tag_prefix, root, verbose, run_command=run_command): """Get version from 'git describe' in the root of the source tree. This only gets called if the git-archive 'subst' keywords were *not* expanded, and _version.py hasn't already been rewritten with a short version string, meaning we're inside a checked out source tree. """ GITS = ["git"] if sys.platform == "win32": GITS = ["git.cmd", "git.exe"] out, rc = run_command(GITS, ["rev-parse", "--git-dir"], cwd=root, hide_stderr=True) if rc != 0: if verbose: print("Directory %s not under git control" % root) raise NotThisMethod("'git rev-parse --git-dir' returned error") # if there is a tag matching tag_prefix, this yields TAG-NUM-gHEX[-dirty] # if there isn't one, this yields HEX[-dirty] (no NUM) describe_out, rc = run_command(GITS, ["describe", "--tags", "--dirty", "--always", "--long", "--match", "%s*" % tag_prefix], cwd=root) # --long was added in git-1.5.5 if describe_out is None: raise NotThisMethod("'git describe' failed") describe_out = describe_out.strip() full_out, rc = run_command(GITS, ["rev-parse", "HEAD"], cwd=root) if full_out is None: raise NotThisMethod("'git rev-parse' failed") full_out = full_out.strip() pieces = {} pieces["long"] = full_out pieces["short"] = full_out[:7] # maybe improved later pieces["error"] = None # parse describe_out. It will be like TAG-NUM-gHEX[-dirty] or HEX[-dirty] # TAG might have hyphens. git_describe = describe_out # look for -dirty suffix dirty = git_describe.endswith("-dirty") pieces["dirty"] = dirty if dirty: git_describe = git_describe[:git_describe.rindex("-dirty")] # now we have TAG-NUM-gHEX or HEX if "-" in git_describe: # TAG-NUM-gHEX mo = re.search(r'^(.+)-(\d+)-g([0-9a-f]+)$', git_describe) if not mo: # unparseable. Maybe git-describe is misbehaving? 
pieces["error"] = ("unable to parse git-describe output: '%s'" % describe_out) return pieces # tag full_tag = mo.group(1) if not full_tag.startswith(tag_prefix): if verbose: fmt = "tag '%s' doesn't start with prefix '%s'" print(fmt % (full_tag, tag_prefix)) pieces["error"] = ("tag '%s' doesn't start with prefix '%s'" % (full_tag, tag_prefix)) return pieces pieces["closest-tag"] = full_tag[len(tag_prefix):] # distance: number of commits since tag pieces["distance"] = int(mo.group(2)) # commit: short hex revision ID pieces["short"] = mo.group(3) else: # HEX: no tags pieces["closest-tag"] = None count_out, rc = run_command(GITS, ["rev-list", "HEAD", "--count"], cwd=root) pieces["distance"] = int(count_out) # total number of commits # commit date: see ISO-8601 comment in git_versions_from_keywords() date = run_command(GITS, ["show", "-s", "--format=%ci", "HEAD"], cwd=root)[0].strip() pieces["date"] = date.strip().replace(" ", "T", 1).replace(" ", "", 1) return pieces def do_vcs_install(manifest_in, versionfile_source, ipy): """Git-specific installation logic for Versioneer. For Git, this means creating/changing .gitattributes to mark _version.py for export-subst keyword substitution. """ GITS = ["git"] if sys.platform == "win32": GITS = ["git.cmd", "git.exe"] files = [manifest_in, versionfile_source] if ipy: files.append(ipy) try: me = __file__ if me.endswith(".pyc") or me.endswith(".pyo"): me = os.path.splitext(me)[0] + ".py" versioneer_file = os.path.relpath(me) except NameError: versioneer_file = "versioneer.py" files.append(versioneer_file) present = False try: f = open(".gitattributes", "r") for line in f.readlines(): if line.strip().startswith(versionfile_source): if "export-subst" in line.strip().split()[1:]: present = True f.close() except EnvironmentError: pass if not present: f = open(".gitattributes", "a+") f.write("%s export-subst\n" % versionfile_source) f.close() files.append(".gitattributes") run_command(GITS, ["add", "--"] + files) def versions_from_parentdir(parentdir_prefix, root, verbose): """Try to determine the version from the parent directory name. Source tarballs conventionally unpack into a directory that includes both the project name and a version string. We will also support searching up two directory levels for an appropriately named parent directory """ rootdirs = [] for i in range(3): dirname = os.path.basename(root) if dirname.startswith(parentdir_prefix): return {"version": dirname[len(parentdir_prefix):], "full-revisionid": None, "dirty": False, "error": None, "date": None} else: rootdirs.append(root) root = os.path.dirname(root) # up a level if verbose: print("Tried directories %s but none started with prefix %s" % (str(rootdirs), parentdir_prefix)) raise NotThisMethod("rootdir doesn't start with parentdir_prefix") SHORT_VERSION_PY = """ # This file was generated by 'versioneer.py' (0.18) from # revision-control system data, or from the parent directory name of an # unpacked source archive. Distribution tarballs contain a pre-generated copy # of this file. 
import json version_json = ''' %s ''' # END VERSION_JSON def get_versions(): return json.loads(version_json) """ def versions_from_file(filename): """Try to determine the version from _version.py if present.""" try: with open(filename) as f: contents = f.read() except EnvironmentError: raise NotThisMethod("unable to read _version.py") mo = re.search(r"version_json = '''\n(.*)''' # END VERSION_JSON", contents, re.M | re.S) if not mo: mo = re.search(r"version_json = '''\r\n(.*)''' # END VERSION_JSON", contents, re.M | re.S) if not mo: raise NotThisMethod("no version_json in _version.py") return json.loads(mo.group(1)) def write_to_version_file(filename, versions): """Write the given version number to the given _version.py file.""" os.unlink(filename) contents = json.dumps(versions, sort_keys=True, indent=1, separators=(",", ": ")) with open(filename, "w") as f: f.write(SHORT_VERSION_PY % contents) print("set %s to '%s'" % (filename, versions["version"])) def plus_or_dot(pieces): """Return a + if we don't already have one, else return a .""" if "+" in pieces.get("closest-tag", ""): return "." return "+" def render_pep440(pieces): """Build up version string, with post-release "local version identifier". Our goal: TAG[+DISTANCE.gHEX[.dirty]] . Note that if you get a tagged build and then dirty it, you'll get TAG+0.gHEX.dirty Exceptions: 1: no tags. git_describe was just HEX. 0+untagged.DISTANCE.gHEX[.dirty] """ if pieces["closest-tag"]: rendered = pieces["closest-tag"] if pieces["distance"] or pieces["dirty"]: rendered += plus_or_dot(pieces) rendered += "%d.g%s" % (pieces["distance"], pieces["short"]) if pieces["dirty"]: rendered += ".dirty" else: # exception #1 rendered = "0+untagged.%d.g%s" % (pieces["distance"], pieces["short"]) if pieces["dirty"]: rendered += ".dirty" return rendered def render_pep440_pre(pieces): """TAG[.post.devDISTANCE] -- No -dirty. Exceptions: 1: no tags. 0.post.devDISTANCE """ if pieces["closest-tag"]: rendered = pieces["closest-tag"] if pieces["distance"]: rendered += ".post.dev%d" % pieces["distance"] else: # exception #1 rendered = "0.post.dev%d" % pieces["distance"] return rendered def render_pep440_post(pieces): """TAG[.postDISTANCE[.dev0]+gHEX] . The ".dev0" means dirty. Note that .dev0 sorts backwards (a dirty tree will appear "older" than the corresponding clean one), but you shouldn't be releasing software with -dirty anyways. Exceptions: 1: no tags. 0.postDISTANCE[.dev0] """ if pieces["closest-tag"]: rendered = pieces["closest-tag"] if pieces["distance"] or pieces["dirty"]: rendered += ".post%d" % pieces["distance"] if pieces["dirty"]: rendered += ".dev0" rendered += plus_or_dot(pieces) rendered += "g%s" % pieces["short"] else: # exception #1 rendered = "0.post%d" % pieces["distance"] if pieces["dirty"]: rendered += ".dev0" rendered += "+g%s" % pieces["short"] return rendered def render_pep440_old(pieces): """TAG[.postDISTANCE[.dev0]] . The ".dev0" means dirty. Eexceptions: 1: no tags. 0.postDISTANCE[.dev0] """ if pieces["closest-tag"]: rendered = pieces["closest-tag"] if pieces["distance"] or pieces["dirty"]: rendered += ".post%d" % pieces["distance"] if pieces["dirty"]: rendered += ".dev0" else: # exception #1 rendered = "0.post%d" % pieces["distance"] if pieces["dirty"]: rendered += ".dev0" return rendered def render_git_describe(pieces): """TAG[-DISTANCE-gHEX][-dirty]. Like 'git describe --tags --dirty --always'. Exceptions: 1: no tags. 
HEX[-dirty] (note: no 'g' prefix) """ if pieces["closest-tag"]: rendered = pieces["closest-tag"] if pieces["distance"]: rendered += "-%d-g%s" % (pieces["distance"], pieces["short"]) else: # exception #1 rendered = pieces["short"] if pieces["dirty"]: rendered += "-dirty" return rendered def render_git_describe_long(pieces): """TAG-DISTANCE-gHEX[-dirty]. Like 'git describe --tags --dirty --always -long'. The distance/hash is unconditional. Exceptions: 1: no tags. HEX[-dirty] (note: no 'g' prefix) """ if pieces["closest-tag"]: rendered = pieces["closest-tag"] rendered += "-%d-g%s" % (pieces["distance"], pieces["short"]) else: # exception #1 rendered = pieces["short"] if pieces["dirty"]: rendered += "-dirty" return rendered def render(pieces, style): """Render the given version pieces into the requested style.""" if pieces["error"]: return {"version": "unknown", "full-revisionid": pieces.get("long"), "dirty": None, "error": pieces["error"], "date": None} if not style or style == "default": style = "pep440" # the default if style == "pep440": rendered = render_pep440(pieces) elif style == "pep440-pre": rendered = render_pep440_pre(pieces) elif style == "pep440-post": rendered = render_pep440_post(pieces) elif style == "pep440-old": rendered = render_pep440_old(pieces) elif style == "git-describe": rendered = render_git_describe(pieces) elif style == "git-describe-long": rendered = render_git_describe_long(pieces) else: raise ValueError("unknown style '%s'" % style) return {"version": rendered, "full-revisionid": pieces["long"], "dirty": pieces["dirty"], "error": None, "date": pieces.get("date")} class VersioneerBadRootError(Exception): """The project root directory is unknown or missing key files.""" def get_versions(verbose=False): """Get the project version from whatever source is available. Returns dict with two keys: 'version' and 'full'. """ if "versioneer" in sys.modules: # see the discussion in cmdclass.py:get_cmdclass() del sys.modules["versioneer"] root = get_root() cfg = get_config_from_root(root) assert cfg.VCS is not None, "please set [versioneer]VCS= in setup.cfg" handlers = HANDLERS.get(cfg.VCS) assert handlers, "unrecognized VCS '%s'" % cfg.VCS verbose = verbose or cfg.verbose assert cfg.versionfile_source is not None, \ "please set versioneer.versionfile_source" assert cfg.tag_prefix is not None, "please set versioneer.tag_prefix" versionfile_abs = os.path.join(root, cfg.versionfile_source) # extract version from first of: _version.py, VCS command (e.g. 'git # describe'), parentdir. This is meant to work for developers using a # source checkout, for users of a tarball created by 'setup.py sdist', # and for users of a tarball/zipball created by 'git archive' or github's # download-from-tag feature or the equivalent in other VCSes. 
get_keywords_f = handlers.get("get_keywords") from_keywords_f = handlers.get("keywords") if get_keywords_f and from_keywords_f: try: keywords = get_keywords_f(versionfile_abs) ver = from_keywords_f(keywords, cfg.tag_prefix, verbose) if verbose: print("got version from expanded keyword %s" % ver) return ver except NotThisMethod: pass try: ver = versions_from_file(versionfile_abs) if verbose: print("got version from file %s %s" % (versionfile_abs, ver)) return ver except NotThisMethod: pass from_vcs_f = handlers.get("pieces_from_vcs") if from_vcs_f: try: pieces = from_vcs_f(cfg.tag_prefix, root, verbose) ver = render(pieces, cfg.style) if verbose: print("got version from VCS %s" % ver) return ver except NotThisMethod: pass try: if cfg.parentdir_prefix: ver = versions_from_parentdir(cfg.parentdir_prefix, root, verbose) if verbose: print("got version from parentdir %s" % ver) return ver except NotThisMethod: pass if verbose: print("unable to compute version") return {"version": "0+unknown", "full-revisionid": None, "dirty": None, "error": "unable to compute version", "date": None} def get_version(): """Get the short version string for this project.""" return get_versions()["version"] def get_cmdclass(): """Get the custom setuptools/distutils subclasses used by Versioneer.""" if "versioneer" in sys.modules: del sys.modules["versioneer"] # this fixes the "python setup.py develop" case (also 'install' and # 'easy_install .'), in which subdependencies of the main project are # built (using setup.py bdist_egg) in the same python process. Assume # a main project A and a dependency B, which use different versions # of Versioneer. A's setup.py imports A's Versioneer, leaving it in # sys.modules by the time B's setup.py is executed, causing B to run # with the wrong versioneer. Setuptools wraps the sub-dep builds in a # sandbox that restores sys.modules to it's pre-build state, so the # parent is protected against the child's "import versioneer". By # removing ourselves from sys.modules here, before the child build # happens, we protect the child from the parent's versioneer too. # Also see https://github.com/warner/python-versioneer/issues/52 cmds = {} # we add "version" to both distutils and setuptools from distutils.core import Command class cmd_version(Command): description = "report generated version string" user_options = [] boolean_options = [] def initialize_options(self): pass def finalize_options(self): pass def run(self): vers = get_versions(verbose=True) print("Version: %s" % vers["version"]) print(" full-revisionid: %s" % vers.get("full-revisionid")) print(" dirty: %s" % vers.get("dirty")) print(" date: %s" % vers.get("date")) if vers["error"]: print(" error: %s" % vers["error"]) cmds["version"] = cmd_version # we override "build_py" in both distutils and setuptools # # most invocation pathways end up running build_py: # distutils/build -> build_py # distutils/install -> distutils/build ->.. # setuptools/bdist_wheel -> distutils/install ->.. # setuptools/bdist_egg -> distutils/install_lib -> build_py # setuptools/install -> bdist_egg ->.. # setuptools/develop -> ? # pip install: # copies source tree to a tempdir before running egg_info/etc # if .git isn't copied too, 'git describe' will fail # then does setup.py bdist_wheel, or sometimes setup.py install # setup.py egg_info -> ? 
# we override different "build_py" commands for both environments if "setuptools" in sys.modules: from setuptools.command.build_py import build_py as _build_py else: from distutils.command.build_py import build_py as _build_py class cmd_build_py(_build_py): def run(self): root = get_root() cfg = get_config_from_root(root) versions = get_versions() _build_py.run(self) # now locate _version.py in the new build/ directory and replace # it with an updated value if cfg.versionfile_build: target_versionfile = os.path.join(self.build_lib, cfg.versionfile_build) print("UPDATING %s" % target_versionfile) write_to_version_file(target_versionfile, versions) cmds["build_py"] = cmd_build_py if "cx_Freeze" in sys.modules: # cx_freeze enabled? from cx_Freeze.dist import build_exe as _build_exe # nczeczulin reports that py2exe won't like the pep440-style string # as FILEVERSION, but it can be used for PRODUCTVERSION, e.g. # setup(console=[{ # "version": versioneer.get_version().split("+", 1)[0], # FILEVERSION # "product_version": versioneer.get_version(), # ... class cmd_build_exe(_build_exe): def run(self): root = get_root() cfg = get_config_from_root(root) versions = get_versions() target_versionfile = cfg.versionfile_source print("UPDATING %s" % target_versionfile) write_to_version_file(target_versionfile, versions) _build_exe.run(self) os.unlink(target_versionfile) with open(cfg.versionfile_source, "w") as f: LONG = LONG_VERSION_PY[cfg.VCS] f.write(LONG % {"DOLLAR": "$", "STYLE": cfg.style, "TAG_PREFIX": cfg.tag_prefix, "PARENTDIR_PREFIX": cfg.parentdir_prefix, "VERSIONFILE_SOURCE": cfg.versionfile_source, }) cmds["build_exe"] = cmd_build_exe del cmds["build_py"] if 'py2exe' in sys.modules: # py2exe enabled? try: from py2exe.distutils_buildexe import py2exe as _py2exe # py3 except ImportError: from py2exe.build_exe import py2exe as _py2exe # py2 class cmd_py2exe(_py2exe): def run(self): root = get_root() cfg = get_config_from_root(root) versions = get_versions() target_versionfile = cfg.versionfile_source print("UPDATING %s" % target_versionfile) write_to_version_file(target_versionfile, versions) _py2exe.run(self) os.unlink(target_versionfile) with open(cfg.versionfile_source, "w") as f: LONG = LONG_VERSION_PY[cfg.VCS] f.write(LONG % {"DOLLAR": "$", "STYLE": cfg.style, "TAG_PREFIX": cfg.tag_prefix, "PARENTDIR_PREFIX": cfg.parentdir_prefix, "VERSIONFILE_SOURCE": cfg.versionfile_source, }) cmds["py2exe"] = cmd_py2exe # we override different "sdist" commands for both environments if "setuptools" in sys.modules: from setuptools.command.sdist import sdist as _sdist else: from distutils.command.sdist import sdist as _sdist class cmd_sdist(_sdist): def run(self): versions = get_versions() self._versioneer_generated_versions = versions # unless we update this, the command will keep using the old # version self.distribution.metadata.version = versions["version"] return _sdist.run(self) def make_release_tree(self, base_dir, files): root = get_root() cfg = get_config_from_root(root) _sdist.make_release_tree(self, base_dir, files) # now locate _version.py in the new base_dir directory # (remembering that it may be a hardlink) and replace it with an # updated value target_versionfile = os.path.join(base_dir, cfg.versionfile_source) print("UPDATING %s" % target_versionfile) write_to_version_file(target_versionfile, self._versioneer_generated_versions) cmds["sdist"] = cmd_sdist return cmds CONFIG_ERROR = """ setup.cfg is missing the necessary Versioneer configuration. 
You need a section like: [versioneer] VCS = git style = pep440 versionfile_source = src/myproject/_version.py versionfile_build = myproject/_version.py tag_prefix = parentdir_prefix = myproject- You will also need to edit your setup.py to use the results: import versioneer setup(version=versioneer.get_version(), cmdclass=versioneer.get_cmdclass(), ...) Please read the docstring in ./versioneer.py for configuration instructions, edit setup.cfg, and re-run the installer or 'python versioneer.py setup'. """ SAMPLE_CONFIG = """ # See the docstring in versioneer.py for instructions. Note that you must # re-run 'versioneer.py setup' after changing this section, and commit the # resulting files. [versioneer] #VCS = git #style = pep440 #versionfile_source = #versionfile_build = #tag_prefix = #parentdir_prefix = """ INIT_PY_SNIPPET = """ from ._version import get_versions __version__ = get_versions()['version'] del get_versions """ def do_setup(): """Main VCS-independent setup function for installing Versioneer.""" root = get_root() try: cfg = get_config_from_root(root) except (EnvironmentError, configparser.NoSectionError, configparser.NoOptionError) as e: if isinstance(e, (EnvironmentError, configparser.NoSectionError)): print("Adding sample versioneer config to setup.cfg", file=sys.stderr) with open(os.path.join(root, "setup.cfg"), "a") as f: f.write(SAMPLE_CONFIG) print(CONFIG_ERROR, file=sys.stderr) return 1 print(" creating %s" % cfg.versionfile_source) with open(cfg.versionfile_source, "w") as f: LONG = LONG_VERSION_PY[cfg.VCS] f.write(LONG % {"DOLLAR": "$", "STYLE": cfg.style, "TAG_PREFIX": cfg.tag_prefix, "PARENTDIR_PREFIX": cfg.parentdir_prefix, "VERSIONFILE_SOURCE": cfg.versionfile_source, }) ipy = os.path.join(os.path.dirname(cfg.versionfile_source), "__init__.py") if os.path.exists(ipy): try: with open(ipy, "r") as f: old = f.read() except EnvironmentError: old = "" if INIT_PY_SNIPPET not in old: print(" appending to %s" % ipy) with open(ipy, "a") as f: f.write(INIT_PY_SNIPPET) else: print(" %s unmodified" % ipy) else: print(" %s doesn't exist, ok" % ipy) ipy = None # Make sure both the top-level "versioneer.py" and versionfile_source # (PKG/_version.py, used by runtime code) are in MANIFEST.in, so # they'll be copied into source distributions. Pip won't be able to # install the package without this. manifest_in = os.path.join(root, "MANIFEST.in") simple_includes = set() try: with open(manifest_in, "r") as f: for line in f: if line.startswith("include "): for include in line.split()[1:]: simple_includes.add(include) except EnvironmentError: pass # That doesn't cover everything MANIFEST.in can do # (http://docs.python.org/2/distutils/sourcedist.html#commands), so # it might give some false negatives. Appending redundant 'include' # lines is safe, though. if "versioneer.py" not in simple_includes: print(" appending 'versioneer.py' to MANIFEST.in") with open(manifest_in, "a") as f: f.write("include versioneer.py\n") else: print(" 'versioneer.py' already in MANIFEST.in") if cfg.versionfile_source not in simple_includes: print(" appending versionfile_source ('%s') to MANIFEST.in" % cfg.versionfile_source) with open(manifest_in, "a") as f: f.write("include %s\n" % cfg.versionfile_source) else: print(" versionfile_source already in MANIFEST.in") # Make VCS-specific changes. For git, this means creating/changing # .gitattributes to mark _version.py for export-subst keyword # substitution. 
do_vcs_install(manifest_in, cfg.versionfile_source, ipy) return 0 def scan_setup_py(): """Validate the contents of setup.py against Versioneer's expectations.""" found = set() setters = False errors = 0 with open("setup.py", "r") as f: for line in f.readlines(): if "import versioneer" in line: found.add("import") if "versioneer.get_cmdclass()" in line: found.add("cmdclass") if "versioneer.get_version()" in line: found.add("get_version") if "versioneer.VCS" in line: setters = True if "versioneer.versionfile_source" in line: setters = True if len(found) != 3: print("") print("Your setup.py appears to be missing some important items") print("(but I might be wrong). Please make sure it has something") print("roughly like the following:") print("") print(" import versioneer") print(" setup( version=versioneer.get_version(),") print(" cmdclass=versioneer.get_cmdclass(), ...)") print("") errors += 1 if setters: print("You should remove lines like 'versioneer.VCS = ' and") print("'versioneer.versionfile_source = ' . This configuration") print("now lives in setup.cfg, and should be removed from setup.py") print("") errors += 1 return errors if __name__ == "__main__": cmd = sys.argv[1] if cmd == "setup": errors = do_setup() errors += scan_setup_py() if errors: sys.exit(1)
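# Usage sketch (illustrative only; not part of upstream Versioneer 0.18 — the
# package name "myproject" is the same placeholder used in CONFIG_ERROR and
# SAMPLE_CONFIG above): those strings, together with scan_setup_py(), describe
# the wiring a project is expected to provide. Under those assumptions it
# looks roughly like this:
#
#   # setup.cfg
#   [versioneer]
#   VCS = git
#   style = pep440
#   versionfile_source = src/myproject/_version.py
#   versionfile_build = myproject/_version.py
#   tag_prefix =
#   parentdir_prefix = myproject-
#
#   # setup.py
#   import versioneer
#   from setuptools import setup
#
#   setup(
#       name="myproject",
#       version=versioneer.get_version(),
#       cmdclass=versioneer.get_cmdclass(),
#   )
#
# Running "python versioneer.py setup" (the __main__ branch above) then calls
# do_setup(), which writes the versionfile_source from LONG_VERSION_PY,
# appends INIT_PY_SNIPPET to the package __init__.py, and updates MANIFEST.in
# and .gitattributes, before scan_setup_py() checks setup.py for the calls
# shown in the sketch.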