rows-0.3.1/.gitignore

*.egg-info/
*.pyc
*~
.*.sw?
.coverage
.directory
.env
.idea/*
.tox
MANIFEST
build/*
dist/*
reg_settings.py
rows.1
.DS_Store
docs/_build

rows-0.3.1/AUTHORS.md

# Authors and Contributors of `rows` Library

Project page: <https://github.com/turicas/rows>.

## Author/Maintainer

Created and maintained by Álvaro Justen aka turicas.

## Contributors

- Érico Andrei
- Bernardo Fontes
- Alexandre Brandão (slex)
- Paulo Roberto Alves de Oliveira (aka kretcheu)
- Jean Ferri
- Evaldo Junior
- Rhenan Bartels
- Mauro Baraldi
- Henrique Bastos
- Rômulo Collopy
- Davi Oliveira
- Ellison Leão
- Ramiro Luz

rows-0.3.1/Dockerfile

FROM debian
MAINTAINER Álvaro Justen

# install system dependencies
RUN apt-get update
RUN apt-get install --no-install-recommends -y build-essential git locales \
        python-dev python-lxml \
        python-pip python-snappy \
        python-thrift && \
    apt-get clean
# thrift (used by the parquet plugin) is the only dependency which needs
# build-essential and python-dev to be installed (installing python-thrift
# alone doesn't do the job).
# You can build other Python libraries from source by installing:
#     libsnappy-dev libxml2-dev libxslt-dev libz-dev
# and not installing:
#     python-lxml python-snappy

# configure locale (needed to run tests)
RUN echo 'en_US.UTF-8 UTF-8' > /etc/locale.gen
RUN echo 'pt_BR.UTF-8 UTF-8' >> /etc/locale.gen
RUN /usr/sbin/locale-gen

# clone the repository and install Python dependencies
RUN git clone https://github.com/turicas/rows.git ~/rows
RUN cd ~/rows && pip install -r requirements-development.txt && \
    rm -rf ~/.cache/pip/
RUN cd ~/rows && pip install -e .

rows-0.3.1/LICENSE

GNU GENERAL PUBLIC LICENSE Version 3, 29 June 2007 Copyright (C) 2007 Free Software Foundation, Inc. Everyone is permitted to copy and distribute verbatim copies of this license document, but changing it is not allowed. Preamble The GNU General Public License is a free, copyleft license for software and other kinds of works. The licenses for most software and other practical works are designed to take away your freedom to share and change the works. By contrast, the GNU General Public License is intended to guarantee your freedom to share and change all versions of a program--to make sure it remains free software for all its users. We, the Free Software Foundation, use the GNU General Public License for most of our software; it applies also to any other work released this way by its authors. You can apply it to your programs, too. When we speak of free software, we are referring to freedom, not price. Our General Public Licenses are designed to make sure that you have the freedom to distribute copies of free software (and charge for them if you wish), that you receive source code or can get it if you want it, that you can change the software or use pieces of it in new free programs, and that you know you can do these things. To protect your rights, we need to prevent others from denying you these rights or asking you to surrender the rights.
Therefore, you have certain responsibilities if you distribute copies of the software, or if you modify it: responsibilities to respect the freedom of others. For example, if you distribute copies of such a program, whether gratis or for a fee, you must pass on to the recipients the same freedoms that you received. You must make sure that they, too, receive or can get the source code. And you must show them these terms so they know their rights. Developers that use the GNU GPL protect your rights with two steps: (1) assert copyright on the software, and (2) offer you this License giving you legal permission to copy, distribute and/or modify it. For the developers' and authors' protection, the GPL clearly explains that there is no warranty for this free software. For both users' and authors' sake, the GPL requires that modified versions be marked as changed, so that their problems will not be attributed erroneously to authors of previous versions. Some devices are designed to deny users access to install or run modified versions of the software inside them, although the manufacturer can do so. This is fundamentally incompatible with the aim of protecting users' freedom to change the software. The systematic pattern of such abuse occurs in the area of products for individuals to use, which is precisely where it is most unacceptable. Therefore, we have designed this version of the GPL to prohibit the practice for those products. If such problems arise substantially in other domains, we stand ready to extend this provision to those domains in future versions of the GPL, as needed to protect the freedom of users. Finally, every program is threatened constantly by software patents. States should not allow patents to restrict development and use of software on general-purpose computers, but in those that do, we wish to avoid the special danger that patents applied to a free program could make it effectively proprietary. To prevent this, the GPL assures that patents cannot be used to render the program non-free. The precise terms and conditions for copying, distribution and modification follow. TERMS AND CONDITIONS 0. Definitions. "This License" refers to version 3 of the GNU General Public License. "Copyright" also means copyright-like laws that apply to other kinds of works, such as semiconductor masks. "The Program" refers to any copyrightable work licensed under this License. Each licensee is addressed as "you". "Licensees" and "recipients" may be individuals or organizations. To "modify" a work means to copy from or adapt all or part of the work in a fashion requiring copyright permission, other than the making of an exact copy. The resulting work is called a "modified version" of the earlier work or a work "based on" the earlier work. A "covered work" means either the unmodified Program or a work based on the Program. To "propagate" a work means to do anything with it that, without permission, would make you directly or secondarily liable for infringement under applicable copyright law, except executing it on a computer or modifying a private copy. Propagation includes copying, distribution (with or without modification), making available to the public, and in some countries other activities as well. To "convey" a work means any kind of propagation that enables other parties to make or receive copies. Mere interaction with a user through a computer network, with no transfer of a copy, is not conveying. 
An interactive user interface displays "Appropriate Legal Notices" to the extent that it includes a convenient and prominently visible feature that (1) displays an appropriate copyright notice, and (2) tells the user that there is no warranty for the work (except to the extent that warranties are provided), that licensees may convey the work under this License, and how to view a copy of this License. If the interface presents a list of user commands or options, such as a menu, a prominent item in the list meets this criterion. 1. Source Code. The "source code" for a work means the preferred form of the work for making modifications to it. "Object code" means any non-source form of a work. A "Standard Interface" means an interface that either is an official standard defined by a recognized standards body, or, in the case of interfaces specified for a particular programming language, one that is widely used among developers working in that language. The "System Libraries" of an executable work include anything, other than the work as a whole, that (a) is included in the normal form of packaging a Major Component, but which is not part of that Major Component, and (b) serves only to enable use of the work with that Major Component, or to implement a Standard Interface for which an implementation is available to the public in source code form. A "Major Component", in this context, means a major essential component (kernel, window system, and so on) of the specific operating system (if any) on which the executable work runs, or a compiler used to produce the work, or an object code interpreter used to run it. The "Corresponding Source" for a work in object code form means all the source code needed to generate, install, and (for an executable work) run the object code and to modify the work, including scripts to control those activities. However, it does not include the work's System Libraries, or general-purpose tools or generally available free programs which are used unmodified in performing those activities but which are not part of the work. For example, Corresponding Source includes interface definition files associated with source files for the work, and the source code for shared libraries and dynamically linked subprograms that the work is specifically designed to require, such as by intimate data communication or control flow between those subprograms and other parts of the work. The Corresponding Source need not include anything that users can regenerate automatically from other parts of the Corresponding Source. The Corresponding Source for a work in source code form is that same work. 2. Basic Permissions. All rights granted under this License are granted for the term of copyright on the Program, and are irrevocable provided the stated conditions are met. This License explicitly affirms your unlimited permission to run the unmodified Program. The output from running a covered work is covered by this License only if the output, given its content, constitutes a covered work. This License acknowledges your rights of fair use or other equivalent, as provided by copyright law. You may make, run and propagate covered works that you do not convey, without conditions so long as your license otherwise remains in force. 
You may convey covered works to others for the sole purpose of having them make modifications exclusively for you, or provide you with facilities for running those works, provided that you comply with the terms of this License in conveying all material for which you do not control copyright. Those thus making or running the covered works for you must do so exclusively on your behalf, under your direction and control, on terms that prohibit them from making any copies of your copyrighted material outside their relationship with you. Conveying under any other circumstances is permitted solely under the conditions stated below. Sublicensing is not allowed; section 10 makes it unnecessary. 3. Protecting Users' Legal Rights From Anti-Circumvention Law. No covered work shall be deemed part of an effective technological measure under any applicable law fulfilling obligations under article 11 of the WIPO copyright treaty adopted on 20 December 1996, or similar laws prohibiting or restricting circumvention of such measures. When you convey a covered work, you waive any legal power to forbid circumvention of technological measures to the extent such circumvention is effected by exercising rights under this License with respect to the covered work, and you disclaim any intention to limit operation or modification of the work as a means of enforcing, against the work's users, your or third parties' legal rights to forbid circumvention of technological measures. 4. Conveying Verbatim Copies. You may convey verbatim copies of the Program's source code as you receive it, in any medium, provided that you conspicuously and appropriately publish on each copy an appropriate copyright notice; keep intact all notices stating that this License and any non-permissive terms added in accord with section 7 apply to the code; keep intact all notices of the absence of any warranty; and give all recipients a copy of this License along with the Program. You may charge any price or no price for each copy that you convey, and you may offer support or warranty protection for a fee. 5. Conveying Modified Source Versions. You may convey a work based on the Program, or the modifications to produce it from the Program, in the form of source code under the terms of section 4, provided that you also meet all of these conditions: a) The work must carry prominent notices stating that you modified it, and giving a relevant date. b) The work must carry prominent notices stating that it is released under this License and any conditions added under section 7. This requirement modifies the requirement in section 4 to "keep intact all notices". c) You must license the entire work, as a whole, under this License to anyone who comes into possession of a copy. This License will therefore apply, along with any applicable section 7 additional terms, to the whole of the work, and all its parts, regardless of how they are packaged. This License gives no permission to license the work in any other way, but it does not invalidate such permission if you have separately received it. d) If the work has interactive user interfaces, each must display Appropriate Legal Notices; however, if the Program has interactive interfaces that do not display Appropriate Legal Notices, your work need not make them do so. 
A compilation of a covered work with other separate and independent works, which are not by their nature extensions of the covered work, and which are not combined with it such as to form a larger program, in or on a volume of a storage or distribution medium, is called an "aggregate" if the compilation and its resulting copyright are not used to limit the access or legal rights of the compilation's users beyond what the individual works permit. Inclusion of a covered work in an aggregate does not cause this License to apply to the other parts of the aggregate. 6. Conveying Non-Source Forms. You may convey a covered work in object code form under the terms of sections 4 and 5, provided that you also convey the machine-readable Corresponding Source under the terms of this License, in one of these ways: a) Convey the object code in, or embodied in, a physical product (including a physical distribution medium), accompanied by the Corresponding Source fixed on a durable physical medium customarily used for software interchange. b) Convey the object code in, or embodied in, a physical product (including a physical distribution medium), accompanied by a written offer, valid for at least three years and valid for as long as you offer spare parts or customer support for that product model, to give anyone who possesses the object code either (1) a copy of the Corresponding Source for all the software in the product that is covered by this License, on a durable physical medium customarily used for software interchange, for a price no more than your reasonable cost of physically performing this conveying of source, or (2) access to copy the Corresponding Source from a network server at no charge. c) Convey individual copies of the object code with a copy of the written offer to provide the Corresponding Source. This alternative is allowed only occasionally and noncommercially, and only if you received the object code with such an offer, in accord with subsection 6b. d) Convey the object code by offering access from a designated place (gratis or for a charge), and offer equivalent access to the Corresponding Source in the same way through the same place at no further charge. You need not require recipients to copy the Corresponding Source along with the object code. If the place to copy the object code is a network server, the Corresponding Source may be on a different server (operated by you or a third party) that supports equivalent copying facilities, provided you maintain clear directions next to the object code saying where to find the Corresponding Source. Regardless of what server hosts the Corresponding Source, you remain obligated to ensure that it is available for as long as needed to satisfy these requirements. e) Convey the object code using peer-to-peer transmission, provided you inform other peers where the object code and Corresponding Source of the work are being offered to the general public at no charge under subsection 6d. A separable portion of the object code, whose source code is excluded from the Corresponding Source as a System Library, need not be included in conveying the object code work. A "User Product" is either (1) a "consumer product", which means any tangible personal property which is normally used for personal, family, or household purposes, or (2) anything designed or sold for incorporation into a dwelling. In determining whether a product is a consumer product, doubtful cases shall be resolved in favor of coverage. 
For a particular product received by a particular user, "normally used" refers to a typical or common use of that class of product, regardless of the status of the particular user or of the way in which the particular user actually uses, or expects or is expected to use, the product. A product is a consumer product regardless of whether the product has substantial commercial, industrial or non-consumer uses, unless such uses represent the only significant mode of use of the product. "Installation Information" for a User Product means any methods, procedures, authorization keys, or other information required to install and execute modified versions of a covered work in that User Product from a modified version of its Corresponding Source. The information must suffice to ensure that the continued functioning of the modified object code is in no case prevented or interfered with solely because modification has been made. If you convey an object code work under this section in, or with, or specifically for use in, a User Product, and the conveying occurs as part of a transaction in which the right of possession and use of the User Product is transferred to the recipient in perpetuity or for a fixed term (regardless of how the transaction is characterized), the Corresponding Source conveyed under this section must be accompanied by the Installation Information. But this requirement does not apply if neither you nor any third party retains the ability to install modified object code on the User Product (for example, the work has been installed in ROM). The requirement to provide Installation Information does not include a requirement to continue to provide support service, warranty, or updates for a work that has been modified or installed by the recipient, or for the User Product in which it has been modified or installed. Access to a network may be denied when the modification itself materially and adversely affects the operation of the network or violates the rules and protocols for communication across the network. Corresponding Source conveyed, and Installation Information provided, in accord with this section must be in a format that is publicly documented (and with an implementation available to the public in source code form), and must require no special password or key for unpacking, reading or copying. 7. Additional Terms. "Additional permissions" are terms that supplement the terms of this License by making exceptions from one or more of its conditions. Additional permissions that are applicable to the entire Program shall be treated as though they were included in this License, to the extent that they are valid under applicable law. If additional permissions apply only to part of the Program, that part may be used separately under those permissions, but the entire Program remains governed by this License without regard to the additional permissions. When you convey a copy of a covered work, you may at your option remove any additional permissions from that copy, or from any part of it. (Additional permissions may be written to require their own removal in certain cases when you modify the work.) You may place additional permissions on material, added by you to a covered work, for which you have or can give appropriate copyright permission. 
Notwithstanding any other provision of this License, for material you add to a covered work, you may (if authorized by the copyright holders of that material) supplement the terms of this License with terms: a) Disclaiming warranty or limiting liability differently from the terms of sections 15 and 16 of this License; or b) Requiring preservation of specified reasonable legal notices or author attributions in that material or in the Appropriate Legal Notices displayed by works containing it; or c) Prohibiting misrepresentation of the origin of that material, or requiring that modified versions of such material be marked in reasonable ways as different from the original version; or d) Limiting the use for publicity purposes of names of licensors or authors of the material; or e) Declining to grant rights under trademark law for use of some trade names, trademarks, or service marks; or f) Requiring indemnification of licensors and authors of that material by anyone who conveys the material (or modified versions of it) with contractual assumptions of liability to the recipient, for any liability that these contractual assumptions directly impose on those licensors and authors. All other non-permissive additional terms are considered "further restrictions" within the meaning of section 10. If the Program as you received it, or any part of it, contains a notice stating that it is governed by this License along with a term that is a further restriction, you may remove that term. If a license document contains a further restriction but permits relicensing or conveying under this License, you may add to a covered work material governed by the terms of that license document, provided that the further restriction does not survive such relicensing or conveying. If you add terms to a covered work in accord with this section, you must place, in the relevant source files, a statement of the additional terms that apply to those files, or a notice indicating where to find the applicable terms. Additional terms, permissive or non-permissive, may be stated in the form of a separately written license, or stated as exceptions; the above requirements apply either way. 8. Termination. You may not propagate or modify a covered work except as expressly provided under this License. Any attempt otherwise to propagate or modify it is void, and will automatically terminate your rights under this License (including any patent licenses granted under the third paragraph of section 11). However, if you cease all violation of this License, then your license from a particular copyright holder is reinstated (a) provisionally, unless and until the copyright holder explicitly and finally terminates your license, and (b) permanently, if the copyright holder fails to notify you of the violation by some reasonable means prior to 60 days after the cessation. Moreover, your license from a particular copyright holder is reinstated permanently if the copyright holder notifies you of the violation by some reasonable means, this is the first time you have received notice of violation of this License (for any work) from that copyright holder, and you cure the violation prior to 30 days after your receipt of the notice. Termination of your rights under this section does not terminate the licenses of parties who have received copies or rights from you under this License. If your rights have been terminated and not permanently reinstated, you do not qualify to receive new licenses for the same material under section 10. 9. 
Acceptance Not Required for Having Copies. You are not required to accept this License in order to receive or run a copy of the Program. Ancillary propagation of a covered work occurring solely as a consequence of using peer-to-peer transmission to receive a copy likewise does not require acceptance. However, nothing other than this License grants you permission to propagate or modify any covered work. These actions infringe copyright if you do not accept this License. Therefore, by modifying or propagating a covered work, you indicate your acceptance of this License to do so. 10. Automatic Licensing of Downstream Recipients. Each time you convey a covered work, the recipient automatically receives a license from the original licensors, to run, modify and propagate that work, subject to this License. You are not responsible for enforcing compliance by third parties with this License. An "entity transaction" is a transaction transferring control of an organization, or substantially all assets of one, or subdividing an organization, or merging organizations. If propagation of a covered work results from an entity transaction, each party to that transaction who receives a copy of the work also receives whatever licenses to the work the party's predecessor in interest had or could give under the previous paragraph, plus a right to possession of the Corresponding Source of the work from the predecessor in interest, if the predecessor has it or can get it with reasonable efforts. You may not impose any further restrictions on the exercise of the rights granted or affirmed under this License. For example, you may not impose a license fee, royalty, or other charge for exercise of rights granted under this License, and you may not initiate litigation (including a cross-claim or counterclaim in a lawsuit) alleging that any patent claim is infringed by making, using, selling, offering for sale, or importing the Program or any portion of it. 11. Patents. A "contributor" is a copyright holder who authorizes use under this License of the Program or a work on which the Program is based. The work thus licensed is called the contributor's "contributor version". A contributor's "essential patent claims" are all patent claims owned or controlled by the contributor, whether already acquired or hereafter acquired, that would be infringed by some manner, permitted by this License, of making, using, or selling its contributor version, but do not include claims that would be infringed only as a consequence of further modification of the contributor version. For purposes of this definition, "control" includes the right to grant patent sublicenses in a manner consistent with the requirements of this License. Each contributor grants you a non-exclusive, worldwide, royalty-free patent license under the contributor's essential patent claims, to make, use, sell, offer for sale, import and otherwise run, modify and propagate the contents of its contributor version. In the following three paragraphs, a "patent license" is any express agreement or commitment, however denominated, not to enforce a patent (such as an express permission to practice a patent or covenant not to sue for patent infringement). To "grant" such a patent license to a party means to make such an agreement or commitment not to enforce a patent against the party. 
If you convey a covered work, knowingly relying on a patent license, and the Corresponding Source of the work is not available for anyone to copy, free of charge and under the terms of this License, through a publicly available network server or other readily accessible means, then you must either (1) cause the Corresponding Source to be so available, or (2) arrange to deprive yourself of the benefit of the patent license for this particular work, or (3) arrange, in a manner consistent with the requirements of this License, to extend the patent license to downstream recipients. "Knowingly relying" means you have actual knowledge that, but for the patent license, your conveying the covered work in a country, or your recipient's use of the covered work in a country, would infringe one or more identifiable patents in that country that you have reason to believe are valid. If, pursuant to or in connection with a single transaction or arrangement, you convey, or propagate by procuring conveyance of, a covered work, and grant a patent license to some of the parties receiving the covered work authorizing them to use, propagate, modify or convey a specific copy of the covered work, then the patent license you grant is automatically extended to all recipients of the covered work and works based on it. A patent license is "discriminatory" if it does not include within the scope of its coverage, prohibits the exercise of, or is conditioned on the non-exercise of one or more of the rights that are specifically granted under this License. You may not convey a covered work if you are a party to an arrangement with a third party that is in the business of distributing software, under which you make payment to the third party based on the extent of your activity of conveying the work, and under which the third party grants, to any of the parties who would receive the covered work from you, a discriminatory patent license (a) in connection with copies of the covered work conveyed by you (or copies made from those copies), or (b) primarily for and in connection with specific products or compilations that contain the covered work, unless you entered into that arrangement, or that patent license was granted, prior to 28 March 2007. Nothing in this License shall be construed as excluding or limiting any implied license or other defenses to infringement that may otherwise be available to you under applicable patent law. 12. No Surrender of Others' Freedom. If conditions are imposed on you (whether by court order, agreement or otherwise) that contradict the conditions of this License, they do not excuse you from the conditions of this License. If you cannot convey a covered work so as to satisfy simultaneously your obligations under this License and any other pertinent obligations, then as a consequence you may not convey it at all. For example, if you agree to terms that obligate you to collect a royalty for further conveying from those to whom you convey the Program, the only way you could satisfy both those terms and this License would be to refrain entirely from conveying the Program. 13. Use with the GNU Affero General Public License. Notwithstanding any other provision of this License, you have permission to link or combine any covered work with a work licensed under version 3 of the GNU Affero General Public License into a single combined work, and to convey the resulting work. 
The terms of this License will continue to apply to the part which is the covered work, but the special requirements of the GNU Affero General Public License, section 13, concerning interaction through a network will apply to the combination as such. 14. Revised Versions of this License. The Free Software Foundation may publish revised and/or new versions of the GNU General Public License from time to time. Such new versions will be similar in spirit to the present version, but may differ in detail to address new problems or concerns. Each version is given a distinguishing version number. If the Program specifies that a certain numbered version of the GNU General Public License "or any later version" applies to it, you have the option of following the terms and conditions either of that numbered version or of any later version published by the Free Software Foundation. If the Program does not specify a version number of the GNU General Public License, you may choose any version ever published by the Free Software Foundation. If the Program specifies that a proxy can decide which future versions of the GNU General Public License can be used, that proxy's public statement of acceptance of a version permanently authorizes you to choose that version for the Program. Later license versions may give you additional or different permissions. However, no additional obligations are imposed on any author or copyright holder as a result of your choosing to follow a later version. 15. Disclaimer of Warranty. THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF ALL NECESSARY SERVICING, REPAIR OR CORRECTION. 16. Limitation of Liability. IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS), EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH DAMAGES. 17. Interpretation of Sections 15 and 16. If the disclaimer of warranty and limitation of liability provided above cannot be given local legal effect according to their terms, reviewing courts shall apply local law that most closely approximates an absolute waiver of all civil liability in connection with the Program, unless a warranty or assumption of liability accompanies a copy of the Program in return for a fee. END OF TERMS AND CONDITIONS How to Apply These Terms to Your New Programs If you develop a new program, and you want it to be of the greatest possible use to the public, the best way to achieve this is to make it free software which everyone can redistribute and change under these terms. To do so, attach the following notices to the program. 
It is safest to attach them to the start of each source file to most effectively state the exclusion of warranty; and each file should have at least the "copyright" line and a pointer to where the full notice is found. Copyright (C) This program is free software: you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation, either version 3 of the License, or (at your option) any later version. This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. You should have received a copy of the GNU General Public License along with this program. If not, see . Also add information on how to contact you by electronic and paper mail. If the program does terminal interaction, make it output a short notice like this when it starts in an interactive mode: Copyright (C) This program comes with ABSOLUTELY NO WARRANTY; for details type `show w'. This is free software, and you are welcome to redistribute it under certain conditions; type `show c' for details. The hypothetical commands `show w' and `show c' should show the appropriate parts of the General Public License. Of course, your program's commands might be different; for a GUI interface, you would use an "about box". You should also get your employer (if you work as a programmer) or school, if any, to sign a "copyright disclaimer" for the program, if necessary. For more information on this, and how to apply and follow the GNU GPL, see . The GNU General Public License does not permit incorporating your program into proprietary programs. If your program is a subroutine library, you may consider it more useful to permit linking proprietary applications with the library. If this is what you want to do, use the GNU Lesser General Public License instead of this License. But first, please read . 
rows-0.3.1/Makefile

test:
	tox

clean:
	find -regex '.*\.pyc' -exec rm {} \;
	find -regex '.*~' -exec rm {} \;
	rm -rf reg-settings.py
	rm -rf MANIFEST dist build *.egg-info
	rm -rf rows.1
	rm -rf .tox

install:
	make clean
	make uninstall
	python setup.py install

uninstall:
	pip uninstall -y rows

lint:
	pylint rows/*.py

lint-tests:
	pylint tests/*.py

man:
	head -1 rows.1.txt > rows.1
	txt2man rows.1.txt | egrep -v '^\.TH' >> rows.1

.PHONY: test clean lint lint-tests install uninstall man

rows-0.3.1/README.md

# rows

[![Join the chat at https://gitter.im/turicas/rows](https://badges.gitter.im/Join%20Chat.svg)](https://gitter.im/turicas/rows?utm_source=badge&utm_medium=badge&utm_campaign=pr-badge&utm_content=badge)
[![Current version at PyPI](https://img.shields.io/pypi/v/rows.svg)](https://pypi.python.org/pypi/rows)
[![Downloads per month on PyPI](https://img.shields.io/pypi/dm/rows.svg)](https://pypi.python.org/pypi/rows)
![Supported Python Versions](https://img.shields.io/pypi/pyversions/rows.svg)
![Software status](https://img.shields.io/pypi/status/rows.svg)
[![License: GPLv3](https://img.shields.io/pypi/l/rows.svg)](https://github.com/turicas/rows/blob/develop/LICENSE)
[![Donate](https://img.shields.io/gratipay/turicas.svg?style=social&label=Donate)](https://www.gratipay.com/turicas)

No matter in which format your tabular data is: `rows` will import it,
automatically detect types and give you high-level Python objects so you can
start **working with the data** instead of **trying to parse it**. It is also
locale-and-unicode aware. :)

Want to learn more? [Read the documentation](http://turicas.info/rows).

rows-0.3.1/docs/Makefile

# Makefile for Sphinx documentation

# You can set these variables from the command line.
SPHINXBUILD = sphinx-build
BUILDDIR    = _build

# Internal variables.
ALLSPHINXOPTS = -d $(BUILDDIR)/doctrees .

help:
	@echo "Please use \`make <target>' where <target> is one of"
	@echo "  html     to make standalone HTML files"
	@echo "  clean    remove current BUILDDIR"
	@echo "  publish  publish to the gh-pages branch"

clean:
	rm -rf $(BUILDDIR)/*

html:
	$(SPHINXBUILD) -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html
	@echo
	@echo "Build finished. The HTML pages are in $(BUILDDIR)/html."

publish:
	cd $(BUILDDIR)/html; git add . ; git commit -m "rebuilt docs"; git push origin gh-pages

.PHONY: help clean html publish

rows-0.3.1/docs/changelog.md

# rows' Log of Changes

## Version `0.3.2`

**Released on: (under development)**

## Version `0.3.1`

**Released on: 2017-05-08**

### Enhancements

- Move information from the README to a dedicated site, organize it and add
  more examples. The documentation is available at
  [turicas.info/rows](http://turicas.info/rows). Thanks to
  [@ellisonleao](https://github.com/ellisonleao) for the Sphinx implementation
  and [@ramiroluz](https://github.com/ramiroluz) for new examples.
- Little code refactorings.
### Bug Fixes

- [#200](https://github.com/turicas/rows/pull/200) Escape output when
  exporting to HTML (thanks to [@arloc](https://github.com/arloc))
- Fix some tests
- [#215](https://github.com/turicas/rows/issues/215) DecimalField does not
  handle negative values correctly when using locale (thanks to
  [@draug3n](https://github.com/draug3n) for reporting)

## Version `0.3.0`

**Released on: 2016-09-02**

### Backwards Incompatible Changes

### Bug Fixes

- Return `None` on XLS blank cells;
- [#188](https://github.com/turicas/rows/issues/188) Change `sample_size` on
  encoding detection.

### Enhancements and Refactorings

- `rows.fields.detect_fields` will consider `BinaryField` if all the values
  are `str` (Python 2)/`bytes` (Python 3); all other fields will work only
  with `unicode` (Python 2)/`str` (Python 3);
- The HTML and XPath plugins now use a better way to return inner HTML (when
  `preserve_html=True`);
- [#189](https://github.com/turicas/rows/issues/189) Optimize `Table.__add__`.

### New Features

- Support for Python 3 (finally!);
- `rows.fields.BinaryField` now automatically uses base64 to encode/decode;
- Added `encoding` information to `rows.Table` metadata in text plugins;
- Added `sheet_name` information to `rows.Table` metadata in XLS and XLSX
  plugins;
- [#190](https://github.com/turicas/rows/issues/190) Add `query_args` to
  `import_from_sqlite`;
- [#177](https://github.com/turicas/rows/issues/177) Add `dialect` to
  `export_to_csv`.

## Version `0.2.1`

**Released on: 2016-08-10**

### Backwards Incompatible Changes

- `rows.utils.export_to_uri` signature is now like `rows.export_to_*` (first
  the `rows.Table` object, then the URI)
- Changed default table name in `import_from_sqlite` and `export_to_sqlite`
  (from `rows` and `rows_{number}` to `table{number}`)

### Bug Fixes

- [#170](https://github.com/turicas/rows/issues/170) (SQLite plugin) Error
  converting `int` and `float` when value is `None`.
- [#168](https://github.com/turicas/rows/issues/168) Use `Field.serialize`
  when the field type is unknown (affecting: XLS, XLSX and SQLite plugins).
- [#167](https://github.com/turicas/rows/issues/167) Use more data to detect
  the CSV dialect, limit the possible delimiters and fall back to excel if it
  can't be detected.
- [#176](https://github.com/turicas/rows/issues/176) Problem using quotes on
  the CSV plugin.
- [#179](https://github.com/turicas/rows/issues/179) Fix double underscore
  problem on `rows.utils.slug`
- [#175](https://github.com/turicas/rows/issues/175) Fix `None`
  serialization/deserialization in all plugins (and also field types)
- [#172](https://github.com/turicas/rows/issues/172) Expose all tables in
  `rows query` for SQLite databases
- Fix `examples/cli/convert.sh` (missing `-`)
- Avoid SQL injection in table names

### Enhancements and Refactorings

- Refactor `rows.utils.import_from_uri`
- Encoding and file type are better detected on `rows.utils.import_from_uri`
- Added helper functions to `rows.utils` regarding encoding and file
  type/plugin detection
- There's a better description of plugin metadata (MIME types accepted) on
  `rows.utils` (should be refactored to be inside each plugin)
- Moved the `slug` and `ipartition` functions to `rows.plugins.utils`
- Optimize `rows query` when using only one SQLite source

## Version `0.2.0`

**Released on: 2016-07-15**

### Backwards Incompatible Changes

- `rows.fields.UnicodeField` was renamed to `rows.fields.TextField`
- `rows.fields.BytesField` was renamed to `rows.fields.BinaryField`

### Bug Fixes

- Fix import errors on older versions of urllib3 and Python (thanks to
  [@jeanferri](https://github.com/jeanferri))
- [#156](https://github.com/turicas/rows/issues/156) `BoolField` should not
  accept "0" and "1" as possible values
- [#86](https://github.com/turicas/rows/issues/86) Fix `Content-Type` parsing
- Fix locale-related tests
- [#85](https://github.com/turicas/rows/issues/85) Fix `preserve_html` if
  `fields` is not provided
- Fix problem with big integers
- [#131](https://github.com/turicas/rows/issues/131) Fix problem with empty
  sample data
- Fix problem with `unicode` and `DateField`
- Fix `PercentField.serialize(None)`
- Fix bug with `Decimal` receiving `''`
- Fix bug in `PercentField.serialize(Decimal('0'))`
- Fix nested table behaviour on the HTML plugin

### General Changes

- (EXPERIMENTAL) Add `rows.FlexibleTable` class (with help on tests from
  [@maurobaraldi](https://github.com/maurobaraldi))
- Lots of refactorings
- Add `rows.operations.transpose`
- Add `Table.__repr__`
- Rename `rows.fields.UnicodeField` to `rows.fields.TextField` and
  `rows.fields.ByteField` to `rows.fields.BinaryField`
- Add a man page (thanks to [@kretcheu](https://github.com/kretcheu))
- [#40](https://github.com/turicas/rows/issues/40) The package is available
  on Debian!
- [#120](https://github.com/turicas/rows/issues/120) The package is available
  on Fedora!
- Add some examples
- [#138](https://github.com/turicas/rows/issues/138) Add `rows.fields.JSONField`
- [#146](https://github.com/turicas/rows/issues/146) Add `rows.fields.EmailField`
- Enhance encoding detection using the
  [file-magic](https://pypi.python.org/pypi/file-magic) library
- [#160](https://github.com/turicas/rows/issues/160) Add support for column
  get/set/del in `rows.Table`

### Tests

- Fix "\r\n" on tests to work on Windows
- Enhance tests with `mock` to assure some functions are being called
- Improve some tests

### Plugins

- Add the JSON plugin (thanks [@sxslex](https://github.com/sxslex))
- [#107](https://github.com/turicas/rows/issues/107) Add `import_from_txt`
- [#149](https://github.com/turicas/rows/issues/149) Add `import_from_xpath`
- (EXPERIMENTAL) Add `import_from_ods`
- (EXPERIMENTAL) Add `import_from_parquet`
- Add `import_from_sqlite` and `export_to_sqlite` (implemented by
  [@turicas](https://github.com/turicas) with help from
  [@infog](https://github.com/infog))
- Add `import_from_xlsx` and `export_to_xlsx` (thanks to
  [@RhenanBartels](https://github.com/RhenanBartels))
- Autodetect delimiter in CSV files
- Export to TXT, JSON and XLS also supports an already-opened file, and CSV
  can export to memory (thanks to [@jeanferri](https://github.com/jeanferri))
- [#93](https://github.com/turicas/rows/issues/93) Add HTML helpers inside
  `rows.plugins.html`: `count_tables`, `extract_text`, `extract_links` and
  `tag_to_dict`
- [#162](https://github.com/turicas/rows/issues/162) Add `import_from_dicts`
  and `export_to_dicts`
- Refactor `export_to_txt`

### Utils

- Create `rows.plugins.utils`
- [#119](https://github.com/turicas/rows/issues/119) Rename a field if its
  name is duplicated (to "field_2", "field_3", ..., "field_N") or if it
  starts with a number.
- Add option to import only some fields (`import_fields` parameter inside
  `create_table`)
- Add option to export only some fields (`export_fields` parameter inside
  `prepare_to_export`)
- Add option `force_types` to force field types in some columns (instead of
  detecting) on `create_table`.
- Support lazy objects on `create_table`
- Add `samples` parameter to `create_table`

### CLI

- Add option to disable SSL verification (`--verify-ssl=no`)
- Add `print` subcommand
- Add `--version`
- CLI is not installed by default (should be installed as
  `pip install rows[cli]`)
- Automatically detect default encoding (if not specified)
- Add `--order-by` to some subcommands and remove the `sort` subcommand
  ([#111](https://github.com/turicas/rows/issues/111))
- Do not use locale by default
- Add `query` subcommand: converts (from many sources) internally to SQLite,
  executes the query and then exports

## Version `0.1.1`

**Released on: 2015-09-03**

- Fix code to run on Windows (thanks [@sxslex](https://github.com/sxslex))
- Fix locale (name, default name etc.)
- Remove `filemagic` dependency (waiting for `python-magic` to be available
  on PyPI)
- Write log of changes for `0.1.0` and `0.1.1`

## Version `0.1.0`

**Released on: 2015-08-29**

- Implement `Table` and its basic methods
- Implement basic plugin support with many utilities and the following
  formats:
  - `csv` (input/output)
  - `html` (input/output)
  - `txt` (output)
  - `xls` (input/output)
- Implement the following field types - many of them with locale support:
  - `ByteField`
  - `BoolField`
  - `IntegerField`
  - `FloatField`
  - `DecimalField`
  - `PercentField`
  - `DateField`
  - `DatetimeField`
  - `UnicodeField`
- Implement basic `Table` operations:
  - `sum`
  - `join`
  - `transform`
  - `serialize`
- Implement a command-line interface with the following subcommands:
  - `convert`
  - `join`
  - `sort`
  - `sum`
- Add examples to the repository

rows-0.3.1/docs/command-line-interface.md

# Command-Line Interface

`rows` exposes a command-line interface with common operations such as
converting and querying data.

> Note: we still need to improve this documentation. Please run `rows --help`
> to see all the available commands and take a look at
> [rows/cli.py][rows-cli].

## `rows convert`

You can convert from/to any of the supported formats -- just pass the correct
filenames and the CLI will automatically identify file type and encoding:

```bash
rows convert data/brazilian-cities.csv data/brazilian-cities.xlsx
```

## `rows query`

Yep, you can SQL-query any supported file format! Each of the source files
will be a table inside an in-memory SQLite database, called `table1`, ...,
`tableN`. If `--output` is not specified, `rows` will print a table on the
standard output.

```bash
rows query 'SELECT * FROM table1 WHERE inhabitants > 1000000' \
     data/brazilian-cities.csv \
     --output=data/result.html
```

[rows-cli]: https://github.com/turicas/rows/blob/develop/rows/cli.py

rows-0.3.1/docs/conf.py

#!/usr/bin/env python3
# -*- coding: utf-8 -*-
#
# rows documentation build configuration file, created by
# sphinx-quickstart on Tue Oct 18 12:11:09 2016.
#
# This file is execfile()d with the current directory set to its
# containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.

# If extensions (or modules to document with autodoc) are in another
# directory, add these directories to sys.path here. If the directory is
# relative to the documentation root, use os.path.abspath to make it
# absolute, like shown here.
#
# import os
# import sys
# sys.path.insert(0, os.path.abspath('.'))

import recommonmark
from recommonmark.parser import CommonMarkParser
from recommonmark.transform import AutoStructify

# -- General configuration ------------------------------------------------

# If your documentation needs a minimal Sphinx version, state it here.
#
# needs_sphinx = '1.0'

# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
extensions = [
    'sphinx.ext.autodoc',
    'sphinx.ext.viewcode',
]

# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']

# The suffix(es) of source filenames.
# You can specify multiple suffixes as a list of strings:
#
# source_suffix = ['.rst', '.md']
source_parsers = {
    '.md': CommonMarkParser,
}
source_suffix = ['.rst', '.md']

# The encoding of source files.
source_encoding = 'utf-8-sig'

# The master toctree document.
master_doc = 'index'

# General information about the project.
project = 'rows'
copyright = '2014-2017, Álvaro Justen'
author = 'Álvaro Justen'

# The version info for the project you're documenting, acts as replacement
# for |version| and |release|, also used in various other places throughout
# the built documents.
#
# The short X.Y version.
version = '0.3'
# The full version, including alpha/beta/rc tags.
release = '0.3.1'

github_doc_root = 'https://github.com/turicas/rows/tree/master/docs/'

# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#
# This is also used if you do content translation via gettext catalogs.
# Usually you set "language" from the command line for these cases.
language = None

# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#
# today = ''
#
# Else, today_fmt is used as the format for a strftime call.
#
# today_fmt = '%B %d, %Y'

# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
# These patterns also affect html_static_path and html_extra_path.
exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store']

# The reST default role (used for this markup: `text`) to use for all
# documents.
#
# default_role = None

# If true, '()' will be appended to :func: etc. cross-reference text.
#
# add_function_parentheses = True

# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#
# add_module_names = True

# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#
# show_authors = False

# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'

# A list of ignored prefixes for module index sorting.
# modindex_common_prefix = []

# If true, keep warnings as "system message" paragraphs in the built
# documents.
# keep_warnings = False

# If true, `todo` and `todoList` produce output, else they produce nothing.
todo_include_todos = False

# -- Options for HTML output ----------------------------------------------

# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
#
html_theme = 'alabaster'

# Theme options are theme-specific and customize the look and feel of a
# theme further. For a list of options available for each theme, see the
# documentation.
#
# html_theme_options = {}

# Add any paths that contain custom themes here, relative to this directory.
# html_theme_path = []

# The name for this set of Sphinx documents.
# "<project> v<release> documentation" by default.
#
# html_title = 'rows v0.3.1'

# A shorter title for the navigation bar. Default is the same as html_title.
#
# html_short_title = None

# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#
# html_logo = None

# The name of an image file (relative to this directory) to use as a favicon
# of the docs. This file should be a Windows icon file (.ico) being 16x16 or
# 32x32 pixels large.
#
# html_favicon = None

# Add any paths that contain custom static files (such as style sheets)
# here, relative to this directory.
# They are copied after the builtin static files, so a file named
# "default.css" will overwrite the builtin "default.css".
# html_static_path = ['_static']

# Add any extra paths that contain custom files (such as robots.txt or
# .htaccess) here, relative to this directory. These files are copied
# directly to the root of the documentation.
#
# html_extra_path = []

# If not None, a 'Last updated on:' timestamp is inserted at every page
# bottom, using the given strftime format.
# The empty string is equivalent to '%b %d, %Y'.
#
# html_last_updated_fmt = None

# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#
# html_use_smartypants = True

# Custom sidebar templates, maps document names to template names.
#
# html_sidebars = {}

# Additional templates that should be rendered to pages, maps page names to
# template names.
#
# html_additional_pages = {}

# If false, no module index is generated.
#
# html_domain_indices = True

# If false, no index is generated.
#
# html_use_index = True

# If true, the index is split into individual pages for each letter.
#
# html_split_index = False

# If true, links to the reST sources are added to the pages.
#
# html_show_sourcelink = True

# If true, "Created using Sphinx" is shown in the HTML footer.
# Default is True.
#
# html_show_sphinx = True

# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#
# html_show_copyright = True

# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#
# html_use_opensearch = ''

# This is the file name suffix for HTML files (e.g. ".xhtml").
# html_file_suffix = None

# Language to be used for generating the HTML full-text search index.
# Sphinx supports the following languages:
#   'da', 'de', 'en', 'es', 'fi', 'fr', 'h', 'it', 'ja'
#   'nl', 'no', 'pt', 'ro', 'r', 'sv', 'tr', 'zh'
# html_search_language = 'en'

# A dictionary with options for the search language support, empty by
# default.
# 'ja' uses this config value.
# 'zh' user can custom change `jieba` dictionary path.
#
# html_search_options = {'type': 'default'}

# The name of a javascript file (relative to the configuration directory)
# that implements a search results scorer. If empty, the default will be
# used.
#
# html_search_scorer = 'scorer.js'

# Output file base name for HTML help builder.
htmlhelp_basename = 'rowsdoc'

# The name of an image file (relative to this directory) to place at the top
# of the title page.
#
# latex_logo = None

# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#
# latex_use_parts = False

# If true, show page references after internal links.
#
# latex_show_pagerefs = False

# If true, show URL addresses after external links.
#
# latex_show_urls = False

# Documents to append as an appendix to all manuals.
#
# latex_appendices = []

# If false, will not define \strong, \code, \titleref, \crossref ... but
# only \sphinxstrong, ..., \sphinxtitleref, ... To help avoid clash with
# user-added packages.
#
# latex_keep_old_macro_names = True

# If false, no module index is generated.
#
# latex_domain_indices = True

# -- Options for manual page output ---------------------------------------

# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [ (master_doc, 'rows', 'rows Documentation', [author], 1) ] # If true, show URL addresses after external links. # # man_show_urls = False # -- Options for Texinfo output ------------------------------------------- # Grouping the document tree into Texinfo files. List of tuples # (source start file, target name, title, author, # dir menu entry, description, category) texinfo_documents = [ (master_doc, 'rows', 'rows Documentation', author, 'rows', 'A common, beautiful interface to tabular data, no matter the format', 'Miscellaneous'), ] # Documents to append as an appendix to all manuals. # # texinfo_appendices = [] # If false, no module index is generated. # # texinfo_domain_indices = True # How to display URL addresses: 'footnote', 'no', or 'inline'. # # texinfo_show_urls = 'footnote' # If true, do not generate a @detailmenu in the "Top" node's menu. # # texinfo_no_detailmenu = False def setup(app): app.add_config_value('recommonmark_config', { # when True, Auto Toc Tree will only be enabled on sections that match the title. 'auto_toc_tree_section': 'Contents', # a function that maps an existing relative position in the document to an HTTP link 'url_resolver': lambda url: github_doc_root + url, # enable the Auto Toc Tree feature 'enable_auto_toc_tree': True, # enable the Auto Doc Ref feature 'enable_auto_doc_ref': True, # enable the Math Formula feature 'enable_math': True, # enable the Inline Math feature 'enable_inline_math': True, # enable the evaluate embedded reStructuredText feature 'enable_eval_rst': True, }, True) app.add_transform(AutoStructify) rows-0.3.1/docs/contributing.md000066400000000000000000000017761310400316700164720ustar00rootroot00000000000000## Developing Create the virtualenv: ```bash mkvirtualenv rows ``` Install all plugins' dependencies: ```bash pip install --editable .[all] ``` Install development dependencies: ```bash pip install -r requirements-development.txt ``` Run tests: ```bash make test ``` or (if you don't have `make`): ```bash tox ``` You can also run tox against a specific Python version: ```bash tox -e py27 tox -e py35 ``` *tox known issues*: running tox with the py27 environment may raise `InvocationError` on non-Linux systems. To avoid it you can rebuild the tox environment on every run with `tox -e py27 -r`, or run nosetests directly: ```bash nosetests -dsv --with-yanc --with-coverage --cover-package rows tests/*.py ``` To create the man page you'll need to install [txt2man][txt2man]. In Debian (and Debian-based distributions) you can install it by running: ```bash aptitude install txt2man ``` Then, you can generate the `rows.1` file by running: ```bash make man ``` [txt2man]: http://mvertes.free.fr/ rows-0.3.1/docs/index.md000066400000000000000000000074401310400316700150660ustar00rootroot00000000000000# Welcome to rows documentation! No matter in which format your tabular data is: `rows` will import it, automatically detect types and give you high-level Python objects so you can start **working with the data** instead of **trying to parse it**. It is also locale-and-unicode aware. :) Have you ever lost your precious time reading a CSV that had a different dialect? Or trying to learn a whole new library API to read a new tabular data format your customer just sent? Have you gotten gray hair trying to access some data when the only answer was `UnicodeDecodeError`? So, [rows][rows] was custom made for you - run `pip install rows` and be happy! :-) > Note: if you're using [rows][rows] in some project please [tell > us][rows-issue-103]!
:-) ## Core Values - Simple, easy and flexible API - Code quality - Don't Repeat Yourself ## Contents - [Installation][doc-installing] - [Quick-start guide][doc-quick-start] - [Command-line interface][doc-cli] - [Supported plugins][doc-plugins] - [Using locale when importing data][doc-locale] - [Table operations][doc-operations] - [Contributing][doc-contributing] - [Useful links][doc-links] - [Log of changes][doc-changelog] ## Basic Usage `rows` will import tabular data in any of the supported formats and automatically detect/convert encoding and column types for you, so you can focus on working with the data. Given a CSV file like this: ``` state,city,inhabitants,area AC,Acrelândia,12538,1807.92 AC,Assis Brasil,6072,4974.18 AC,Brasiléia,21398,3916.5 AC,Bujari,8471,3034.87 AC,Capixaba,8798,1702.58 [...] RJ,Angra dos Reis,169511,825.09 RJ,Aperibé,10213,94.64 RJ,Araruama,112008,638.02 RJ,Areal,11423,110.92 RJ,Armação dos Búzios,27560,70.28 [...] ``` You can use `rows` to do some math with it without the need to convert anything: ```python import rows cities = rows.import_from_csv('data/brazilian-cities.csv') rio_biggest_cities = [city for city in cities if city.state == 'RJ' and city.inhabitants > 500000] for city in rio_biggest_cities: print('{} ({:5.2f} ppl/km²)'.format(city.city, city.inhabitants / city.area)) ``` The result: ```text Duque de Caxias (1828.51 ppl/km²) Nova Iguaçu (1527.59 ppl/km²) Rio de Janeiro (5265.81 ppl/km²) São Gonçalo (4035.88 ppl/km²) ``` The library can also export data to any of the available plugins and has a command-line interface for the most common tasks. For more examples, please refer to our [quick-start guide][doc-quick-start]. ## Architecture The library is composed of: - A common interface to tabular data (the `Table` class) - A set of plugins to populate `Table` objects (CSV, XLS, XLSX, HTML and XPath, Parquet, TXT, JSON, SQLite -- more coming soon!) - A set of common fields (such as `BoolField`, `IntegerField`) which know exactly how to serialize and deserialize data for each object type you'll get - A set of utilities (such as field type recognition) to help you work with tabular data - A command-line interface so you can have easy access to the most used features: convert between formats, sum, join and sort tables. ## Semantic Versioning `rows` uses [semantic versioning][semver]. Note that this means we do not guarantee API backwards compatibility on `0.x.y` versions. ## License This library is released under the [GNU General Public License version 3][gpl3]. [doc-cli]: command-line-interface.md [doc-contributing]: contributing.md [doc-installing]: installing.md [doc-links]: links.md [doc-locale]: locale.md [doc-operations]: operations.md [doc-plugins]: plugins.md [doc-changelog]: changelog.md [doc-quick-start]: quick-start.md [gpl3]: http://www.gnu.org/licenses/gpl-3.0.html [rows-issue-103]: https://github.com/turicas/rows/issues/103 [rows]: https://github.com/turicas/rows/ [semver]: http://semver.org/ rows-0.3.1/docs/installing.md000066400000000000000000000025471310400316700161260ustar00rootroot00000000000000# Installing rows Directly from [PyPI][pypi-rows]: ```bash pip install rows ``` You can also install directly from the GitHub repository to get the newest features (not yet stable) by running: ```bash pip install git+https://github.com/turicas/rows.git@develop ``` or: ```bash git clone https://github.com/turicas/rows.git cd rows python setup.py install ``` The use of virtualenv is recommended.
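A quick way to check that the installation works is a small round trip through the library (a minimal sketch -- the data below is made up, and both functions used belong to built-in plugins, so no extra dependencies are needed):

```python
import rows

# Build a table from plain dicts and print it as a text table; `rows`
# should detect the `age` column as an IntegerField automatically.
table = rows.import_from_dicts([{'name': 'John', 'age': 30},
                                {'name': 'Mary', 'age': 25}])
print(rows.export_to_txt(table))
```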
You can create a development image using Docker: ```bash cat Dockerfile | docker build -t turicas/rows:latest - ``` The plugins `csv`, `dicts`, `json`, `sqlite` and `txt` are built in by default, but if you want to use another one you need to install its dependencies explicitly, for example: ```bash pip install rows[html] pip install rows[xls] ``` You also need to install some dependencies to use the [command-line interface][rows-cli]. You can do it by installing the `cli` extra requirement: ```bash pip install rows[cli] ``` You can also easily install all the dependencies by using the `all` extra requirement: ```bash pip install rows[all] ``` If you use Debian [sid][debian-sid] or [testing][debian-testing] you can install it directly from the main repository by running: ```bash apt install python-rows # Python library only apt install rows # Python library + CLI ``` And in Fedora: ```bash dnf install python-rows # Python library + CLI ``` rows-0.3.1/docs/links.md000066400000000000000000000062151310400316700150760ustar00rootroot00000000000000# Links ## Showcase - (Portuguese) [Talk (videos + slides) on rows by Álvaro Justen][rows-talk-pt] ## Related and Similar Projects - (Article) [Data science at the command-line](https://github.com/jeroenjanssens/data-science-at-the-command-line) - [Ghost.py](https://github.com/jeanphix/Ghost.py) - [OKFN's goodtables](https://github.com/okfn/goodtables) - [OKFN's messytables](https://github.com/okfn/messytables) - [Pipe](https://github.com/JulienPalard/Pipe) - [Recode](https://github.com/pinard/Recode) - [TableFactory](https://pypi.python.org/pypi/TableFactory) - [Tabula](http://tabula.technology/) - [continuous-docs](https://github.com/icgood/continuous-docs) - [csvcat](https://pypi.python.org/pypi/csvcat) - [csvstudio](https://github.com/mdipierro/csvstudio) - [dataconverters](https://github.com/okfn/dataconverters) - [dateparser](https://github.com/scrapinghub/dateparser) - [django-import-export](https://github.com/django-import-export/django-import-export) - [extruct](https://github.com/scrapinghub/extruct) - [grablib](https://github.com/lorien/grab) - [import.io](http://import.io/) - [libextract](https://github.com/datalib/libextract) - [multicorn](https://github.com/Kozea/Multicorn) - [odo](https://github.com/blaze/odo) - [pandashells](https://github.com/robdmc/pandashells) (and pandas DataFrame) - [parse](https://github.com/r1chardj0n3s/parse) - [proof](https://github.com/wireservice/proof) - [records](https://github.com/kennethreitz/records) - [schema](https://pypi.python.org/pypi/schema) - [scrapelib](https://github.com/jamesturk/scrapelib) - [scrapy](http://scrapy.org/) - [screed](https://github.com/ctb/screed) - [selection](https://github.com/lorien/selection) - [streamtools](http://blog.nytlabs.com/streamtools/) - [table-extractor](https://pypi.python.org/pypi/table-extractor) - [tablib](https://tablib.readthedocs.org/en/latest/) - [telega-mega-import](https://github.com/django-stars/telega-mega-import) - [textql](https://github.com/dinedal/textql) - [texttables](https://github.com/Taywee/texttables) - [validictory](https://github.com/jamesturk/validictory) - [validr](https://pypi.python.org/pypi/validr) - [visidata](https://github.com/saulpw/visidata/) - [webscraper.io](http://webscraper.io/) ## Known Issues - [Create a better plugin interface so anyone can benefit from it][rows-issue-27] - [Create an object to represent a set of `rows.Table`s, like `TableSet`][rows-issue-47] - Performance:
the automatic type detection algorithm can cost time: it iterates over all rows to determine the type of each column. You can disable it by passing `samples=0` to any `import_from_*` function, or you can change the number of sample rows (any positive number is accepted). - [Code design issues][rows-issue-31] [rows-issue-27]: https://github.com/turicas/rows/issues/27 [rows-issue-31]: https://github.com/turicas/rows/issues/31 [rows-issue-47]: https://github.com/turicas/rows/issues/47 [rows-showcase-source]: https://github.com/leonardocsantoss/django-rows [rows-showcase]: http://rows.irdx.com.br/ [rows-talk-pt]: http://blog.justen.eng.br/2016/05/dados-tabulares-a-maneira-pythonica.html rows-0.3.1/docs/locale.md000066400000000000000000000020061310400316700152070ustar00rootroot00000000000000# Locale Many fields inside `rows.fields` are locale-aware. If you have some data using Brazilian Portuguese number formatting, for example (`,` as the decimal separator and `.` as the thousands separator), you can tell the library about it and `rows` will automatically understand these numbers! Let's see it working by extracting the population of cities in Rio de Janeiro state: ```python import locale import requests import rows from io import BytesIO url = 'http://cidades.ibge.gov.br/comparamun/compara.php?idtema=1&codv=v01&coduf=33' html = requests.get(url).content with rows.locale_context(name='pt_BR.UTF-8', category=locale.LC_NUMERIC): rio = rows.import_from_html(BytesIO(html)) total_population = sum(city.pessoas for city in rio) # 'pessoas' is the fieldname related to the number of people in each city print('Rio de Janeiro has {} inhabitants'.format(total_population)) ``` The column `pessoas` will be imported as an `IntegerField` and the result is: ```text Rio de Janeiro has 15989929 inhabitants ``` rows-0.3.1/docs/make.bat000066400000000000000000000170601310400316700150410ustar00rootroot00000000000000@ECHO OFF REM Command file for Sphinx documentation if "%SPHINXBUILD%" == "" ( set SPHINXBUILD=sphinx-build ) set BUILDDIR=_build set ALLSPHINXOPTS=-d %BUILDDIR%/doctrees %SPHINXOPTS% . set I18NSPHINXOPTS=%SPHINXOPTS% . if NOT "%PAPER%" == "" ( set ALLSPHINXOPTS=-D latex_paper_size=%PAPER% %ALLSPHINXOPTS% set I18NSPHINXOPTS=-D latex_paper_size=%PAPER% %I18NSPHINXOPTS% ) if "%1" == "" goto help if "%1" == "help" ( :help echo.Please use `make ^<target^>` where ^<target^> is one of echo. html to make standalone HTML files echo. dirhtml to make HTML files named index.html in directories echo. singlehtml to make a single large HTML file echo. pickle to make pickle files echo. json to make JSON files echo. htmlhelp to make HTML files and a HTML help project echo. qthelp to make HTML files and a qthelp project echo. devhelp to make HTML files and a Devhelp project echo. epub to make an epub echo. epub3 to make an epub3 echo. latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter echo. text to make text files echo. man to make manual pages echo. texinfo to make Texinfo files echo. gettext to make PO message catalogs echo. changes to make an overview over all changed/added/deprecated items echo. xml to make Docutils-native XML files echo. pseudoxml to make pseudoxml-XML files for display purposes echo. linkcheck to check all external links for integrity echo. doctest to run all doctests embedded in the documentation if enabled echo. coverage to run coverage check of the documentation if enabled echo.
dummy to check syntax errors of document sources goto end ) if "%1" == "clean" ( for /d %%i in (%BUILDDIR%\*) do rmdir /q /s %%i del /q /s %BUILDDIR%\* goto end ) REM Check if sphinx-build is available and fallback to Python version if any %SPHINXBUILD% 1>NUL 2>NUL if errorlevel 9009 goto sphinx_python goto sphinx_ok :sphinx_python set SPHINXBUILD=python -m sphinx.__init__ %SPHINXBUILD% 2> nul if errorlevel 9009 ( echo. echo.The 'sphinx-build' command was not found. Make sure you have Sphinx echo.installed, then set the SPHINXBUILD environment variable to point echo.to the full path of the 'sphinx-build' executable. Alternatively you echo.may add the Sphinx directory to PATH. echo. echo.If you don't have Sphinx installed, grab it from echo.http://sphinx-doc.org/ exit /b 1 ) :sphinx_ok if "%1" == "html" ( %SPHINXBUILD% -b html %ALLSPHINXOPTS% %BUILDDIR%/html if errorlevel 1 exit /b 1 echo. echo.Build finished. The HTML pages are in %BUILDDIR%/html. goto end ) if "%1" == "dirhtml" ( %SPHINXBUILD% -b dirhtml %ALLSPHINXOPTS% %BUILDDIR%/dirhtml if errorlevel 1 exit /b 1 echo. echo.Build finished. The HTML pages are in %BUILDDIR%/dirhtml. goto end ) if "%1" == "singlehtml" ( %SPHINXBUILD% -b singlehtml %ALLSPHINXOPTS% %BUILDDIR%/singlehtml if errorlevel 1 exit /b 1 echo. echo.Build finished. The HTML pages are in %BUILDDIR%/singlehtml. goto end ) if "%1" == "pickle" ( %SPHINXBUILD% -b pickle %ALLSPHINXOPTS% %BUILDDIR%/pickle if errorlevel 1 exit /b 1 echo. echo.Build finished; now you can process the pickle files. goto end ) if "%1" == "json" ( %SPHINXBUILD% -b json %ALLSPHINXOPTS% %BUILDDIR%/json if errorlevel 1 exit /b 1 echo. echo.Build finished; now you can process the JSON files. goto end ) if "%1" == "htmlhelp" ( %SPHINXBUILD% -b htmlhelp %ALLSPHINXOPTS% %BUILDDIR%/htmlhelp if errorlevel 1 exit /b 1 echo. echo.Build finished; now you can run HTML Help Workshop with the ^ .hhp project file in %BUILDDIR%/htmlhelp. goto end ) if "%1" == "qthelp" ( %SPHINXBUILD% -b qthelp %ALLSPHINXOPTS% %BUILDDIR%/qthelp if errorlevel 1 exit /b 1 echo. echo.Build finished; now you can run "qcollectiongenerator" with the ^ .qhcp project file in %BUILDDIR%/qthelp, like this: echo.^> qcollectiongenerator %BUILDDIR%\qthelp\rows.qhcp echo.To view the help file: echo.^> assistant -collectionFile %BUILDDIR%\qthelp\rows.ghc goto end ) if "%1" == "devhelp" ( %SPHINXBUILD% -b devhelp %ALLSPHINXOPTS% %BUILDDIR%/devhelp if errorlevel 1 exit /b 1 echo. echo.Build finished. goto end ) if "%1" == "epub" ( %SPHINXBUILD% -b epub %ALLSPHINXOPTS% %BUILDDIR%/epub if errorlevel 1 exit /b 1 echo. echo.Build finished. The epub file is in %BUILDDIR%/epub. goto end ) if "%1" == "epub3" ( %SPHINXBUILD% -b epub3 %ALLSPHINXOPTS% %BUILDDIR%/epub3 if errorlevel 1 exit /b 1 echo. echo.Build finished. The epub3 file is in %BUILDDIR%/epub3. goto end ) if "%1" == "latex" ( %SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex if errorlevel 1 exit /b 1 echo. echo.Build finished; the LaTeX files are in %BUILDDIR%/latex. goto end ) if "%1" == "latexpdf" ( %SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex cd %BUILDDIR%/latex make all-pdf cd %~dp0 echo. echo.Build finished; the PDF files are in %BUILDDIR%/latex. goto end ) if "%1" == "latexpdfja" ( %SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex cd %BUILDDIR%/latex make all-pdf-ja cd %~dp0 echo. echo.Build finished; the PDF files are in %BUILDDIR%/latex. goto end ) if "%1" == "text" ( %SPHINXBUILD% -b text %ALLSPHINXOPTS% %BUILDDIR%/text if errorlevel 1 exit /b 1 echo. 
echo.Build finished. The text files are in %BUILDDIR%/text. goto end ) if "%1" == "man" ( %SPHINXBUILD% -b man %ALLSPHINXOPTS% %BUILDDIR%/man if errorlevel 1 exit /b 1 echo. echo.Build finished. The manual pages are in %BUILDDIR%/man. goto end ) if "%1" == "texinfo" ( %SPHINXBUILD% -b texinfo %ALLSPHINXOPTS% %BUILDDIR%/texinfo if errorlevel 1 exit /b 1 echo. echo.Build finished. The Texinfo files are in %BUILDDIR%/texinfo. goto end ) if "%1" == "gettext" ( %SPHINXBUILD% -b gettext %I18NSPHINXOPTS% %BUILDDIR%/locale if errorlevel 1 exit /b 1 echo. echo.Build finished. The message catalogs are in %BUILDDIR%/locale. goto end ) if "%1" == "changes" ( %SPHINXBUILD% -b changes %ALLSPHINXOPTS% %BUILDDIR%/changes if errorlevel 1 exit /b 1 echo. echo.The overview file is in %BUILDDIR%/changes. goto end ) if "%1" == "linkcheck" ( %SPHINXBUILD% -b linkcheck %ALLSPHINXOPTS% %BUILDDIR%/linkcheck if errorlevel 1 exit /b 1 echo. echo.Link check complete; look for any errors in the above output ^ or in %BUILDDIR%/linkcheck/output.txt. goto end ) if "%1" == "doctest" ( %SPHINXBUILD% -b doctest %ALLSPHINXOPTS% %BUILDDIR%/doctest if errorlevel 1 exit /b 1 echo. echo.Testing of doctests in the sources finished, look at the ^ results in %BUILDDIR%/doctest/output.txt. goto end ) if "%1" == "coverage" ( %SPHINXBUILD% -b coverage %ALLSPHINXOPTS% %BUILDDIR%/coverage if errorlevel 1 exit /b 1 echo. echo.Testing of coverage in the sources finished, look at the ^ results in %BUILDDIR%/coverage/python.txt. goto end ) if "%1" == "xml" ( %SPHINXBUILD% -b xml %ALLSPHINXOPTS% %BUILDDIR%/xml if errorlevel 1 exit /b 1 echo. echo.Build finished. The XML files are in %BUILDDIR%/xml. goto end ) if "%1" == "pseudoxml" ( %SPHINXBUILD% -b pseudoxml %ALLSPHINXOPTS% %BUILDDIR%/pseudoxml if errorlevel 1 exit /b 1 echo. echo.Build finished. The pseudo-XML files are in %BUILDDIR%/pseudoxml. goto end ) if "%1" == "dummy" ( %SPHINXBUILD% -b dummy %ALLSPHINXOPTS% %BUILDDIR%/dummy if errorlevel 1 exit /b 1 echo. echo.Build finished. Dummy builder generates no files. goto end ) :end rows-0.3.1/docs/operations.md000066400000000000000000000014141310400316700161350ustar00rootroot00000000000000# `Table` operations The module `rows.operations` contains some operations you can do on your `Table` objects: - `rows.operations.join`: return a new `Table` based on the joining of a list of `Table`s and a field to act as `key` between them. Note: for performance reasons you may not want to use this function, since the join operation is done in Python -- you can instead convert everything to SQLite, query the data there and then get your results in a `Table`, like the [`rows query`][rows-cli-query] command does. - `rows.operations.transform`: return a new `Table` based on other tables and a transformation function. - `rows.operations.transpose`: transpose the `Table` based on a specific field. [rows-cli-query]: https://github.com/turicas/rows/blob/develop/rows/cli.py#L291 rows-0.3.1/docs/plugins.md000066400000000000000000000036641310400316700154420ustar00rootroot00000000000000# Plugins The idea behind plugins is very simple: you write a little piece of code which reads data from/writes data to some specific format and the library does all the other tasks for you, such as detecting and converting data types. So writing a plugin is as easy as reading from/writing to the file format you want.
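To make this concrete, here is a minimal sketch of the idea -- not the real plugin API (which, as noted below, is still going to be re-designed), just a user-level function for a hypothetical `key=value` format that delegates type detection to `rows`:

```python
import rows

def import_from_keyvalue(text):
    """Parse lines like 'name=John;age=30' into a rows.Table.

    The format and this function are hypothetical -- the point is the
    division of labor: we only extract the raw values, and rows does the
    rest (field creation plus type detection/conversion).
    """
    records = []
    for line in text.splitlines():
        pairs = (item.split('=', 1) for item in line.split(';'))
        records.append(dict(pairs))
    return rows.import_from_dicts(records)

table = import_from_keyvalue('name=John;age=30\nname=Mary;age=25')
print(table.fields)  # `age` should be detected as IntegerField
```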
Currently we have the following plugins: - CSV: use `rows.import_from_csv` and `rows.export_to_csv` (dependencies are installed by default) - TXT: use `rows.export_to_txt` (no dependencies) - JSON: use `rows.import_from_json` and `rows.export_to_json` (no dependencies) - HTML: use `rows.import_from_html` and `rows.export_to_html` (dependencies must be installed with `pip install rows[html]`) - XPath: use `rows.import_from_xpath` passing the following arguments: `filename_or_fobj`, `rows_xpath` and `fields_xpath` (dependencies must be installed with `pip install rows[xpath]`) -- see an example in `examples/library/ecuador_radiodifusoras.py`. - Parquet: use `rows.import_from_parquet` passing the filename (dependencies must be installed with `pip install rows[parquet]` and if the data is compressed using snappy you also need to install `rows[parquet-snappy]` and the `libsnappy-dev` system library) -- read [this blog post][blog-rows-parquet] for more details and one example - XLS: use `rows.import_from_xls` and `rows.export_to_xls` (dependencies must be installed with `pip install rows[xls]`) - XLSX: use `rows.import_from_xlsx` and `rows.export_to_xlsx` (dependencies must be installed with `pip install rows[xlsx]`) - SQLite: use `rows.import_from_sqlite` and `rows.export_to_sqlite` (no dependencies) - ODS: use `rows.import_from_ods` (dependencies must be installed with `pip install rows[ods]`) More plugins are coming and we're going to re-design the plugin interface so you can create and distribute your own in a better way. Feel free [to contribute][doc-contributing]. :-) [doc-contributing]: contributing.md rows-0.3.1/docs/quick-start.md000066400000000000000000000220331310400316700162210ustar00rootroot00000000000000# Quick Start Guide ## Programmatically creating a `Table` object `rows` can import data from any of the supported formats and will return a `Table` object for you, but you can also create a `Table` object by hand. ### Using `Table.append` ```python from collections import OrderedDict from rows import fields, Table my_fields = OrderedDict([('name', fields.TextField), ('age', fields.IntegerField),]) table = Table(fields=my_fields) table.append({'name': 'Álvaro Justen', 'age': 30}) table.append({'name': 'Another Guy', 'age': 42}) ``` Also check all the available field types inside `rows.fields`. ### From a `list` of `dict`s A common use case is to have a `list` of `dict`s -- you can import it as well, and `rows` will automatically fill in the blanks (your `dict`s don't need to have the same keys) and convert the data: ```python import rows data = [{'name': 'Álvaro Justen', 'age': 30}, {'name': 'Another Guy', 'age': 42},] table = rows.import_from_dicts(data) ``` In this case, `table.fields` will be created automatically (`rows` will identify the field type for each `dict` key). ## Iterating over a `Table` You can iterate over a `Table` object and each returned object will be a `namedtuple` where you can access the row's data, like this: ```python def print_person(person): print('{} is {} years old.'.format(person.name, person.age)) for person in table: # namedtuples are returned for each row print_person(person) ``` The result: ```text Álvaro Justen is 30 years old. Another Guy is 42 years old. ``` ## Automatic type detection/conversion `rows` will automatically identify the data type for each column and convert the values for you. For example: ```python table.append({'name': '...', 'age': ''}) print_person(table[-1]) # yes, you can index it! ``` And the output: ```text ... is None years old. ```
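You can inspect what the table knows about its columns at any moment through `table.fields` -- continuing the example above (a small sketch; the expected output is shown as comments):

```python
# `table.fields` is an OrderedDict mapping each field name to its type,
# whether it was declared by hand or detected automatically.
for field_name, field_type in table.fields.items():
    print('{}: {}'.format(field_name, field_type.__name__))
# Expected output, given the fields declared earlier:
# name: TextField
# age: IntegerField
```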
## Importing Data `rows` will help you import data: its plugins will do the hard job of parsing each supported file format so you don't need to. They can also help you export data. For example, let's download a CSV from the Web and import it: ```python import requests import rows from io import BytesIO url = 'http://unitedstates.sunlightfoundation.com/legislators/legislators.csv' csv = requests.get(url).content # Download CSV data legislators = rows.import_from_csv(BytesIO(csv)) # already imported! print('Hey, rows automatically identified the types:') for field_name, field_type in legislators.fields.items(): print('{} is {}'.format(field_name, field_type)) ``` And you'll see something like this: ```text [...] in_office is <class 'rows.fields.BoolField'> gender is <class 'rows.fields.TextField'> [...] birthdate is <class 'rows.fields.DateField'> ``` We can then work on this data: ```python women = sum(1 for row in legislators if row.in_office and row.gender == 'F') men = sum(1 for row in legislators if row.in_office and row.gender == 'M') print('Women vs Men (in office): {} vs {}.'.format(women, men)) ``` Then you'll see the effects of our sexist society: ```text Women vs Men (in office): 108 vs 432. ``` Now, let's compare ages (`order_by` sorts in ascending order, so after sorting by `birthdate` the first row is the oldest legislator): ```python legislators.order_by('birthdate') older, younger = legislators[0], legislators[-1] print('{}, {} is older than {}, {}.'.format( older.lastname, older.firstname, younger.lastname, younger.firstname)) ``` The output: ```text Byrd, Robert is older than Stefanik, Elise. ``` You can also get a whole column, like this: ```python >>> legislators['gender'] ['M', 'M', 'M', 'M', 'M', 'M', 'M', 'M', 'M', 'M', 'M', 'M', 'M', 'M', 'F', 'M', ...] ``` And change the whole column (or add a new one): ```python >>> legislators['gender'] = ['male' if gender == 'M' else 'female' for gender in legislators['gender']] >>> legislators['gender'] ['male', 'male', 'male', 'male', 'male', 'male', 'male', 'male', 'male', 'male', 'male', 'male', 'female', 'male', ...] ``` Or delete it: ```python >>> 'gender' in legislators.field_names True >>> del legislators['gender'] >>> 'gender' in legislators.field_names False >>> legislators[0].gender [...] AttributeError: 'Row' object has no attribute 'gender' ``` > Note that **native Python objects** are returned for each row inside a > `namedtuple`! The library recognizes each field type and converts it > *automagically* no matter which plugin you're using to import the data. ### Common Parameters Each plugin has its own parameters (like `index` in `import_from_html` and `sheet_name` in `import_from_xls`) but all plugins create a `rows.Table` object, so they also share some common parameters you can pass to `import_from_X`. They are (see the sketch after this list): - `fields`: an `OrderedDict` with field names and types (disables automatic detection of types). - `force_types`: a `dict` mapping field names to field types you'd like to force, so `rows` won't try to detect them. Example: `{'name': rows.fields.TextField, 'age': rows.fields.IntegerField}`. - `skip_header`: ignore the header row. Only used if `fields` is not `None`. Default: `True`. - `import_fields`: a `list` with field names to import (other fields will be ignored) -- fields will be imported in this order. - `export_fields`: a `list` with field names to export (other fields will be ignored) -- fields will be exported in this order. - `samples`: number of sample rows to use on the field type autodetection algorithm. Default: `None` (use all rows).
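For instance, here is a short sketch applying two of these parameters to the legislators CSV downloaded above (the column names are the ones from that dataset):

```python
# Re-import the same CSV, keeping only a few columns and limiting the number
# of rows used by the type detection algorithm. Being common parameters,
# they work the same way in any import_from_* function.
legislators = rows.import_from_csv(
    BytesIO(csv),
    import_fields=['title', 'firstname', 'lastname', 'party'],
    samples=100,
)
```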
## Exporting Data If you have a `Table` object you can export it to all available plugins which have the "export" feature. Let's use the HTML plugin: ```python rows.export_to_html(legislators, 'legislators.html') ``` And you'll get a valid HTML file; check its beginning with: ```bash $ head legislators.html ``` ### Exporting to memory For some plugins you don't need to specify a filename: if it is omitted, the result is returned to you as a `str`. Example: ```python fields_to_export = ('title', 'firstname', 'lastname', 'party') content = rows.export_to_txt(legislators, export_fields=fields_to_export) print(content) ``` The result will be: ```text +-------+-------------+--------------------+-------+ | title | firstname | lastname | party | +-------+-------------+--------------------+-------+ | Sen | Robert | Byrd | D | | Rep | Ralph | Hall | R | | Sen | Ted | Stevens | R | | Sen | Frank | Lautenberg | D | [...] | Rep | Aaron | Schock | R | | Rep | Matt | Gaetz | R | | Rep | Trey | Hollingsworth | R | | Rep | Mike | Gallagher | R | | Rep | Elise | Stefanik | R | +-------+-------------+--------------------+-------+ ``` The plugins `csv`, `json` and `html` have the same behaviour. #### Using file-objects The majority of plugins also accept file-objects instead of filenames (for importing and also for exporting), for example: ```python from io import BytesIO fobj = BytesIO() rows.export_to_csv(legislators, fobj) fobj.seek(0) # You need to point the file cursor to the first position. print(fobj.read()) ``` The following text will be printed: ```text b"title,firstname,lastname,party\r\nSen,Robert,Byrd,D\r\nRep,Ralph,Hall,R[...]" ``` With the `sqlite` plugin the returned object is a `sqlite3.Connection`: ```python connection = rows.export_to_sqlite(legislators, ':memory:') query = "SELECT firstname, lastname FROM table1 WHERE birthdate > '1980-01-01'" print(list(connection.execute(query).fetchall())) ``` You'll get the following output: ```text [('Darren', 'Soto'), ('Adam', 'Kinzinger'), ('Ron', 'DeSantis'), (...)] ``` And you can use the `sqlite3.Connection` when importing, too: ```python table = rows.import_from_sqlite(connection, query=query) print(rows.export_to_txt(table)) ``` The following output will be printed: ```text +-----------+-----------------+ | firstname | lastname | +-----------+-----------------+ | Darren | Soto | | Adam | Kinzinger | | Ron | DeSantis | | Stephanie | Murphy | | Seth | Moulton | | Jaime | Herrera Beutler | | Pete | Aguilar | | Scott | Taylor | | Jim | Banks | | Ruben | Gallego | | Lee | Zeldin | | Carlos | Curbelo | | Justin | Amash | | Ruben | Kihuen | | Jason | Smith | | Brian | Mast | | Joseph | Kennedy | | Eric | Swalwell | | Tulsi | Gabbard | | Aaron | Schock | | Matt | Gaetz | | Trey | Hollingsworth | | Mike | Gallagher | | Elise | Stefanik | +-----------+-----------------+ ``` ## Learn more Now that you have finished the quick start guide, see the [examples][rows-examples] folder for more examples.
[rows-examples]: https://github.com/turicas/rows/tree/develop/examples rows-0.3.1/examples/000077500000000000000000000000001310400316700143165ustar00rootroot00000000000000rows-0.3.1/examples/cli/000077500000000000000000000000001310400316700150655ustar00rootroot00000000000000rows-0.3.1/examples/cli/convert.sh000077500000000000000000000013271310400316700171070ustar00rootroot00000000000000#!/bin/bash URL="http://cidades.ibge.gov.br/comparamun/compara.php?idtema=1&codv=v01&coduf=43" LOCALE="pt_BR.UTF-8" FILENAME="populacao-rs" rows convert --input-locale=$LOCALE --input-encoding=utf-8 $URL $FILENAME.csv rows convert $FILENAME.csv $FILENAME.html rows convert $FILENAME.html $FILENAME.xls rows convert $FILENAME.xls $FILENAME.txt rows convert $FILENAME.txt $FILENAME.xlsx rows convert $FILENAME.xlsx $FILENAME.sqlite rows convert $FILENAME.sqlite $FILENAME.json # When converting to JSON we cannot guarantee field order! # `convert` can also sort the data before saving it into the CSV file rows convert --input-locale=$LOCALE --input-encoding=utf-8 \ --order-by=^pessoas $URL $FILENAME-sorted.csv rows-0.3.1/examples/cli/join.sh000077500000000000000000000002461310400316700163650ustar00rootroot00000000000000#!/bin/bash KEYS="uf,municipio" SOURCE1="populacao-sudeste.csv" SOURCE2="area-sudeste.csv" DESTINATION="sudeste.csv" rows join $KEYS $SOURCE1 $SOURCE2 $DESTINATION rows-0.3.1/examples/cli/query.sh000077500000000000000000000023021310400316700165660ustar00rootroot00000000000000#!/bin/bash # This script will run rows' "query" subcommand passing the URL of two HTML # sources, a SQL query and a output CSV filename. rows CLI will: # - Download each file, identify its format (HTML) and import as a table # - Convert the two tables into one SQLite in-memory database # - Run the query into the database # - Export the results to a CSV file # Rio de Janeiro: inhabitants (per city) SOURCE1="http://cidades.ibge.gov.br/comparamun/compara.php?idtema=1&codv=v01&coduf=33" # Rio de Janeiro: area in km² (per city) SOURCE2="http://cidades.ibge.gov.br/comparamun/compara.php?idtema=16&codv=v01&coduf=33" LOCALE="pt_BR.UTF-8" SOURCES="$SOURCE1 $SOURCE2" # $SOURCE1 (inhabitants) will be "table1" # $SOURCE2 (area) will be "table2" QUERY="SELECT table1.uf AS state, table1.municipio AS city, table1.pessoas AS inhabitants, table2.km2 as area, (table1.pessoas / table2.km2) AS demographic_density FROM table1, table2 WHERE table1.uf = table2.uf AND table1.municipio = table2.municipio" OUTPUT="rj-density.csv" rows query --input-locale=$LOCALE --input-encoding=utf-8 "$QUERY" $SOURCES \ --output=$OUTPUT rows-0.3.1/examples/cli/sum.sh000077500000000000000000000020661310400316700162340ustar00rootroot00000000000000#!/bin/bash # population SOURCE1="http://cidades.ibge.gov.br/comparamun/compara.php?idtema=1&codv=v01&coduf=33" SOURCE2="http://cidades.ibge.gov.br/comparamun/compara.php?idtema=1&codv=v01&coduf=35" SOURCE3="http://cidades.ibge.gov.br/comparamun/compara.php?idtema=1&codv=v01&coduf=31" SOURCE4="http://cidades.ibge.gov.br/comparamun/compara.php?idtema=1&codv=v01&coduf=32" DESTINATION="populacao-sudeste.csv" LOCALE="pt_BR.UTF-8" rows sum --input-locale=$LOCALE --input-encoding=utf-8 \ $SOURCE1 $SOURCE2 $SOURCE3 $SOURCE4 \ $DESTINATION # area SOURCE5="http://cidades.ibge.gov.br/comparamun/compara.php?idtema=16&codv=v01&coduf=33" SOURCE6="http://cidades.ibge.gov.br/comparamun/compara.php?idtema=16&codv=v01&coduf=35" SOURCE7="http://cidades.ibge.gov.br/comparamun/compara.php?idtema=16&codv=v01&coduf=31" 
SOURCE8="http://cidades.ibge.gov.br/comparamun/compara.php?idtema=16&codv=v01&coduf=32" DESTINATION="area-sudeste.csv" rows sum --input-locale=$LOCALE --input-encoding=utf-8 \ $SOURCE5 $SOURCE6 $SOURCE7 $SOURCE8 \ $DESTINATION rows-0.3.1/examples/data/000077500000000000000000000000001310400316700152275ustar00rootroot00000000000000rows-0.3.1/examples/data/brazilian-cities.csv000066400000000000000000004712611310400316700212100ustar00rootroot00000000000000state,city,inhabitants,area AC,Acrelândia,12538,1807.92 AC,Assis Brasil,6072,4974.18 AC,Brasiléia,21398,3916.5 AC,Bujari,8471,3034.87 AC,Capixaba,8798,1702.58 AC,Cruzeiro do Sul,78507,8779.39 AC,Epitaciolândia,15100,1654.77 AC,Feijó,32412,27974.89 AC,Jordão,6577,5357.28 AC,Mâncio Lima,15206,5453.07 AC,Manoel Urbano,7981,10634.46 AC,Marechal Thaumaturgo,14227,8191.69 AC,Plácido de Castro,17209,1943.25 AC,Porto Acre,14880,2604.86 AC,Porto Walter,9176,6443.83 AC,Rio Branco,336038,8835.54 AC,Rodrigues Alves,14389,3076.95 AC,Santa Rosa do Purus,4691,6145.61 AC,Sena Madureira,38029,23751.47 AC,Senador Guiomard,20179,2321.45 AC,Tarauacá,35590,20171.05 AC,Xapuri,16091,5347.45 AL,Água Branca,19377,454.63 AL,Anadia,17424,189.47 AL,Arapiraca,214006,356.18 AL,Atalaia,44322,528.77 AL,Barra de Santo Antônio,14230,138.43 AL,Barra de São Miguel,7574,76.62 AL,Batalha,17076,320.92 AL,Belém,4551,48.63 AL,Belo Monte,7030,334.15 AL,Boca da Mata,25776,186.53 AL,Branquinha,10583,166.32 AL,Cacimbinhas,10195,272.98 AL,Cajueiro,20409,124.26 AL,Campestre,6598,66.39 AL,Campo Alegre,50816,295.1 AL,Campo Grande,9032,167.32 AL,Canapi,17250,574.57 AL,Capela,17077,242.62 AL,Carneiros,8290,113.06 AL,Chã Preta,7146,172.85 AL,Coité do Nóia,10926,88.51 AL,Colônia Leopoldina,20019,207.89 AL,Coqueiro Seco,5526,39.73 AL,Coruripe,52130,918.21 AL,Craíbas,22641,271.33 AL,Delmiro Gouveia,48096,607.81 AL,Dois Riachos,10880,140.47 AL,Estrela de Alagoas,17251,259.77 AL,Feira Grande,21321,172.75 AL,Feliz Deserto,4345,91.84 AL,Flexeiras,12325,333.22 AL,Girau do Ponciano,36600,500.62 AL,Ibateguara,15149,265.31 AL,Igaci,25188,334.45 AL,Igreja Nova,23292,427.42 AL,Inhapi,17898,376.86 AL,Jacaré dos Homens,5413,142.34 AL,Jacuípe,6997,210.38 AL,Japaratinga,7754,85.95 AL,Jaramataia,5558,103.71 AL,Jequiá da Praia,12029,351.61 AL,Joaquim Gomes,22575,298.29 AL,Jundiá,4202,92.22 AL,Junqueiro,23836,241.59 AL,Lagoa da Canoa,18250,88.45 AL,Limoeiro de Anadia,26992,315.78 AL,Maceió,932748,503.07 AL,Major Isidoro,18897,453.9 AL,Mar Vermelho,3652,93.1 AL,Maragogi,28749,334.04 AL,Maravilha,10284,302.06 AL,Marechal Deodoro,45977,331.68 AL,Maribondo,13619,174.28 AL,Mata Grande,24698,907.98 AL,Matriz de Camaragibe,23785,219.99 AL,Messias,15682,113.83 AL,Minador do Negrão,5275,167.61 AL,Monteirópolis,6935,86.1 AL,Murici,26710,426.82 AL,Novo Lino,12060,233.41 AL,Olho d`Água das Flores,20364,183.44 AL,Olho d`Água do Casado,8491,322.95 AL,Olho d`Água Grande,4957,118.51 AL,Olivença,11047,172.96 AL,Ouro Branco,10912,204.77 AL,Palestina,5112,48.9 AL,Palmeira dos Índios,70368,452.71 AL,Pão de Açúcar,23811,682.99 AL,Pariconha,10264,258.53 AL,Paripueira,11347,92.97 AL,Passo de Camaragibe,14763,244.47 AL,Paulo Jacinto,7426,118.46 AL,Penedo,60378,689.16 AL,Piaçabuçu,17203,240.01 AL,Pilar,33305,249.71 AL,Pindoba,2866,117.6 AL,Piranhas,23045,408.11 AL,Poço das Trincheiras,13872,291.94 AL,Porto Calvo,25708,307.92 AL,Porto de Pedras,8429,257.66 AL,Porto Real do Colégio,19334,240.52 AL,Quebrangulo,11480,319.83 AL,Rio Largo,68481,306.33 AL,Roteiro,6656,129.29 AL,Santa Luzia do Norte,6891,29.6 AL,Santana do Ipanema,44932,437.88 
AL,Santana do Mundaú,10961,224.81 AL,São Brás,6718,139.95 AL,São José da Laje,22686,256.64 AL,São José da Tapera,30088,495.11 AL,São Luís do Quitunde,32412,397.18 AL,São Miguel dos Campos,54577,360.79 AL,São Miguel dos Milagres,7163,76.74 AL,São Sebastião,32010,315.11 AL,Satuba,14603,42.63 AL,Senador Rui Palmeira,13047,342.72 AL,Tanque d`Arca,6122,129.51 AL,Taquarana,19020,166.05 AL,Teotônio Vilela,41152,297.88 AL,Traipu,25702,697.97 AL,União dos Palmares,62358,420.66 AL,Viçosa,25407,343.36 AP,Amapá,8069,9175.99 AP,Calçoene,9000,14269.37 AP,Cutias,4696,2114.25 AP,Ferreira Gomes,5802,5046.26 AP,Itaubal,4265,1703.97 AP,Laranjal do Jari,39942,30971.9 AP,Macapá,398204,6408.55 AP,Mazagão,17032,13130.98 AP,Oiapoque,20509,22625.18 AP,Pedra Branca do Amaparí,10772,9495.52 AP,Porto Grande,16809,4401.79 AP,Pracuúba,3793,4956.48 AP,Santana,101262,1579.61 AP,Serra do Navio,4380,7756.14 AP,Tartarugalzinho,12563,6709.66 AP,Vitória do Jari,12428,2482.89 AM,Alvarães,14088,5911.77 AM,Amaturá,9467,4758.74 AM,Anamã,10214,2453.94 AM,Anori,16317,5795.31 AM,Apuí,18007,54240.02 AM,Atalaia do Norte,15153,76351.67 AM,Autazes,32135,7599.36 AM,Barcelos,25718,122476.12 AM,Barreirinha,27355,5750.57 AM,Benjamin Constant,33411,8793.42 AM,Beruri,15486,17250.72 AM,Boa Vista do Ramos,14979,2586.85 AM,Boca do Acre,30632,21951.26 AM,Borba,34961,44251.75 AM,Caapiranga,10975,9456.62 AM,Canutama,12738,29819.71 AM,Carauari,25774,25767.68 AM,Careiro,32734,6091.55 AM,Careiro da Várzea,23930,2631.14 AM,Coari,75965,57921.91 AM,Codajás,23206,18711.55 AM,Eirunepé,30665,15011.77 AM,Envira,16338,7499.33 AM,Fonte Boa,22817,12110.93 AM,Guajará,13974,7578.88 AM,Humaitá,44227,33071.79 AM,Ipixuna,22254,12044.7 AM,Iranduba,40781,2214.25 AM,Itacoatiara,86839,8892.04 AM,Itamarati,8038,25275.93 AM,Itapiranga,8211,4231.15 AM,Japurá,7326,55791.84 AM,Juruá,10802,19400.7 AM,Jutaí,17992,69551.83 AM,Lábrea,37701,68233.82 AM,Manacapuru,85141,7330.08 AM,Manaquiri,22801,3975.77 AM,Manaus,1802014,11401.09 AM,Manicoré,47017,48282.66 AM,Maraã,17528,16910.37 AM,Maués,52236,39989.89 AM,Nhamundá,18278,14105.59 AM,Nova Olinda do Norte,30696,5608.57 AM,Novo Airão,14723,37771.38 AM,Novo Aripuanã,21451,41187.89 AM,Parintins,102033,5952.39 AM,Pauini,18166,41610.06 AM,Presidente Figueiredo,27175,25422.33 AM,Rio Preto da Eva,25719,5813.23 AM,Santa Isabel do Rio Negro,18146,62846.41 AM,Santo Antônio do Içá,24481,12307.19 AM,São Gabriel da Cachoeira,37896,109183.43 AM,São Paulo de Olivença,31422,19745.9 AM,São Sebastião do Uatumã,10705,10741.08 AM,Silves,8444,3748.83 AM,Tabatinga,52272,3224.88 AM,Tapauá,19077,89325.19 AM,Tefé,61453,23704.48 AM,Tonantins,17079,6432.68 AM,Uarini,11891,10246.24 AM,Urucará,17094,27904.26 AM,Urucurituba,17837,2906.7 BA,Abaíra,8316,530.26 BA,Abaré,17064,1484.87 BA,Acajutiba,14653,180.15 BA,Adustina,15702,632.14 BA,Água Fria,15731,661.86 BA,Aiquara,4602,159.69 BA,Alagoinhas,141949,752.38 BA,Alcobaça,21271,1481.25 BA,Almadina,6357,251.11 BA,Amargosa,34351,463.19 BA,Amélia Rodrigues,25190,173.48 BA,América Dourada,15961,837.72 BA,Anagé,25516,1947.54 BA,Andaraí,13960,1861.72 BA,Andorinha,14414,1247.61 BA,Angical,14073,1528.28 BA,Anguera,10242,177.04 BA,Antas,17072,321.61 BA,Antônio Cardoso,11554,294.45 BA,Antônio Gonçalves,11015,313.95 BA,Aporá,17731,561.83 BA,Apuarema,7459,154.86 BA,Araças,11561,487.12 BA,Aracatu,13743,1489.8 BA,Araci,51651,1556.14 BA,Aramari,10036,329.65 BA,Arataca,10392,375.21 BA,Aratuípe,8599,181.14 BA,Aurelino Leal,13595,457.74 BA,Baianópolis,13850,3342.56 BA,Baixa Grande,20060,946.65 BA,Banzaê,11814,227.54 
BA,Barra,49325,11414.41 BA,Barra da Estiva,21187,1346.79 BA,Barra do Choça,34788,783.14 BA,Barra do Mendes,13987,1540.8 BA,Barra do Rocha,6313,208.35 BA,Barreiras,137427,7859.23 BA,Barro Alto,13612,416.5 BA,Barro Preto,6453,128.38 BA,Barrocas,14191,200.97 BA,Belmonte,21798,1970.14 BA,Belo Campo,16021,629.07 BA,Biritinga,14836,550.08 BA,Boa Nova,15411,868.79 BA,Boa Vista do Tupim,17991,2811.23 BA,Bom Jesus da Lapa,63480,4200.13 BA,Bom Jesus da Serra,10113,421.54 BA,Boninal,13695,934.01 BA,Bonito,14834,726.62 BA,Boquira,22037,1482.65 BA,Botuporã,11154,645.53 BA,Brejões,14282,480.83 BA,Brejolândia,11077,2744.72 BA,Brotas de Macaúbas,10717,2240.11 BA,Brumado,64602,2226.8 BA,Buerarema,18605,230.46 BA,Buritirama,19600,3942.08 BA,Caatiba,11420,515.86 BA,Cabaceiras do Paraguaçu,17327,226.02 BA,Cachoeira,32026,395.22 BA,Caculé,22236,668.36 BA,Caém,10368,548.38 BA,Caetanos,13639,774.59 BA,Caetité,47515,2442.9 BA,Cafarnaum,17209,675.25 BA,Cairu,15374,460.98 BA,Caldeirão Grande,12491,454.94 BA,Camacan,31472,626.65 BA,Camaçari,242970,784.66 BA,Camamu,35180,920.37 BA,Campo Alegre de Lourdes,28090,2781.17 BA,Campo Formoso,66616,7258.68 BA,Canápolis,9410,437.22 BA,Canarana,24067,576.37 BA,Canavieiras,32336,1326.93 BA,Candeal,8895,445.1 BA,Candeias,83158,258.36 BA,Candiba,13210,417.98 BA,Cândido Sales,27918,1617.67 BA,Cansanção,32908,1336.75 BA,Canudos,15732,3214.22 BA,Capela do Alto Alegre,11527,649.43 BA,Capim Grosso,26577,334.42 BA,Caraíbas,10222,805.63 BA,Caravelas,21414,2393.5 BA,Cardeal da Silva,8899,256.91 BA,Carinhanha,28380,2737.18 BA,Casa Nova,64940,9647.07 BA,Castro Alves,25408,711.74 BA,Catolândia,2612,642.57 BA,Catu,51077,416.22 BA,Caturama,8843,664.55 BA,Central,17013,602.41 BA,Chorrochó,10734,3005.32 BA,Cícero Dantas,32300,884.97 BA,Cipó,15755,128.31 BA,Coaraci,20964,282.66 BA,Cocos,18153,10227.37 BA,Conceição da Feira,20391,162.88 BA,Conceição do Almeida,17889,289.94 BA,Conceição do Coité,62040,1016.01 BA,Conceição do Jacuípe,30123,117.53 BA,Conde,23620,964.64 BA,Condeúba,16898,1285.93 BA,Contendas do Sincorá,4663,1044.69 BA,Coração de Maria,22401,348.16 BA,Cordeiros,8168,535.49 BA,Coribe,14307,2478.51 BA,Coronel João Sá,17066,883.52 BA,Correntina,31249,11921.68 BA,Cotegipe,13636,4195.83 BA,Cravolândia,5041,162.17 BA,Crisópolis,20046,607.66 BA,Cristópolis,13280,1043.11 BA,Cruz das Almas,58606,145.74 BA,Curaçá,32168,6079.02 BA,Dário Meira,12836,445.42 BA,Dias d`Ávila,66440,184.23 BA,Dom Basílio,11355,676.9 BA,Dom Macedo Costa,3874,84.76 BA,Elísio Medrado,7947,193.53 BA,Encruzilhada,23766,1982.47 BA,Entre Rios,39872,1215.3 BA,Érico Cardoso,10859,701.42 BA,Esplanada,32802,1297.98 BA,Euclides da Cunha,56289,2028.42 BA,Eunápolis,100196,1179.13 BA,Fátima,17652,359.39 BA,Feira da Mata,6184,1633.88 BA,Feira de Santana,556642,1337.99 BA,Filadélfia,16740,570.07 BA,Firmino Alves,5384,162.42 BA,Floresta Azul,10660,293.46 BA,Formosa do Rio Preto,22528,16303.86 BA,Gandu,30336,243.15 BA,Gavião,4561,369.88 BA,Gentio do Ouro,10622,3699.87 BA,Glória,15076,1255.56 BA,Gongogi,8357,197.67 BA,Governador Mangabeira,19818,106.32 BA,Guajeru,10412,936.09 BA,Guanambi,78833,1296.65 BA,Guaratinga,22165,2325.39 BA,Heliópolis,13192,338.8 BA,Iaçu,25736,2451.42 BA,Ibiassucê,10062,426.67 BA,Ibicaraí,24272,231.94 BA,Ibicoara,17282,849.84 BA,Ibicuí,15785,1176.84 BA,Ibipeba,17008,1383.53 BA,Ibipitanga,14171,954.37 BA,Ibiquera,4866,945.3 BA,Ibirapitanga,22598,447.26 BA,Ibirapuã,7956,787.74 BA,Ibirataia,18943,294.87 BA,Ibitiara,15508,1847.57 BA,Ibititá,17840,623.08 BA,Ibotirama,25424,1722.47 BA,Ichu,5255,127.67 
BA,Igaporã,15205,832.52 BA,Igrapiúna,13343,527.21 BA,Iguaí,25705,827.84 BA,Ilhéus,184236,1760.11 BA,Inhambupe,36306,1222.58 BA,Ipecaetá,15331,369.89 BA,Ipiaú,44390,267.33 BA,Ipirá,59343,3060.26 BA,Ipupiara,9285,1061.16 BA,Irajuba,7002,413.52 BA,Iramaia,11990,1947.24 BA,Iraquara,22601,1029.41 BA,Irará,27466,277.79 BA,Irecê,66181,319.03 BA,Itabela,28390,850.84 BA,Itaberaba,61631,2343.51 BA,Itabuna,204667,432.24 BA,Itacaré,24318,737.87 BA,Itaeté,14924,1208.93 BA,Itagi,13051,259.19 BA,Itagibá,15193,788.83 BA,Itagimirim,7110,839.02 BA,Itaguaçu da Bahia,13209,4451.27 BA,Itaju do Colônia,7309,1222.71 BA,Itajuípe,21081,284.5 BA,Itamaraju,63069,2215.14 BA,Itamari,7903,111.09 BA,Itambé,23089,1407.31 BA,Itanagra,7598,490.53 BA,Itanhém,20216,1463.82 BA,Itaparica,20725,118.04 BA,Itapé,10995,459.36 BA,Itapebi,10495,1005.37 BA,Itapetinga,68273,1627.52 BA,Itapicuru,32261,1585.59 BA,Itapitanga,10207,408.38 BA,Itaquara,7678,322.98 BA,Itarantim,18539,1805.13 BA,Itatim,14522,583.45 BA,Itiruçu,12693,313.71 BA,Itiúba,36113,1722.75 BA,Itororó,19914,313.59 BA,Ituaçu,18127,1216.28 BA,Ituberá,26591,417.27 BA,Iuiú,10900,1485.73 BA,Jaborandi,8973,9545.13 BA,Jacaraci,13651,1235.6 BA,Jacobina,79247,2358.69 BA,Jaguaquara,51011,928.24 BA,Jaguarari,30343,2456.61 BA,Jaguaripe,16467,898.67 BA,Jandaíra,10331,641.21 BA,Jequié,151895,3227.34 BA,Jeremoabo,37680,4656.27 BA,Jiquiriçá,14118,239.4 BA,Jitaúna,14115,218.92 BA,João Dourado,22549,914.86 BA,Juazeiro,197965,6500.52 BA,Jucuruçu,10290,1457.86 BA,Jussara,15052,948.58 BA,Jussari,6474,356.85 BA,Jussiape,8031,585.19 BA,Lafaiete Coutinho,3901,405.39 BA,Lagoa Real,13934,877.43 BA,Laje,22201,457.74 BA,Lajedão,3733,615.47 BA,Lajedinho,3936,776.06 BA,Lajedo do Tabocal,8305,431.9 BA,Lamarão,9560,209.01 BA,Lapão,25646,605.08 BA,Lauro de Freitas,163449,57.69 BA,Lençóis,10368,1277.08 BA,Licínio de Almeida,12311,843.39 BA,Livramento de Nossa Senhora,42693,2135.59 BA,Luís Eduardo Magalhães,60105,3941.07 BA,Macajuba,11229,650.3 BA,Macarani,17093,1287.52 BA,Macaúbas,47051,2994.15 BA,Macururé,8073,2294.25 BA,Madre de Deus,17376,32.2 BA,Maetinga,7038,681.66 BA,Maiquinique,8782,491.98 BA,Mairi,19326,952.6 BA,Malhada,16014,2008.35 BA,Malhada de Pedras,8468,529.06 BA,Manoel Vitorino,14387,2231.63 BA,Mansidão,12592,3177.43 BA,Maracás,24613,2253.09 BA,Maragogipe,42815,440.16 BA,Maraú,19101,823.36 BA,Marcionílio Souza,10500,1277.2 BA,Mascote,14640,772.46 BA,Mata de São João,40183,633.2 BA,Matina,11145,775.74 BA,Medeiros Neto,21560,1238.75 BA,Miguel Calmon,26475,1568.22 BA,Milagres,10306,284.38 BA,Mirangaba,16279,1697.95 BA,Mirante,10507,1083.67 BA,Monte Santo,52338,3186.38 BA,Morpará,8280,1697.01 BA,Morro do Chapéu,35164,5741.65 BA,Mortugaba,12477,612.22 BA,Mucugê,10545,2455.04 BA,Mucuri,36026,1781.14 BA,Mulungu do Morro,12249,565.98 BA,Mundo Novo,24395,1493.34 BA,Muniz Ferreira,7317,110.12 BA,Muquém de São Francisco,10272,3637.58 BA,Muritiba,28899,89.31 BA,Mutuípe,21449,283.21 BA,Nazaré,27274,253.78 BA,Nilo Peçanha,12530,399.33 BA,Nordestina,12371,468.89 BA,Nova Canaã,16713,853.7 BA,Nova Fátima,7602,349.9 BA,Nova Ibiá,6648,178.75 BA,Nova Itarana,7435,470.44 BA,Nova Redenção,8034,430.96 BA,Nova Soure,24136,950.4 BA,Nova Viçosa,38556,1322.85 BA,Novo Horizonte,10673,609.18 BA,Novo Triunfo,15051,251.32 BA,Olindina,24943,542.18 BA,Oliveira dos Brejinhos,21831,3512.69 BA,Ouriçangas,8298,155.09 BA,Ourolândia,16425,1489.24 BA,Palmas de Monte Alto,20775,2524.85 BA,Palmeiras,8410,657.68 BA,Paramirim,21001,1170.13 BA,Paratinga,29504,2614.78 BA,Paripiranga,27778,435.7 BA,Pau Brasil,10852,606.52 BA,Paulo 
Afonso,108396,1579.72 BA,Pé de Serra,13752,616.21 BA,Pedrão,6876,159.8 BA,Pedro Alexandre,16995,896.07 BA,Piatã,17982,1713.76 BA,Pilão Arcado,32860,11731.5 BA,Pindaí,15628,614.09 BA,Pindobaçu,20121,496.28 BA,Pintadas,10342,545.59 BA,Piraí do Norte,9799,187.28 BA,Piripá,12783,439.63 BA,Piritiba,22399,975.57 BA,Planaltino,8822,927.02 BA,Planalto,24481,883.77 BA,Poções,44701,826.5 BA,Pojuca,33066,290.12 BA,Ponto Novo,15742,497.4 BA,Porto Seguro,126929,2408.33 BA,Potiraguá,9810,985.49 BA,Prado,27627,1740.3 BA,Presidente Dutra,13750,163.55 BA,Presidente Jânio Quadros,13652,1185.15 BA,Presidente Tancredo Neves,23846,417.2 BA,Queimadas,24602,2027.88 BA,Quijingue,27228,1342.67 BA,Quixabeira,9554,387.68 BA,Rafael Jambeiro,22874,1207.22 BA,Remanso,38957,4683.41 BA,Retirolândia,12055,181.46 BA,Riachão das Neves,21937,5673.02 BA,Riachão do Jacuípe,33172,1190.2 BA,Riacho de Santana,30646,2582.4 BA,Ribeira do Amparo,14276,642.59 BA,Ribeira do Pombal,47518,762.21 BA,Ribeirão do Largo,8602,1271.35 BA,Rio de Contas,13007,1063.77 BA,Rio do Antônio,14815,814.37 BA,Rio do Pires,11918,819.79 BA,Rio Real,37164,716.89 BA,Rodelas,7775,2723.53 BA,Ruy Barbosa,29887,2171.51 BA,Salinas da Margarida,13456,149.82 BA,Salvador,2675656,693.28 BA,Santa Bárbara,19064,345.67 BA,Santa Brígida,15060,882.81 BA,Santa Cruz Cabrália,26264,1551.98 BA,Santa Cruz da Vitória,6673,298.21 BA,Santa Inês,10363,315.66 BA,Santa Luzia,13344,774.92 BA,Santa Maria da Vitória,40309,1966.84 BA,Santa Rita de Cássia,26250,5977.77 BA,Santa Teresinha,9648,707.24 BA,Santaluz,33838,1563.29 BA,Santana,24750,1820.17 BA,Santanópolis,8776,230.83 BA,Santo Amaro,57800,492.92 BA,Santo Antônio de Jesus,90985,261.35 BA,Santo Estêvão,47880,362.96 BA,São Desidério,27659,15157.01 BA,São Domingos,9226,326.95 BA,São Felipe,20305,205.99 BA,São Félix,14098,99.2 BA,São Félix do Coribe,13048,949.34 BA,São Francisco do Conde,33183,262.86 BA,São Gabriel,18427,1199.52 BA,São Gonçalo dos Campos,33283,300.73 BA,São José da Vitória,5715,72.49 BA,São José do Jacuípe,10180,402.43 BA,São Miguel das Matas,10414,214.41 BA,São Sebastião do Passé,42153,538.32 BA,Sapeaçu,16585,117.21 BA,Sátiro Dias,18964,1010.05 BA,Saubara,11201,163.5 BA,Saúde,11845,504.31 BA,Seabra,41798,2517.29 BA,Sebastião Laranjeiras,10371,1948.61 BA,Senhor do Bonfim,74419,827.49 BA,Sento Sé,37425,12698.71 BA,Serra do Ramalho,31638,2593.23 BA,Serra Dourada,18112,1346.63 BA,Serra Preta,15401,536.49 BA,Serrinha,76762,624.23 BA,Serrolândia,12344,295.85 BA,Simões Filho,118047,201.22 BA,Sítio do Mato,12050,1751.22 BA,Sítio do Quinto,12592,700.17 BA,Sobradinho,22000,1238.92 BA,Souto Soares,15899,993.51 BA,Tabocas do Brejo Velho,11431,1375.74 BA,Tanhaçu,20013,1234.44 BA,Tanque Novo,16128,722.9 BA,Tanquinho,8008,219.85 BA,Taperoá,18748,410.79 BA,Tapiramutá,16516,663.88 BA,Teixeira de Freitas,138341,1163.83 BA,Teodoro Sampaio,7895,231.54 BA,Teofilândia,21482,335.54 BA,Teolândia,14836,317.83 BA,Terra Nova,12803,198.93 BA,Tremedal,17029,1679.46 BA,Tucano,52418,2799.15 BA,Uauá,24294,3035.24 BA,Ubaíra,19750,726.26 BA,Ubaitaba,20691,178.81 BA,Ubatã,25004,268.24 BA,Uibaí,13625,550.99 BA,Umburanas,17000,1670.42 BA,Una,24110,1177.44 BA,Urandi,16466,969.45 BA,Uruçuca,19837,391.98 BA,Utinga,18173,638.23 BA,Valença,88673,1192.61 BA,Valente,24560,384.34 BA,Várzea da Roça,13786,513.92 BA,Várzea do Poço,8661,204.91 BA,Várzea Nova,13073,1192.93 BA,Varzedo,9109,226.8 BA,Vera Cruz,37567,299.73 BA,Vereda,6800,874.33 BA,Vitória da Conquista,306866,3356.89 BA,Wagner,8983,421.0 BA,Wanderley,12485,2959.51 BA,Wenceslau Guimarães,22189,674.03 
BA,Xique-Xique,45536,5502.33 CE,Abaiara,10496,178.83 CE,Acarape,15338,155.68 CE,Acaraú,57551,842.56 CE,Acopiara,51160,2265.35 CE,Aiuaba,16203,2434.42 CE,Alcântaras,10771,138.61 CE,Altaneira,6856,73.3 CE,Alto Santo,16359,1338.21 CE,Amontada,39232,1179.04 CE,Antonina do Norte,6984,260.1 CE,Apuiarés,13925,545.16 CE,Aquiraz,72628,482.57 CE,Aracati,69159,1228.06 CE,Aracoiaba,25391,656.6 CE,Ararendá,10491,344.13 CE,Araripe,20685,1099.93 CE,Aratuba,11529,114.79 CE,Arneiroz,7650,1066.36 CE,Assaré,22445,1116.33 CE,Aurora,24566,885.84 CE,Baixio,6026,146.43 CE,Banabuiú,17315,1080.33 CE,Barbalha,55323,569.51 CE,Barreira,19573,245.81 CE,Barro,21514,711.89 CE,Barroquinha,14476,383.41 CE,Baturité,33321,308.58 CE,Beberibe,49311,1623.89 CE,Bela Cruz,30878,843.02 CE,Boa Viagem,52498,2836.78 CE,Brejo Santo,45193,663.43 CE,Camocim,60158,1124.78 CE,Campos Sales,26506,1082.77 CE,Canindé,74473,3218.48 CE,Capistrano,17062,222.55 CE,Caridade,20020,846.51 CE,Cariré,18347,756.88 CE,Caririaçu,26393,623.56 CE,Cariús,18567,1061.8 CE,Carnaubal,16746,364.81 CE,Cascavel,66142,837.33 CE,Catarina,18745,486.86 CE,Catunda,9952,790.71 CE,Caucaia,325441,1228.51 CE,Cedro,24527,725.8 CE,Chaval,12615,238.23 CE,Choró,12853,815.77 CE,Chorozinho,18915,278.41 CE,Coreaú,21954,775.8 CE,Crateús,72812,2985.14 CE,Crato,121428,1176.47 CE,Croatá,17069,696.98 CE,Cruz,22479,329.95 CE,Deputado Irapuan Pinheiro,9095,470.43 CE,Ererê,6840,382.71 CE,Eusébio,46033,79.01 CE,Farias Brito,19007,503.62 CE,Forquilha,21786,516.99 CE,Fortaleza,2452185,314.93 CE,Fortim,14817,278.77 CE,Frecheirinha,12991,181.24 CE,General Sampaio,6218,205.81 CE,Graça,15049,281.87 CE,Granja,52645,2697.22 CE,Granjeiro,4629,100.13 CE,Groaíras,10228,155.95 CE,Guaiúba,24091,267.13 CE,Guaraciaba do Norte,37775,611.46 CE,Guaramiranga,4164,59.44 CE,Hidrolândia,19325,966.85 CE,Horizonte,55187,159.98 CE,Ibaretama,12922,877.26 CE,Ibiapina,23808,414.94 CE,Ibicuitinga,11335,424.25 CE,Icapuí,18392,423.45 CE,Icó,65456,1872.0 CE,Iguatu,96495,1029.21 CE,Independência,25573,3218.68 CE,Ipaporanga,11343,702.14 CE,Ipaumirim,12009,273.83 CE,Ipu,40296,629.32 CE,Ipueiras,37862,1477.41 CE,Iracema,13722,821.25 CE,Irauçuba,22324,1461.25 CE,Itaiçaba,7316,212.11 CE,Itaitinga,35817,151.44 CE,Itapagé,48350,439.51 CE,Itapipoca,116065,1614.16 CE,Itapiúna,18626,588.7 CE,Itarema,37471,720.66 CE,Itatira,18894,783.44 CE,Jaguaretama,17863,1759.4 CE,Jaguaribara,10399,668.74 CE,Jaguaribe,34409,1876.81 CE,Jaguaruana,32236,867.56 CE,Jardim,26688,552.42 CE,Jati,7660,361.07 CE,Jijoca de Jericoacoara,17002,204.79 CE,Juazeiro do Norte,249939,248.83 CE,Jucás,23807,937.19 CE,Lavras da Mangabeira,31090,947.97 CE,Limoeiro do Norte,56264,751.07 CE,Madalena,18088,1034.72 CE,Maracanaú,209057,106.65 CE,Maranguape,113561,590.87 CE,Marco,24703,574.14 CE,Martinópole,10214,298.96 CE,Massapê,35191,566.58 CE,Mauriti,44240,1049.49 CE,Meruoca,13693,149.85 CE,Milagres,28316,606.44 CE,Milhã,13086,502.34 CE,Miraíma,12800,699.96 CE,Missão Velha,34274,645.7 CE,Mombaça,42690,2119.48 CE,Monsenhor Tabosa,16705,886.14 CE,Morada Nova,62065,2779.25 CE,Moraújo,8070,415.63 CE,Morrinhos,20700,415.56 CE,Mucambo,14102,190.6 CE,Mulungu,11485,134.57 CE,Nova Olinda,14256,284.4 CE,Nova Russas,30965,742.77 CE,Novo Oriente,27453,949.39 CE,Ocara,24007,765.41 CE,Orós,21389,576.27 CE,Pacajus,61838,254.48 CE,Pacatuba,72299,131.99 CE,Pacoti,11607,112.02 CE,Pacujá,5986,76.13 CE,Palhano,8866,440.38 CE,Palmácia,12005,117.81 CE,Paracuru,31636,300.29 CE,Paraipaba,30041,300.92 CE,Parambu,31309,2303.54 CE,Paramoti,11308,482.59 CE,Pedra Branca,41890,1303.29 
CE,Penaforte,8226,141.93 CE,Pentecoste,35400,1378.31 CE,Pereiro,15757,433.51 CE,Pindoretama,18683,72.96 CE,Piquet Carneiro,15467,587.88 CE,Pires Ferreira,10216,243.1 CE,Poranga,12001,1309.26 CE,Porteiras,15061,217.58 CE,Potengi,10276,338.73 CE,Potiretama,6126,410.34 CE,Quiterianópolis,19921,1040.99 CE,Quixadá,80604,2019.83 CE,Quixelô,15000,559.56 CE,Quixeramobim,71887,3275.63 CE,Quixeré,19412,612.62 CE,Redenção,26415,225.31 CE,Reriutaba,19455,383.32 CE,Russas,69833,1590.21 CE,Saboeiro,15752,1383.48 CE,Salitre,15453,804.36 CE,Santa Quitéria,42763,4260.48 CE,Santana do Acaraú,29946,969.33 CE,Santana do Cariri,17170,855.56 CE,São Benedito,44178,338.25 CE,São Gonçalo do Amarante,43890,834.45 CE,São João do Jaguaribe,7900,280.46 CE,São Luís do Curu,12332,122.42 CE,Senador Pompeu,26469,1002.13 CE,Senador Sá,6852,423.92 CE,Sobral,188233,2122.9 CE,Solonópole,17665,1536.17 CE,Tabuleiro do Norte,29204,861.83 CE,Tamboril,25451,1961.31 CE,Tarrafas,8910,454.39 CE,Tauá,55716,4018.16 CE,Tejuçuoca,16827,750.63 CE,Tianguá,68892,908.89 CE,Trairi,51422,925.72 CE,Tururu,14408,202.28 CE,Ubajara,31787,421.03 CE,Umari,7545,263.93 CE,Umirim,18802,316.82 CE,Uruburetama,19765,97.07 CE,Uruoca,12883,696.75 CE,Varjota,17593,179.4 CE,Várzea Alegre,38434,835.71 CE,Viçosa do Ceará,54955,1311.63 DF,Brasília,2570160,5780.0 ES,Afonso Cláudio,31091,951.42 ES,Água Doce do Norte,11771,473.73 ES,Águia Branca,9519,454.45 ES,Alegre,30768,772.0 ES,Alfredo Chaves,13955,615.79 ES,Alto Rio Novo,7317,227.63 ES,Anchieta,23902,409.23 ES,Apiacá,7512,193.99 ES,Aracruz,81832,1423.87 ES,Atilio Vivacqua,9850,223.45 ES,Baixo Guandu,29081,917.07 ES,Barra de São Francisco,40649,941.8 ES,Boa Esperança,14199,428.5 ES,Bom Jesus do Norte,9476,89.08 ES,Brejetuba,11915,344.17 ES,Cachoeiro de Itapemirim,189889,878.18 ES,Cariacica,348738,279.86 ES,Castelo,34747,664.06 ES,Colatina,111788,1416.8 ES,Conceição da Barra,28449,1184.91 ES,Conceição do Castelo,11681,369.23 ES,Divino de São Lourenço,4516,173.88 ES,Domingos Martins,31847,1228.35 ES,Dores do Rio Preto,6397,159.3 ES,Ecoporanga,23212,2285.37 ES,Fundão,17025,288.72 ES,Governador Lindenberg,10869,359.98 ES,Guaçuí,27851,468.34 ES,Guarapari,105286,594.49 ES,Ibatiba,22366,240.54 ES,Ibiraçu,11178,201.25 ES,Ibitirama,8957,329.87 ES,Iconha,12523,203.53 ES,Irupi,11723,184.55 ES,Itaguaçu,14134,531.5 ES,Itapemirim,30988,561.87 ES,Itarana,10881,298.76 ES,Iúna,27328,461.08 ES,Jaguaré,24678,659.75 ES,Jerônimo Monteiro,10879,161.98 ES,João Neiva,15809,284.73 ES,Laranja da Terra,10826,458.37 ES,Linhares,141306,3504.14 ES,Mantenópolis,13612,321.42 ES,Marataízes,34140,133.08 ES,Marechal Floriano,14262,285.38 ES,Marilândia,11107,309.02 ES,Mimoso do Sul,25902,869.43 ES,Montanha,17849,1098.92 ES,Mucurici,5655,540.19 ES,Muniz Freire,18397,679.32 ES,Muqui,14396,327.49 ES,Nova Venécia,46031,1442.16 ES,Pancas,21548,829.94 ES,Pedro Canário,23794,433.88 ES,Pinheiros,23895,973.14 ES,Piúma,18123,74.83 ES,Ponto Belo,6979,360.66 ES,Presidente Kennedy,10314,583.93 ES,Rio Bananal,17530,642.23 ES,Rio Novo do Sul,11325,204.36 ES,Santa Leopoldina,12240,718.1 ES,Santa Maria de Jetibá,34176,735.58 ES,Santa Teresa,21823,683.16 ES,São Domingos do Norte,8001,298.71 ES,São Gabriel da Palha,31859,434.89 ES,São José do Calçado,10408,273.49 ES,São Mateus,109028,2338.73 ES,São Roque do Canaã,11273,342.01 ES,Serra,409267,551.69 ES,Sooretama,23843,586.42 ES,Vargem Alta,19130,413.63 ES,Venda Nova do Imigrante,20447,185.91 ES,Viana,65001,312.75 ES,Vila Pavão,8672,433.26 ES,Vila Valério,13830,470.1 ES,Vila Velha,414586,210.07 
ES,Vitória,327801,98.19 GO,Abadia de Goiás,6876,146.78 GO,Abadiânia,15757,1045.13 GO,Acreúna,20279,1566.0 GO,Adelândia,2477,115.35 GO,Água Fria de Goiás,5090,2029.42 GO,Água Limpa,2013,452.86 GO,Águas Lindas de Goiás,159378,188.39 GO,Alexânia,23814,847.89 GO,Aloândia,2051,102.16 GO,Alto Horizonte,4505,503.76 GO,Alto Paraíso de Goiás,6885,2593.91 GO,Alvorada do Norte,8084,1259.37 GO,Amaralina,3434,1343.17 GO,Americano do Brasil,5508,133.56 GO,Amorinópolis,3609,408.53 GO,Anápolis,334613,933.16 GO,Anhanguera,1020,56.95 GO,Anicuns,20239,979.23 GO,Aparecida de Goiânia,455657,288.34 GO,Aparecida do Rio Doce,2427,602.13 GO,Aporé,3803,2900.16 GO,Araçu,3802,148.94 GO,Aragarças,18305,662.9 GO,Aragoiânia,8365,219.55 GO,Araguapaz,7510,2193.7 GO,Arenópolis,3277,1074.6 GO,Aruanã,7496,3050.31 GO,Aurilândia,3650,565.34 GO,Avelinópolis,2450,173.64 GO,Baliza,3714,1782.6 GO,Barro Alto,8716,1093.25 GO,Bela Vista de Goiás,24554,1255.42 GO,Bom Jardim de Goiás,8423,1899.49 GO,Bom Jesus de Goiás,20727,1405.12 GO,Bonfinópolis,7536,122.29 GO,Bonópolis,3503,1628.49 GO,Brazabrantes,3232,123.07 GO,Britânia,5509,1461.19 GO,Buriti Alegre,9054,895.46 GO,Buriti de Goiás,2560,199.29 GO,Buritinópolis,3321,247.05 GO,Cabeceiras,7354,1127.61 GO,Cachoeira Alta,10553,1654.55 GO,Cachoeira de Goiás,1417,422.75 GO,Cachoeira Dourada,8254,521.13 GO,Caçu,13283,2251.01 GO,Caiapônia,16757,8637.87 GO,Caldas Novas,70473,1595.97 GO,Caldazinha,3325,250.89 GO,Campestre de Goiás,3387,273.82 GO,Campinaçu,3656,1974.38 GO,Campinorte,11111,1067.2 GO,Campo Alegre de Goiás,6060,2462.99 GO,Campo Limpo de Goiás,6241,159.56 GO,Campos Belos,18410,724.07 GO,Campos Verdes,5020,441.65 GO,Carmo do Rio Verde,8928,418.54 GO,Castelândia,3638,297.43 GO,Catalão,86647,3821.46 GO,Caturaí,4686,207.26 GO,Cavalcante,9392,6953.67 GO,Ceres,20722,214.32 GO,Cezarina,7545,415.81 GO,Chapadão do Céu,7001,2185.12 GO,Cidade Ocidental,55915,389.99 GO,Cocalzinho de Goiás,17407,1789.04 GO,Colinas do Sul,3523,1708.19 GO,Córrego do Ouro,2632,462.3 GO,Corumbá de Goiás,10361,1061.96 GO,Corumbaíba,8181,1883.67 GO,Cristalina,46580,6162.09 GO,Cristianópolis,2932,225.36 GO,Crixás,15760,4661.16 GO,Cromínia,3555,364.11 GO,Cumari,2964,570.54 GO,Damianópolis,3292,415.35 GO,Damolândia,2747,84.5 GO,Davinópolis,2056,481.3 GO,Diorama,2479,687.35 GO,Divinópolis de Goiás,4962,830.97 GO,Doverlândia,7892,3222.94 GO,Edealina,3733,603.65 GO,Edéia,11266,1461.5 GO,Estrela do Norte,3320,301.64 GO,Faina,6983,1945.66 GO,Fazenda Nova,6322,1281.42 GO,Firminópolis,11580,423.65 GO,Flores de Goiás,12066,3709.43 GO,Formosa,100085,5811.79 GO,Formoso,4883,844.29 GO,Gameleira de Goiás,3275,592.0 GO,Goianápolis,10695,162.44 GO,Goiandira,5265,564.69 GO,Goianésia,59549,1547.27 GO,Goiânia,1302001,732.8 GO,Goianira,34060,209.04 GO,Goiás,24727,3108.02 GO,Goiatuba,32492,2475.11 GO,Gouvelândia,4949,824.26 GO,Guapó,13976,516.84 GO,Guaraíta,2376,205.31 GO,Guarani de Goiás,4258,1229.15 GO,Guarinos,2299,595.87 GO,Heitoraí,3571,229.64 GO,Hidrolândia,17398,943.9 GO,Hidrolina,4029,580.39 GO,Iaciara,12427,1550.38 GO,Inaciolândia,5699,688.4 GO,Indiara,13687,956.48 GO,Inhumas,48246,613.23 GO,Ipameri,24735,4368.99 GO,Ipiranga de Goiás,2844,241.29 GO,Iporá,31274,1026.38 GO,Israelândia,2887,577.48 GO,Itaberaí,35371,1457.28 GO,Itaguari,4513,146.64 GO,Itaguaru,5437,239.68 GO,Itajá,5062,2091.4 GO,Itapaci,18458,956.13 GO,Itapirapuã,7835,2043.72 GO,Itapuranga,26125,1276.48 GO,Itarumã,6300,3433.63 GO,Itauçu,8575,383.84 GO,Itumbiara,92883,2462.93 GO,Ivolândia,2663,1257.66 GO,Jandaia,6164,864.11 GO,Jaraguá,41870,1849.55 
GO,Jataí,88006,7174.23 GO,Jaupaci,3000,527.1 GO,Jesúpolis,2300,122.48 GO,Joviânia,7118,445.49 GO,Jussara,19153,4084.11 GO,Lagoa Santa,1254,458.87 GO,Leopoldo de Bulhões,7882,480.89 GO,Luziânia,174531,3961.12 GO,Mairipotaba,2374,467.43 GO,Mambaí,6871,880.62 GO,Mara Rosa,10649,1687.91 GO,Marzagão,2072,222.43 GO,Matrinchã,4414,1150.89 GO,Maurilândia,11521,389.76 GO,Mimoso de Goiás,2685,1386.92 GO,Minaçu,31154,2860.74 GO,Mineiros,52935,9060.09 GO,Moiporá,1763,460.62 GO,Monte Alegre de Goiás,7730,3119.81 GO,Montes Claros de Goiás,7987,2899.18 GO,Montividiu,10572,1874.15 GO,Montividiu do Norte,4122,1333.0 GO,Morrinhos,41460,2846.2 GO,Morro Agudo de Goiás,2356,282.62 GO,Mossâmedes,5007,684.45 GO,Mozarlândia,13404,1734.36 GO,Mundo Novo,6438,2146.65 GO,Mutunópolis,3849,955.88 GO,Nazário,7874,269.1 GO,Nerópolis,24210,204.22 GO,Niquelândia,42361,9843.25 GO,Nova América,2259,212.03 GO,Nova Aurora,2062,302.66 GO,Nova Crixás,11927,7298.78 GO,Nova Glória,8508,412.95 GO,Nova Iguaçu de Goiás,2826,628.44 GO,Nova Roma,3471,2135.96 GO,Nova Veneza,8129,123.38 GO,Novo Brasil,3519,649.95 GO,Novo Gama,95018,194.99 GO,Novo Planalto,3956,1242.96 GO,Orizona,14300,1972.88 GO,Ouro Verde de Goiás,4034,208.77 GO,Ouvidor,5467,413.78 GO,Padre Bernardo,27671,3139.18 GO,Palestina de Goiás,3371,1320.69 GO,Palmeiras de Goiás,23338,1539.69 GO,Palmelo,2335,58.96 GO,Palminópolis,3557,387.69 GO,Panamá,2682,433.76 GO,Paranaiguara,9100,1153.83 GO,Paraúna,10863,3779.39 GO,Perolândia,2950,1029.62 GO,Petrolina de Goiás,10283,531.3 GO,Pilar de Goiás,2773,906.65 GO,Piracanjuba,24026,2405.12 GO,Piranhas,11266,2047.77 GO,Pirenópolis,23006,2205.01 GO,Pires do Rio,28762,1073.36 GO,Planaltina,81649,2543.87 GO,Pontalina,17121,1436.95 GO,Porangatu,42355,4820.52 GO,Porteirão,3347,603.94 GO,Portelândia,3839,556.58 GO,Posse,31419,2024.54 GO,Professor Jamil,3239,347.47 GO,Quirinópolis,43220,3786.69 GO,Rialma,10523,268.47 GO,Rianápolis,4566,159.26 GO,Rio Quente,3312,255.96 GO,Rio Verde,176424,8379.66 GO,Rubiataba,18915,748.26 GO,Sanclerlândia,7550,496.83 GO,Santa Bárbara de Goiás,5751,139.6 GO,Santa Cruz de Goiás,3142,1108.96 GO,Santa Fé de Goiás,4762,1169.17 GO,Santa Helena de Goiás,36469,1141.33 GO,Santa Isabel,3686,807.2 GO,Santa Rita do Araguaia,6924,1361.77 GO,Santa Rita do Novo Destino,3173,956.04 GO,Santa Rosa de Goiás,2909,164.1 GO,Santa Tereza de Goiás,3995,794.56 GO,Santa Terezinha de Goiás,10302,1202.24 GO,Santo Antônio da Barra,4423,451.6 GO,Santo Antônio de Goiás,4703,132.81 GO,Santo Antônio do Descoberto,63248,944.14 GO,São Domingos,11272,3295.74 GO,São Francisco de Goiás,6120,415.79 GO,São João da Paraúna,1689,287.83 GO,São João d`Aliança,10257,3327.38 GO,São Luís de Montes Belos,30034,826.0 GO,São Luíz do Norte,4617,586.06 GO,São Miguel do Araguaia,22283,6144.41 GO,São Miguel do Passa Quatro,3757,537.79 GO,São Patrício,1991,171.96 GO,São Simão,17088,414.01 GO,Senador Canedo,84443,245.28 GO,Serranópolis,7481,5526.72 GO,Silvânia,19089,2345.94 GO,Simolândia,6514,347.98 GO,Sítio d`Abadia,2825,1598.35 GO,Taquaral de Goiás,3541,204.22 GO,Teresina de Goiás,3016,774.64 GO,Terezópolis de Goiás,6561,106.91 GO,Três Ranchos,2819,282.07 GO,Trindade,104488,710.71 GO,Trombas,3452,799.13 GO,Turvânia,4839,480.78 GO,Turvelândia,4399,933.96 GO,Uirapuru,2933,1153.48 GO,Uruaçu,36929,2141.82 GO,Uruana,13826,522.51 GO,Urutaí,3074,626.72 GO,Valparaíso de Goiás,132982,61.41 GO,Varjão,3659,519.19 GO,Vianópolis,12548,954.28 GO,Vicentinópolis,7371,737.26 GO,Vila Boa,4735,1060.17 GO,Vila Propício,5145,2181.58 MA,Açailândia,104047,5806.44 MA,Afonso 
Cunha,5905,371.34 MA,Água Doce do Maranhão,11581,443.27 MA,Alcântara,21851,1486.68 MA,Aldeias Altas,23952,1942.11 MA,Altamira do Maranhão,11063,721.31 MA,Alto Alegre do Maranhão,24599,383.31 MA,Alto Alegre do Pindaré,31057,1932.29 MA,Alto Parnaíba,10766,11132.18 MA,Amapá do Maranhão,6431,502.4 MA,Amarante do Maranhão,37932,7438.15 MA,Anajatuba,25291,1011.13 MA,Anapurus,13939,608.3 MA,Apicum-Açu,14959,353.17 MA,Araguanã,13973,805.2 MA,Araioses,42505,1782.6 MA,Arame,31702,3008.69 MA,Arari,28488,1100.28 MA,Axixá,11407,203.15 MA,Bacabal,100014,1682.96 MA,Bacabeira,14925,615.59 MA,Bacuri,16604,787.86 MA,Bacurituba,5293,674.51 MA,Balsas,83528,13141.73 MA,Barão de Grajaú,17841,2247.24 MA,Barra do Corda,82830,5202.7 MA,Barreirinhas,54930,3111.99 MA,Bela Vista do Maranhão,12049,255.55 MA,Belágua,6524,499.43 MA,Benedito Leite,5469,1781.73 MA,Bequimão,20344,768.95 MA,Bernardo do Mearim,5996,261.45 MA,Boa Vista do Gurupi,7949,403.46 MA,Bom Jardim,39049,6590.53 MA,Bom Jesus das Selvas,28459,2679.1 MA,Bom Lugar,14818,445.98 MA,Brejo,33359,1074.63 MA,Brejo de Areia,5577,362.46 MA,Buriti,27013,1473.96 MA,Buriti Bravo,22899,1582.55 MA,Buriticupu,65237,2545.44 MA,Buritirana,14784,818.42 MA,Cachoeira Grande,8446,705.65 MA,Cajapió,10593,908.73 MA,Cajari,18338,662.07 MA,Campestre do Maranhão,13369,615.38 MA,Cândido Mendes,18505,1632.91 MA,Cantanhede,20448,773.01 MA,Capinzal do Norte,10698,590.53 MA,Carolina,23959,6441.6 MA,Carutapera,22006,1232.08 MA,Caxias,155129,5150.67 MA,Cedral,10297,283.19 MA,Central do Maranhão,7887,319.34 MA,Centro do Guilherme,12565,1074.07 MA,Centro Novo do Maranhão,17622,8258.42 MA,Chapadinha,73350,3247.38 MA,Cidelândia,13681,1464.03 MA,Codó,118038,4361.34 MA,Coelho Neto,46750,975.55 MA,Colinas,39132,1980.55 MA,Conceição do Lago-Açu,14436,733.23 MA,Coroatá,61725,2263.78 MA,Cururupu,32652,1223.37 MA,Davinópolis,12579,335.78 MA,Dom Pedro,22681,358.49 MA,Duque Bacelar,10649,317.92 MA,Esperantinópolis,18452,480.92 MA,Estreito,35835,2718.98 MA,Feira Nova do Maranhão,8126,1473.42 MA,Fernando Falcão,9241,5086.58 MA,Formosa da Serra Negra,17757,3950.53 MA,Fortaleza dos Nogueiras,11646,1664.33 MA,Fortuna,15098,695.0 MA,Godofredo Viana,10635,675.17 MA,Gonçalves Dias,17482,883.59 MA,Governador Archer,10205,445.86 MA,Governador Edison Lobão,15895,615.85 MA,Governador Eugênio Barros,15991,816.99 MA,Governador Luiz Rocha,7337,373.16 MA,Governador Newton Bello,11921,1160.49 MA,Governador Nunes Freire,25401,1037.13 MA,Graça Aranha,6140,271.44 MA,Grajaú,62093,8830.96 MA,Guimarães,12081,595.38 MA,Humberto de Campos,26189,2131.25 MA,Icatu,25145,1448.78 MA,Igarapé do Meio,12550,368.69 MA,Igarapé Grande,11041,374.25 MA,Imperatriz,247505,1368.99 MA,Itaipava do Grajaú,14297,1238.82 MA,Itapecuru Mirim,62110,1471.44 MA,Itinga do Maranhão,24863,3581.72 MA,Jatobá,8526,591.38 MA,Jenipapo dos Vieiras,15440,1962.9 MA,João Lisboa,20381,636.89 MA,Joselândia,15433,681.69 MA,Junco do Maranhão,4020,555.09 MA,Lago da Pedra,46083,1240.45 MA,Lago do Junco,10729,309.02 MA,Lago dos Rodrigues,7794,180.37 MA,Lago Verde,15412,623.24 MA,Lagoa do Mato,10934,1688.05 MA,Lagoa Grande do Maranhão,10517,744.3 MA,Lajeado Novo,6923,1047.73 MA,Lima Campos,11423,321.93 MA,Loreto,11390,3596.84 MA,Luís Domingues,6510,464.06 MA,Magalhães de Almeida,17587,433.15 MA,Maracaçumé,19155,629.31 MA,Marajá do Sena,8051,1447.68 MA,Maranhãozinho,14065,972.62 MA,Mata Roma,15150,548.41 MA,Matinha,21885,408.73 MA,Matões,31015,1976.14 MA,Matões do Norte,13794,794.65 MA,Milagres do Maranhão,8118,634.74 MA,Mirador,20452,8450.85 MA,Miranda do 
Norte,24427,341.11 MA,Mirinzal,14218,687.75 MA,Monção,31738,1301.97 MA,Montes Altos,9413,1488.34 MA,Morros,17783,1715.18 MA,Nina Rodrigues,12464,572.51 MA,Nova Colinas,4885,743.11 MA,Nova Iorque,4590,976.85 MA,Nova Olinda do Maranhão,19134,2452.62 MA,Olho d`Água das Cunhãs,18601,695.33 MA,Olinda Nova do Maranhão,13181,197.64 MA,Paço do Lumiar,105121,122.83 MA,Palmeirândia,18764,525.58 MA,Paraibano,20103,530.52 MA,Parnarama,34586,3439.23 MA,Passagem Franca,17562,1358.33 MA,Pastos Bons,18067,1635.31 MA,Paulino Neves,14519,979.18 MA,Paulo Ramos,20079,1053.41 MA,Pedreiras,39448,288.43 MA,Pedro do Rosário,22732,1749.89 MA,Penalva,34267,738.25 MA,Peri Mirim,13803,405.3 MA,Peritoró,21201,824.72 MA,Pindaré-Mirim,31152,273.53 MA,Pinheiro,78162,1512.68 MA,Pio XII,22016,545.14 MA,Pirapemas,17381,688.76 MA,Poção de Pedras,19708,961.94 MA,Porto Franco,21530,1417.49 MA,Porto Rico do Maranhão,6030,218.83 MA,Presidente Dutra,44731,771.57 MA,Presidente Juscelino,11541,354.7 MA,Presidente Médici,6374,437.69 MA,Presidente Sarney,17165,724.15 MA,Presidente Vargas,10717,459.36 MA,Primeira Cruz,13954,1367.68 MA,Raposa,26327,66.28 MA,Riachão,20209,6373.02 MA,Ribamar Fiquene,7318,750.55 MA,Rosário,39576,685.04 MA,Sambaíba,5487,2478.7 MA,Santa Filomena do Maranhão,7061,602.34 MA,Santa Helena,39110,2308.19 MA,Santa Inês,77282,381.16 MA,Santa Luzia,74043,5462.96 MA,Santa Luzia do Paruá,22644,897.15 MA,Santa Quitéria do Maranhão,29191,1917.59 MA,Santa Rita,32366,706.39 MA,Santana do Maranhão,11661,932.02 MA,Santo Amaro do Maranhão,13820,1601.18 MA,Santo Antônio dos Lopes,14288,771.42 MA,São Benedito do Rio Preto,17799,931.48 MA,São Bento,40736,459.07 MA,São Bernardo,26476,1006.92 MA,São Domingos do Azeitão,6983,960.93 MA,São Domingos do Maranhão,33607,1151.98 MA,São Félix de Balsas,4702,2032.36 MA,São Francisco do Brejão,10261,745.61 MA,São Francisco do Maranhão,12146,2347.2 MA,São João Batista,19920,690.68 MA,São João do Carú,12309,615.7 MA,São João do Paraíso,10814,2053.84 MA,São João do Soter,17238,1438.07 MA,São João dos Patos,24928,1500.63 MA,São José de Ribamar,163045,388.37 MA,São José dos Basílios,7496,362.69 MA,São Luís,1014837,834.79 MA,São Luís Gonzaga do Maranhão,20153,968.57 MA,São Mateus do Maranhão,39093,783.34 MA,São Pedro da Água Branca,12028,720.45 MA,São Pedro dos Crentes,4425,979.63 MA,São Raimundo das Mangabeiras,17474,3521.53 MA,São Raimundo do Doca Bezerra,6090,419.35 MA,São Roberto,5957,227.46 MA,São Vicente Ferrer,20863,390.85 MA,Satubinha,11990,441.81 MA,Senador Alexandre Costa,10256,426.44 MA,Senador La Rocque,17998,1236.87 MA,Serrano do Maranhão,10940,1207.06 MA,Sítio Novo,17002,3114.87 MA,Sucupira do Norte,10444,1074.47 MA,Sucupira do Riachão,4613,564.97 MA,Tasso Fragoso,7796,4382.98 MA,Timbiras,27997,1486.59 MA,Timon,155460,1743.25 MA,Trizidela do Vale,18953,222.95 MA,Tufilândia,5596,271.01 MA,Tuntum,39183,3390.0 MA,Turiaçu,33933,2578.5 MA,Turilândia,22846,1511.86 MA,Tutóia,52788,1651.66 MA,Urbano Santos,24573,1207.63 MA,Vargem Grande,49412,1957.75 MA,Viana,49496,1168.44 MA,Vila Nova dos Martírios,11258,1188.78 MA,Vitória do Mearim,31217,716.72 MA,Vitorino Freire,31658,1305.31 MA,Zé Doca,50173,2416.06 MT,Acorizal,5516,840.59 MT,Água Boa,20856,7481.12 MT,Alta Floresta,49164,8976.18 MT,Alto Araguaia,15644,5514.51 MT,Alto Boa Vista,5247,2240.45 MT,Alto Garças,10350,3748.05 MT,Alto Paraguai,10066,1846.3 MT,Alto Taquari,8072,1416.52 MT,Apiacás,8567,20377.51 MT,Araguaiana,3197,6429.38 MT,Araguainha,1096,687.97 MT,Araputanga,15342,1600.24 MT,Arenápolis,10316,416.79 MT,Aripuanã,18656,25056.78 
MT,Barão de Melgaço,7591,11174.5 MT,Barra do Bugres,31793,6060.2 MT,Barra do Garças,56560,9078.98 MT,Bom Jesus do Araguaia,5314,4274.21 MT,Brasnorte,15357,15959.14 MT,Cáceres,87942,24351.41 MT,Campinápolis,14305,5967.35 MT,Campo Novo do Parecis,27577,9434.42 MT,Campo Verde,31589,4782.12 MT,Campos de Júlio,5154,6801.86 MT,Canabrava do Norte,4786,3452.68 MT,Canarana,18754,10882.4 MT,Carlinda,10990,2393.02 MT,Castanheira,8231,3909.54 MT,Chapada dos Guimarães,17821,6256.99 MT,Cláudia,11028,3849.99 MT,Cocalinho,5490,16530.66 MT,Colíder,30766,3093.17 MT,Colniza,26381,27946.83 MT,Comodoro,18178,21769.72 MT,Confresa,25124,5801.39 MT,Conquista d`Oeste,3385,2672.21 MT,Cotriguaçu,14983,9460.47 MT,Cuiabá,551098,3495.42 MT,Curvelândia,4866,359.76 MT,Denise,8523,1307.19 MT,Diamantino,20341,8230.1 MT,Dom Aquino,8171,2204.16 MT,Feliz Natal,10933,11462.46 MT,Figueirópolis d`Oeste,3796,899.25 MT,Gaúcha do Norte,6293,16930.67 MT,General Carneiro,5027,3794.94 MT,Glória d`Oeste,3135,853.84 MT,Guarantã do Norte,32216,4734.59 MT,Guiratinga,13934,5061.69 MT,Indiavaí,2397,603.29 MT,Ipiranga do Norte,5123,3467.05 MT,Itanhangá,5276,2898.08 MT,Itaúba,4575,4529.58 MT,Itiquira,11478,8722.48 MT,Jaciara,25647,1653.54 MT,Jangada,7696,1018.49 MT,Jauru,10455,1301.89 MT,Juara,32791,22641.19 MT,Juína,39255,26189.96 MT,Juruena,11201,2778.96 MT,Juscimeira,11430,2206.13 MT,Lambari d`Oeste,5431,1763.89 MT,Lucas do Rio Verde,45556,3663.99 MT,Luciára,2224,4243.04 MT,Marcelândia,12006,12281.25 MT,Matupá,14174,5239.67 MT,Mirassol d`Oeste,25299,1076.36 MT,Nobres,15002,3892.06 MT,Nortelândia,6436,1348.88 MT,Nossa Senhora do Livramento,11609,5076.78 MT,Nova Bandeirantes,11643,9606.26 MT,Nova Brasilândia,4587,3281.88 MT,Nova Canaã do Norte,12127,5966.2 MT,Nova Guarita,4932,1114.13 MT,Nova Lacerda,5436,4735.09 MT,Nova Marilândia,2951,1939.8 MT,Nova Maringá,6590,11557.3 MT,Nova Monte verde,8093,5248.54 MT,Nova Mutum,31649,9562.66 MT,Nova Nazaré,3029,4038.06 MT,Nova Olímpia,17515,1549.82 MT,Nova Santa Helena,3468,2359.82 MT,Nova Ubiratã,9218,12706.74 MT,Nova Xavantina,19643,5573.68 MT,Novo Horizonte do Norte,3749,879.66 MT,Novo Mundo,7332,5790.8 MT,Novo Santo Antônio,2005,4393.8 MT,Novo São Joaquim,6042,5035.15 MT,Paranaíta,10684,4796.01 MT,Paranatinga,19290,24166.08 MT,Pedra Preta,15755,4108.59 MT,Peixoto de Azevedo,30812,14257.44 MT,Planalto da Serra,2726,2455.43 MT,Poconé,31779,17270.99 MT,Pontal do Araguaia,5395,2738.78 MT,Ponte Branca,1768,685.99 MT,Pontes e Lacerda,41408,8558.93 MT,Porto Alegre do Norte,10748,3972.25 MT,Porto dos Gaúchos,5449,6992.7 MT,Porto Esperidião,11031,5809.02 MT,Porto Estrela,3649,2062.76 MT,Poxoréo,17599,6909.69 MT,Primavera do Leste,52066,5471.64 MT,Querência,13033,17786.2 MT,Reserva do Cabaçal,2572,1337.04 MT,Ribeirão Cascalheira,8881,11354.81 MT,Ribeirãozinho,2199,625.58 MT,Rio Branco,5070,562.84 MT,Rondolândia,3604,12670.49 MT,Rondonópolis,195476,4159.12 MT,Rosário Oeste,17679,7475.56 MT,Salto do Céu,3908,1752.31 MT,Santa Carmem,4085,3855.36 MT,Santa Cruz do Xingu,1900,5651.75 MT,Santa Rita do Trivelato,2491,4728.2 MT,Santa Terezinha,7397,6467.37 MT,Santo Afonso,2991,1174.19 MT,Santo Antônio do Leste,3754,3600.71 MT,Santo Antônio do Leverger,18463,12261.29 MT,São Félix do Araguaia,10625,16713.46 MT,São José do Povo,3592,443.88 MT,São José do Rio Claro,17124,4536.2 MT,São José do Xingu,5240,7459.65 MT,São José dos Quatro Marcos,18998,1287.88 MT,São Pedro da Cipa,4158,342.95 MT,Sapezal,18094,13624.37 MT,Serra Nova Dourada,1365,1500.39 MT,Sinop,113099,3942.23 MT,Sorriso,66521,9329.6 MT,Tabaporã,9932,8317.43 
MT,Tangará da Serra,83431,11323.64 MT,Tapurah,10392,4510.65 MT,Terra Nova do Norte,11291,2562.23 MT,Tesouro,3418,4169.56 MT,Torixoréu,4071,2399.46 MT,União do Sul,3760,4581.91 MT,Vale de São Domingos,3052,1933.05 MT,Várzea Grande,252596,1048.21 MT,Vera,10235,2962.69 MT,Vila Bela da Santíssima Trindade,14493,13420.98 MT,Vila Rica,21382,7431.08 MS,Água Clara,14424,11031.12 MS,Alcinópolis,4569,4399.68 MS,Amambai,34730,4202.32 MS,Anastácio,23835,2949.13 MS,Anaurilândia,8493,3395.44 MS,Angélica,9185,1273.27 MS,Antônio João,8208,1145.18 MS,Aparecida do Taboado,22320,2750.15 MS,Aquidauana,45614,16957.75 MS,Aral Moreira,10251,1655.66 MS,Bandeirantes,6609,3115.68 MS,Bataguassu,19839,2415.3 MS,Batayporã,10936,1828.02 MS,Bela Vista,23181,4892.6 MS,Bodoquena,7985,2507.32 MS,Bonito,19587,4934.41 MS,Brasilândia,11826,5806.9 MS,Caarapó,25767,2089.6 MS,Camapuã,13625,6229.62 MS,Campo Grande,786797,8092.95 MS,Caracol,5398,2940.25 MS,Cassilândia,20966,3649.72 MS,Chapadão do Sul,19648,3851.0 MS,Corguinho,4862,2639.85 MS,Coronel Sapucaia,14064,1025.05 MS,Corumbá,103703,64962.72 MS,Costa Rica,19695,5371.8 MS,Coxim,32159,6409.22 MS,Deodápolis,12139,831.21 MS,Dois Irmãos do Buriti,10363,2344.59 MS,Douradina,5364,280.79 MS,Dourados,196035,4086.24 MS,Eldorado,11694,1017.79 MS,Fátima do Sul,19035,315.16 MS,Figueirão,2928,4882.87 MS,Glória de Dourados,9927,491.75 MS,Guia Lopes da Laguna,10366,1210.61 MS,Iguatemi,14875,2946.52 MS,Inocência,7669,5776.03 MS,Itaporã,20865,1321.81 MS,Itaquiraí,18614,2064.04 MS,Ivinhema,22341,2010.17 MS,Japorã,7731,419.4 MS,Jaraguari,6341,2912.82 MS,Jardim,24346,2201.51 MS,Jateí,4011,1927.95 MS,Juti,5900,1584.54 MS,Ladário,19617,340.77 MS,Laguna Carapã,6491,1734.07 MS,Maracaju,37405,5299.18 MS,Miranda,25595,5478.83 MS,Mundo Novo,17043,477.78 MS,Naviraí,46424,3193.54 MS,Nioaque,14391,3923.79 MS,Nova Alvorada do Sul,16432,4019.32 MS,Nova Andradina,45585,4776.0 MS,Novo Horizonte do Sul,4940,849.09 MS,Paranaíba,40192,5402.65 MS,Paranhos,12350,1309.16 MS,Pedro Gomes,7967,3651.18 MS,Ponta Porã,77872,5330.45 MS,Porto Murtinho,15372,17744.41 MS,Ribas do Rio Pardo,20946,17308.08 MS,Rio Brilhante,30663,3987.4 MS,Rio Negro,5036,1807.67 MS,Rio Verde de Mato Grosso,18890,8153.9 MS,Rochedo,4928,1561.06 MS,Santa Rita do Pardo,7259,6143.07 MS,São Gabriel do Oeste,22203,3864.69 MS,Selvíria,6287,3258.33 MS,Sete Quedas,10780,833.73 MS,Sidrolândia,42132,5286.41 MS,Sonora,14833,4075.42 MS,Tacuru,10215,1785.32 MS,Taquarussu,3518,1041.12 MS,Terenos,17146,2844.51 MS,Três Lagoas,101791,10206.95 MS,Vicentina,5901,310.16 MG,Abadia dos Dourados,6704,881.06 MG,Abaeté,22690,1817.07 MG,Abre Campo,13311,470.55 MG,Acaiaca,3920,101.89 MG,Açucena,10276,815.42 MG,Água Boa,15195,1320.27 MG,Água Comprida,2025,492.21 MG,Aguanil,4054,232.09 MG,Águas Formosas,18479,820.08 MG,Águas Vermelhas,12722,1259.28 MG,Aimorés,24959,1348.78 MG,Aiuruoca,6162,649.68 MG,Alagoa,2709,161.36 MG,Albertina,2913,58.01 MG,Além Paraíba,34349,510.35 MG,Alfenas,73774,850.45 MG,Alfredo Vasconcelos,6075,130.82 MG,Almenara,38775,2294.43 MG,Alpercata,7172,166.97 MG,Alpinópolis,18488,454.75 MG,Alterosa,13717,362.01 MG,Alto Caparaó,5297,103.69 MG,Alto Jequitibá,8318,152.27 MG,Alto Rio Doce,12159,518.05 MG,Alvarenga,4444,278.17 MG,Alvinópolis,15261,599.44 MG,Alvorada de Minas,3546,374.01 MG,Amparo do Serra,5053,145.91 MG,Andradas,37270,469.37 MG,Andrelândia,12173,1005.29 MG,Angelândia,8003,185.21 MG,Antônio Carlos,11114,529.92 MG,Antônio Dias,9565,787.06 MG,Antônio Prado de Minas,1671,83.8 MG,Araçaí,2243,186.54 MG,Aracitaba,2058,106.61 MG,Araçuaí,36013,2236.28 
MG,Araguari,109801,2729.51 MG,Arantina,2823,89.42 MG,Araponga,8152,303.79 MG,Araporã,6144,295.84 MG,Arapuá,2775,173.89 MG,Araújos,7883,245.52 MG,Araxá,93672,1164.36 MG,Arceburgo,9509,162.88 MG,Arcos,36597,509.87 MG,Areado,13731,283.12 MG,Argirita,2901,159.38 MG,Aricanduva,4770,243.33 MG,Arinos,17674,5279.42 MG,Astolfo Dutra,13049,158.89 MG,Ataléia,14455,1836.98 MG,Augusto de Lima,4960,1254.83 MG,Baependi,18307,750.55 MG,Baldim,7913,556.27 MG,Bambuí,22734,1455.82 MG,Bandeira,4987,483.79 MG,Bandeira do Sul,5338,47.07 MG,Barão de Cocais,28442,340.6 MG,Barão de Monte Alto,5720,198.31 MG,Barbacena,126284,759.19 MG,Barra Longa,6143,383.63 MG,Barroso,19599,82.07 MG,Bela Vista de Minas,10004,109.14 MG,Belmiro Braga,3403,393.13 MG,Belo Horizonte,2375151,331.4 MG,Belo Oriente,23397,334.91 MG,Belo Vale,7536,365.92 MG,Berilo,12300,587.11 MG,Berizal,4370,488.76 MG,Bertópolis,4498,427.8 MG,Betim,378089,342.85 MG,Bias Fortes,3793,283.54 MG,Bicas,13653,140.08 MG,Biquinhas,2630,458.95 MG,Boa Esperança,38516,860.67 MG,Bocaina de Minas,5007,503.79 MG,Bocaiúva,46654,3227.63 MG,Bom Despacho,45624,1223.87 MG,Bom Jardim de Minas,6501,412.02 MG,Bom Jesus da Penha,3887,208.35 MG,Bom Jesus do Amparo,5491,195.61 MG,Bom Jesus do Galho,15364,592.29 MG,Bom Repouso,10457,229.85 MG,Bom Sucesso,17243,705.05 MG,Bonfim,6818,301.87 MG,Bonfinópolis de Minas,5865,1850.49 MG,Bonito de Minas,9673,3904.91 MG,Borda da Mata,17118,301.11 MG,Botelhos,14920,334.09 MG,Botumirim,6497,1568.88 MG,Brás Pires,4637,223.35 MG,Brasilândia de Minas,14226,2509.69 MG,Brasília de Minas,31213,1399.48 MG,Braúnas,5030,378.32 MG,Brazópolis,14661,367.69 MG,Brumadinho,33973,639.43 MG,Bueno Brandão,10892,356.15 MG,Buenópolis,10292,1599.88 MG,Bugre,3992,161.91 MG,Buritis,22737,5225.19 MG,Buritizeiro,26922,7218.4 MG,Cabeceira Grande,6453,1031.41 MG,Cabo Verde,13823,368.21 MG,Cachoeira da Prata,3654,61.38 MG,Cachoeira de Minas,11034,304.24 MG,Cachoeira de Pajeú,8959,695.67 MG,Cachoeira Dourada,2505,200.93 MG,Caetanópolis,10218,156.04 MG,Caeté,40750,542.57 MG,Caiana,4968,106.47 MG,Cajuri,4047,83.04 MG,Caldas,13633,711.41 MG,Camacho,3154,223.0 MG,Camanducaia,21080,528.48 MG,Cambuí,26488,244.57 MG,Cambuquira,12602,246.38 MG,Campanário,3564,442.4 MG,Campanha,15433,335.59 MG,Campestre,20686,577.84 MG,Campina Verde,19324,3650.75 MG,Campo Azul,3684,505.91 MG,Campo Belo,51544,528.23 MG,Campo do Meio,11476,275.43 MG,Campo Florido,6870,1264.25 MG,Campos Altos,14206,710.65 MG,Campos Gerais,27600,769.5 MG,Cana Verde,5589,212.72 MG,Canaã,4628,174.9 MG,Canápolis,11365,839.74 MG,Candeias,14595,720.51 MG,Cantagalo,4195,141.86 MG,Caparaó,5209,130.69 MG,Capela Nova,4755,111.07 MG,Capelinha,34803,965.37 MG,Capetinga,7089,297.94 MG,Capim Branco,8881,95.33 MG,Capinópolis,15290,620.72 MG,Capitão Andrade,4925,279.09 MG,Capitão Enéas,14206,971.58 MG,Capitólio,8183,521.8 MG,Caputira,9030,187.7 MG,Caraí,22343,1242.2 MG,Caranaíba,3288,159.95 MG,Carandaí,23346,485.73 MG,Carangola,32296,353.4 MG,Caratinga,85239,1258.78 MG,Carbonita,9148,1456.1 MG,Careaçu,6298,181.01 MG,Carlos Chagas,20069,3202.98 MG,Carmésia,2446,259.1 MG,Carmo da Cachoeira,11836,506.33 MG,Carmo da Mata,10927,357.18 MG,Carmo de Minas,13750,322.29 MG,Carmo do Cajuru,20012,455.81 MG,Carmo do Paranaíba,29735,1307.86 MG,Carmo do Rio Claro,20426,1065.69 MG,Carmópolis de Minas,17048,400.01 MG,Carneirinho,9471,2063.32 MG,Carrancas,3948,727.89 MG,Carvalhópolis,3341,81.1 MG,Carvalhos,4556,282.25 MG,Casa Grande,2244,157.73 MG,Cascalho Rico,2857,367.31 MG,Cássia,17412,665.8 MG,Cataguases,69757,491.77 MG,Catas Altas,4846,240.04 
MG,Catas Altas da Noruega,3462,141.62 MG,Catuji,6708,419.53 MG,Catuti,5102,287.81 MG,Caxambu,21705,100.48 MG,Cedro do Abaeté,1210,283.21 MG,Central de Minas,6772,204.33 MG,Centralina,10266,327.19 MG,Chácara,2792,152.81 MG,Chalé,5645,212.67 MG,Chapada do Norte,15189,830.97 MG,Chapada Gaúcha,10805,3255.19 MG,Chiador,2785,252.94 MG,Cipotânea,6547,153.48 MG,Claraval,4542,227.63 MG,Claro dos Poções,7775,720.42 MG,Cláudio,25771,630.71 MG,Coimbra,7054,106.88 MG,Coluna,9024,348.49 MG,Comendador Gomes,2972,1041.05 MG,Comercinho,8298,654.96 MG,Conceição da Aparecida,9820,352.52 MG,Conceição da Barra de Minas,3954,273.01 MG,Conceição das Alagoas,23043,1340.25 MG,Conceição das Pedras,2749,102.21 MG,Conceição de Ipanema,4456,253.94 MG,Conceição do Mato Dentro,17908,1726.83 MG,Conceição do Pará,5158,250.33 MG,Conceição do Rio Verde,12949,369.68 MG,Conceição dos Ouros,10388,182.97 MG,Cônego Marinho,7101,1642.0 MG,Confins,5936,42.36 MG,Congonhal,10468,205.13 MG,Congonhas,48519,304.07 MG,Congonhas do Norte,4943,398.85 MG,Conquista,6526,618.36 MG,Conselheiro Lafaiete,116512,370.25 MG,Conselheiro Pena,22242,1483.88 MG,Consolação,1727,86.39 MG,Contagem,603442,195.27 MG,Coqueiral,9289,296.16 MG,Coração de Jesus,26033,2225.22 MG,Cordisburgo,8667,823.65 MG,Cordislândia,3435,179.54 MG,Corinto,23914,2525.4 MG,Coroaci,10270,576.27 MG,Coromandel,27547,3313.12 MG,Coronel Fabriciano,103694,221.25 MG,Coronel Murta,9117,815.41 MG,Coronel Pacheco,2983,131.51 MG,Coronel Xavier Chaves,3301,140.95 MG,Córrego Danta,3391,657.43 MG,Córrego do Bom Jesus,3730,123.65 MG,Córrego Fundo,5790,101.11 MG,Córrego Novo,3127,205.39 MG,Couto de Magalhães de Minas,4204,485.65 MG,Crisólita,6047,966.2 MG,Cristais,11286,628.43 MG,Cristália,5760,840.7 MG,Cristiano Otoni,5007,132.87 MG,Cristina,10210,311.33 MG,Crucilândia,4757,167.16 MG,Cruzeiro da Fortaleza,3934,188.13 MG,Cruzília,14591,522.42 MG,Cuparaque,4680,226.75 MG,Curral de Dentro,6913,568.26 MG,Curvelo,74219,3298.79 MG,Datas,5211,310.1 MG,Delfim Moreira,7971,408.47 MG,Delfinópolis,6830,1378.42 MG,Delta,8089,102.84 MG,Descoberto,4768,213.17 MG,Desterro de Entre Rios,7002,377.17 MG,Desterro do Melo,3015,142.28 MG,Diamantina,45880,3891.66 MG,Diogo de Vasconcelos,3848,165.09 MG,Dionísio,8739,344.44 MG,Divinésia,3293,116.97 MG,Divino,19133,337.78 MG,Divino das Laranjeiras,4937,342.25 MG,Divinolândia de Minas,7024,133.12 MG,Divinópolis,213016,708.12 MG,Divisa Alegre,5884,117.8 MG,Divisa Nova,5763,216.96 MG,Divisópolis,8974,572.93 MG,Dom Bosco,3814,817.38 MG,Dom Cavati,5209,59.52 MG,Dom Joaquim,4535,398.82 MG,Dom Silvério,5196,194.97 MG,Dom Viçoso,2994,113.92 MG,Dona Eusébia,6001,70.23 MG,Dores de Campos,9299,124.84 MG,Dores de Guanhães,5223,382.12 MG,Dores do Indaiá,13778,1111.2 MG,Dores do Turvo,4462,231.17 MG,Doresópolis,1440,152.91 MG,Douradoquara,1841,312.88 MG,Durandé,7423,217.46 MG,Elói Mendes,25220,499.54 MG,Engenheiro Caldas,10280,187.06 MG,Engenheiro Navarro,7122,608.31 MG,Entre Folhas,5175,85.21 MG,Entre Rios de Minas,14242,456.8 MG,Ervália,17946,357.49 MG,Esmeraldas,60271,910.38 MG,Espera Feliz,22856,317.64 MG,Espinosa,31113,1868.97 MG,Espírito Santo do Dourado,4429,263.88 MG,Estiva,10845,243.87 MG,Estrela Dalva,2470,131.37 MG,Estrela do Indaiá,3516,635.98 MG,Estrela do Sul,7446,822.45 MG,Eugenópolis,10540,309.4 MG,Ewbank da Câmara,3753,103.83 MG,Extrema,28599,244.58 MG,Fama,2350,86.02 MG,Faria Lemos,3376,165.22 MG,Felício dos Santos,5142,357.62 MG,Felisburgo,6877,596.22 MG,Felixlândia,14121,1554.63 MG,Fernandes Tourinho,3030,151.88 MG,Ferros,10837,1088.8 MG,Fervedouro,10349,357.68 
MG,Florestal,6600,191.42 MG,Formiga,65128,1501.92 MG,Formoso,8177,3685.7 MG,Fortaleza de Minas,4098,218.79 MG,Fortuna de Minas,2705,198.71 MG,Francisco Badaró,10248,461.35 MG,Francisco Dumont,4863,1576.13 MG,Francisco Sá,24912,2747.29 MG,Franciscópolis,5800,717.09 MG,Frei Gaspar,5879,626.67 MG,Frei Inocêncio,8920,469.56 MG,Frei Lagonegro,3329,167.47 MG,Fronteira,14041,199.99 MG,Fronteira dos Vales,4687,320.76 MG,Fruta de Leite,5940,762.79 MG,Frutal,53468,2426.97 MG,Funilândia,3855,199.8 MG,Galiléia,6951,720.36 MG,Gameleiras,5139,1733.2 MG,Glaucilândia,2962,145.86 MG,Goiabeira,3053,112.44 MG,Goianá,3659,152.04 MG,Gonçalves,4220,187.35 MG,Gonzaga,5921,209.35 MG,Gouveia,11681,866.6 MG,Governador Valadares,263689,2342.32 MG,Grão Mogol,15024,3885.29 MG,Grupiara,1373,193.14 MG,Guanhães,31262,1075.12 MG,Guapé,13872,934.35 MG,Guaraciaba,10223,348.6 MG,Guaraciama,4718,390.26 MG,Guaranésia,18714,294.83 MG,Guarani,8678,264.19 MG,Guarará,3929,88.66 MG,Guarda-Mor,6565,2069.79 MG,Guaxupé,49430,286.4 MG,Guidoval,7206,158.38 MG,Guimarânia,7265,366.83 MG,Guiricema,8707,293.58 MG,Gurinhatã,6137,1849.14 MG,Heliodora,6121,153.95 MG,Iapu,10315,340.58 MG,Ibertioga,5036,346.24 MG,Ibiá,23218,2704.13 MG,Ibiaí,7839,874.76 MG,Ibiracatu,6155,353.41 MG,Ibiraci,12176,562.09 MG,Ibirité,158954,72.57 MG,Ibitiúra de Minas,3382,68.32 MG,Ibituruna,2866,153.11 MG,Icaraí de Minas,10746,625.66 MG,Igarapé,34851,110.26 MG,Igaratinga,9264,218.34 MG,Iguatama,8029,628.2 MG,Ijaci,5859,105.25 MG,Ilicínea,11488,376.34 MG,Imbé de Minas,6424,196.74 MG,Inconfidentes,6908,149.61 MG,Indaiabira,7330,1004.15 MG,Indianópolis,6190,830.03 MG,Ingaí,2629,305.59 MG,Inhapim,24294,858.02 MG,Inhaúma,5760,245.0 MG,Inimutaba,6724,524.47 MG,Ipaba,16708,113.13 MG,Ipanema,18170,456.64 MG,Ipatinga,239468,164.88 MG,Ipiaçu,4107,466.02 MG,Ipuiúna,9521,298.2 MG,Iraí de Minas,6467,356.26 MG,Itabira,109783,1253.7 MG,Itabirinha,10692,208.98 MG,Itabirito,45449,542.61 MG,Itacambira,4988,1788.45 MG,Itacarambi,17720,1225.27 MG,Itaguara,12372,410.47 MG,Itaipé,11798,480.83 MG,Itajubá,90658,294.84 MG,Itamarandiba,32175,2735.57 MG,Itamarati de Minas,4079,94.57 MG,Itambacuri,22809,1419.21 MG,Itambé do Mato Dentro,2283,380.34 MG,Itamogi,10349,243.69 MG,Itamonte,14003,431.79 MG,Itanhandu,14175,143.36 MG,Itanhomi,11856,488.84 MG,Itaobim,21001,679.02 MG,Itapagipe,13656,1802.44 MG,Itapecerica,21377,1040.52 MG,Itapeva,8664,177.35 MG,Itatiaiuçu,9928,295.15 MG,Itaú de Minas,14945,153.42 MG,Itaúna,85463,495.77 MG,Itaverava,5799,284.22 MG,Itinga,14407,1649.62 MG,Itueta,5830,452.68 MG,Ituiutaba,97171,2598.05 MG,Itumirim,6139,234.8 MG,Iturama,34456,1404.66 MG,Itutinga,3913,372.02 MG,Jaboticatubas,17134,1114.97 MG,Jacinto,12134,1393.61 MG,Jacuí,7502,409.23 MG,Jacutinga,22772,347.75 MG,Jaguaraçu,2990,163.76 MG,Jaíba,33587,2626.33 MG,Jampruca,5067,517.1 MG,Janaúba,66803,2181.32 MG,Januária,65463,6661.67 MG,Japaraíba,3939,172.14 MG,Japonvar,8298,375.23 MG,Jeceaba,5395,236.25 MG,Jenipapo de Minas,7116,284.45 MG,Jequeri,12848,547.9 MG,Jequitaí,8005,1268.44 MG,Jequitibá,5156,445.03 MG,Jequitinhonha,24131,3514.22 MG,Jesuânia,4768,153.85 MG,Joaíma,14941,1664.19 MG,Joanésia,5425,233.29 MG,João Monlevade,73610,99.16 MG,João Pinheiro,45260,10727.47 MG,Joaquim Felício,4305,790.94 MG,Jordânia,10324,546.71 MG,José Gonçalves de Minas,4553,381.33 MG,José Raydan,4376,180.82 MG,Josenópolis,4566,541.39 MG,Juatuba,22202,99.54 MG,Juiz de Fora,516247,1435.66 MG,Juramento,4113,431.63 MG,Juruaia,9238,220.35 MG,Juvenília,5708,1064.7 MG,Ladainha,16994,866.29 MG,Lagamar,7600,1474.56 MG,Lagoa da 
Prata,45984,439.98 MG,Lagoa dos Patos,4225,600.55 MG,Lagoa Dourada,12256,476.69 MG,Lagoa Formosa,17161,840.92 MG,Lagoa Grande,8631,1236.3 MG,Lagoa Santa,52520,229.27 MG,Lajinha,19609,431.92 MG,Lambari,19554,213.11 MG,Lamim,3452,118.6 MG,Laranjal,6465,204.88 MG,Lassance,6484,3204.22 MG,Lavras,92200,564.74 MG,Leandro Ferreira,3205,352.11 MG,Leme do Prado,4804,280.04 MG,Leopoldina,51130,943.08 MG,Liberdade,5346,401.34 MG,Lima Duarte,16149,848.56 MG,Limeira do Oeste,6890,1319.04 MG,Lontra,8397,258.87 MG,Luisburgo,6234,145.42 MG,Luislândia,6400,411.71 MG,Luminárias,5422,500.14 MG,Luz,17486,1171.66 MG,Machacalis,6976,332.38 MG,Machado,38688,585.96 MG,Madre de Deus de Minas,4904,492.91 MG,Malacacheta,18776,727.89 MG,Mamonas,6321,291.43 MG,Manga,19813,1950.18 MG,Manhuaçu,79574,628.32 MG,Manhumirim,21382,182.9 MG,Mantena,27111,685.21 MG,Mar de Espanha,11749,371.6 MG,Maravilhas,7163,261.6 MG,Maria da Fé,14216,202.9 MG,Mariana,54219,1194.21 MG,Marilac,4219,158.81 MG,Mário Campos,13192,35.2 MG,Maripá de Minas,2788,77.34 MG,Marliéria,4012,545.81 MG,Marmelópolis,2968,107.9 MG,Martinho Campos,12611,1048.1 MG,Martins Soares,7173,113.27 MG,Mata Verde,7874,227.52 MG,Materlândia,4595,280.53 MG,Mateus Leme,27856,302.71 MG,Mathias Lobato,3370,172.3 MG,Matias Barbosa,13435,157.11 MG,Matias Cardoso,9979,1949.74 MG,Matipó,17639,266.99 MG,Mato Verde,12684,472.25 MG,Matozinhos,33955,252.28 MG,Matutina,3761,260.96 MG,Medeiros,3444,946.44 MG,Medina,21026,1435.9 MG,Mendes Pimentel,6331,305.51 MG,Mercês,10368,348.27 MG,Mesquita,6069,274.94 MG,Minas Novas,30794,1812.4 MG,Minduri,3840,219.77 MG,Mirabela,13042,723.28 MG,Miradouro,10251,301.67 MG,Miraí,13808,320.7 MG,Miravânia,4549,602.13 MG,Moeda,4689,155.11 MG,Moema,7028,202.71 MG,Monjolos,2360,650.91 MG,Monsenhor Paulo,8161,216.54 MG,Montalvânia,15862,1503.79 MG,Monte Alegre de Minas,19619,2595.96 MG,Monte Azul,21994,994.23 MG,Monte Belo,13061,421.28 MG,Monte Carmelo,45772,1343.04 MG,Monte Formoso,4656,385.55 MG,Monte Santo de Minas,21234,594.63 MG,Monte Sião,21203,291.59 MG,Montes Claros,361915,3568.94 MG,Montezuma,7464,1130.42 MG,Morada Nova de Minas,8255,2084.28 MG,Morro da Garça,2660,414.77 MG,Morro do Pilar,3399,477.55 MG,Munhoz,6257,191.56 MG,Muriaé,100765,841.69 MG,Mutum,26661,1250.82 MG,Muzambinho,20430,409.95 MG,Nacip Raydan,3154,233.49 MG,Nanuque,40834,1517.94 MG,Naque,6341,127.17 MG,Natalândia,3280,468.66 MG,Natércia,4658,188.72 MG,Nazareno,7954,329.13 MG,Nepomuceno,25733,582.55 MG,Ninheira,9815,1108.23 MG,Nova Belém,3732,146.78 MG,Nova Era,17528,361.93 MG,Nova Lima,80998,429.16 MG,Nova Módica,3790,375.97 MG,Nova Ponte,12812,1111.01 MG,Nova Porteirinha,7398,120.94 MG,Nova Resende,15374,390.15 MG,Nova Serrana,73699,282.37 MG,Nova União,5555,172.13 MG,Novo Cruzeiro,30725,1702.98 MG,Novo Oriente de Minas,10339,755.15 MG,Novorizonte,4963,271.87 MG,Olaria,1976,178.24 MG,Olhos-d`Água,5267,2092.08 MG,Olímpio Noronha,2533,54.63 MG,Oliveira,39466,897.29 MG,Oliveira Fortes,2123,111.13 MG,Onça de Pitangui,3055,246.98 MG,Oratórios,4493,89.07 MG,Orizânia,7284,121.8 MG,Ouro Branco,35268,258.73 MG,Ouro Fino,31568,533.66 MG,Ouro Preto,70281,1245.87 MG,Ouro Verde de Minas,6016,175.48 MG,Padre Carvalho,5834,446.33 MG,Padre Paraíso,18849,544.38 MG,Pai Pedro,5934,839.81 MG,Paineiras,4631,637.31 MG,Pains,8014,421.86 MG,Paiva,1558,58.42 MG,Palma,6545,316.49 MG,Palmópolis,6931,433.15 MG,Papagaios,14175,553.58 MG,Pará de Minas,84215,551.25 MG,Paracatu,84718,8229.6 MG,Paraguaçu,20245,424.3 MG,Paraisópolis,19379,331.24 MG,Paraopeba,22563,625.62 MG,Passa Quatro,15582,277.22 MG,Passa 
Tempo,8197,429.17 MG,Passa-Vinte,2079,246.56 MG,Passabém,1766,94.18 MG,Passos,106290,1338.07 MG,Patis,5579,444.2 MG,Patos de Minas,138710,3189.77 MG,Patrocínio,82471,2874.34 MG,Patrocínio do Muriaé,5287,108.25 MG,Paula Cândido,9271,268.32 MG,Paulistas,4918,220.56 MG,Pavão,8589,601.19 MG,Peçanha,17260,996.65 MG,Pedra Azul,23839,1594.65 MG,Pedra Bonita,6673,173.93 MG,Pedra do Anta,3365,163.45 MG,Pedra do Indaiá,3875,347.92 MG,Pedra Dourada,2191,69.99 MG,Pedralva,11467,217.99 MG,Pedras de Maria da Cruz,10315,1525.49 MG,Pedrinópolis,3490,357.89 MG,Pedro Leopoldo,58740,292.95 MG,Pedro Teixeira,1785,112.96 MG,Pequeri,3165,90.83 MG,Pequi,4076,203.99 MG,Perdigão,8912,249.32 MG,Perdizes,14404,2450.82 MG,Perdões,20087,270.66 MG,Periquito,7036,228.91 MG,Pescador,4128,317.46 MG,Piau,2841,192.2 MG,Piedade de Caratinga,7110,109.35 MG,Piedade de Ponte Nova,4062,83.73 MG,Piedade do Rio Grande,4709,322.81 MG,Piedade dos Gerais,4640,259.64 MG,Pimenta,8236,414.97 MG,Pingo-d`Água,4420,66.57 MG,Pintópolis,7211,1228.74 MG,Piracema,6406,280.34 MG,Pirajuba,4656,337.98 MG,Piranga,17232,658.81 MG,Piranguçu,5217,203.62 MG,Piranguinho,8016,124.8 MG,Pirapetinga,10364,190.68 MG,Pirapora,53368,549.51 MG,Piraúba,10862,144.29 MG,Pitangui,25311,569.61 MG,Piumhi,31883,902.47 MG,Planura,10384,317.52 MG,Poço Fundo,15959,474.24 MG,Poços de Caldas,152435,547.26 MG,Pocrane,8986,691.07 MG,Pompéu,29105,2551.07 MG,Ponte Nova,57390,470.64 MG,Ponto Chique,3966,602.8 MG,Ponto dos Volantes,11345,1212.41 MG,Porteirinha,37627,1749.68 MG,Porto Firme,10417,284.78 MG,Poté,15667,625.11 MG,Pouso Alegre,130615,543.07 MG,Pouso Alto,6213,263.03 MG,Prados,8391,264.12 MG,Prata,25802,4847.54 MG,Pratápolis,8807,215.52 MG,Pratinha,3265,622.48 MG,Presidente Bernardes,5537,236.8 MG,Presidente Juscelino,3908,695.88 MG,Presidente Kubitschek,2959,189.24 MG,Presidente Olegário,18577,3503.8 MG,Prudente de Morais,9573,124.19 MG,Quartel Geral,3303,556.44 MG,Queluzito,1861,153.56 MG,Raposos,15342,72.07 MG,Raul Soares,23818,763.36 MG,Recreio,10299,234.3 MG,Reduto,6569,151.86 MG,Resende Costa,10913,618.31 MG,Resplendor,17089,1081.8 MG,Ressaquinha,4711,184.61 MG,Riachinho,8007,1719.27 MG,Riacho dos Machados,9360,1315.54 MG,Ribeirão das Neves,296317,155.54 MG,Ribeirão Vermelho,3826,49.25 MG,Rio Acima,9090,229.81 MG,Rio Casca,14201,384.36 MG,Rio do Prado,5217,479.82 MG,Rio Doce,2465,112.09 MG,Rio Espera,6070,238.6 MG,Rio Manso,5276,231.54 MG,Rio Novo,8712,209.31 MG,Rio Paranaíba,11885,1352.35 MG,Rio Pardo de Minas,29099,3117.44 MG,Rio Piracicaba,14149,373.04 MG,Rio Pomba,17110,252.42 MG,Rio Preto,5292,348.14 MG,Rio Vermelho,13645,986.56 MG,Ritápolis,4925,404.81 MG,Rochedo de Minas,2116,79.4 MG,Rodeiro,6867,72.67 MG,Romaria,3596,407.56 MG,Rosário da Limeira,4247,111.16 MG,Rubelita,7772,1110.3 MG,Rubim,9919,965.17 MG,Sabará,126269,302.17 MG,Sabinópolis,15704,919.81 MG,Sacramento,23896,3073.27 MG,Salinas,39178,1887.65 MG,Salto da Divisa,6859,937.92 MG,Santa Bárbara,27876,684.06 MG,Santa Bárbara do Leste,7682,107.4 MG,Santa Bárbara do Monte Verde,2788,417.83 MG,Santa Bárbara do Tugúrio,4570,194.56 MG,Santa Cruz de Minas,7865,3.57 MG,Santa Cruz de Salinas,4397,589.57 MG,Santa Cruz do Escalvado,4992,258.73 MG,Santa Efigênia de Minas,4600,131.97 MG,Santa Fé de Minas,3968,2917.45 MG,Santa Helena de Minas,6055,276.43 MG,Santa Juliana,11337,723.78 MG,Santa Luzia,202942,235.33 MG,Santa Margarida,15011,255.73 MG,Santa Maria de Itabira,10552,597.44 MG,Santa Maria do Salto,5284,440.61 MG,Santa Maria do Suaçuí,14395,624.05 MG,Santa Rita de Caldas,9027,503.01 MG,Santa Rita de 
Ibitipoca,3583,324.23 MG,Santa Rita de Jacutinga,4993,420.94 MG,Santa Rita de Minas,6547,68.15 MG,Santa Rita do Itueto,5697,485.08 MG,Santa Rita do Sapucaí,37754,352.97 MG,Santa Rosa da Serra,3224,284.33 MG,Santa Vitória,18138,3001.36 MG,Santana da Vargem,7231,172.44 MG,Santana de Cataguases,3622,161.49 MG,Santana de Pirapama,8009,1255.83 MG,Santana do Deserto,3860,182.66 MG,Santana do Garambéu,2234,203.07 MG,Santana do Jacaré,4607,106.17 MG,Santana do Manhuaçu,8582,347.36 MG,Santana do Paraíso,27265,276.07 MG,Santana do Riacho,4023,677.21 MG,Santana dos Montes,3822,196.57 MG,Santo Antônio do Amparo,17345,488.89 MG,Santo Antônio do Aventureiro,3538,202.03 MG,Santo Antônio do Grama,4085,130.21 MG,Santo Antônio do Itambé,4135,305.74 MG,Santo Antônio do Jacinto,11775,503.38 MG,Santo Antônio do Monte,25975,1125.78 MG,Santo Antônio do Retiro,6955,796.29 MG,Santo Antônio do Rio Abaixo,1777,107.27 MG,Santo Hipólito,3238,430.66 MG,Santos Dumont,46284,637.37 MG,São Bento Abade,4577,80.4 MG,São Brás do Suaçuí,3513,110.02 MG,São Domingos das Dores,5408,60.87 MG,São Domingos do Prata,17357,743.77 MG,São Félix de Minas,3382,162.56 MG,São Francisco,53828,3308.1 MG,São Francisco de Paula,6483,316.82 MG,São Francisco de Sales,5776,1128.86 MG,São Francisco do Glória,5178,164.61 MG,São Geraldo,10263,185.58 MG,São Geraldo da Piedade,4389,152.34 MG,São Geraldo do Baixio,3486,280.95 MG,São Gonçalo do Abaeté,6264,2692.17 MG,São Gonçalo do Pará,10398,265.73 MG,São Gonçalo do Rio Abaixo,9777,363.81 MG,São Gonçalo do Rio Preto,3056,314.46 MG,São Gonçalo do Sapucaí,23906,516.68 MG,São Gotardo,31819,866.09 MG,São João Batista do Glória,6887,547.91 MG,São João da Lagoa,4656,998.02 MG,São João da Mata,2731,120.54 MG,São João da Ponte,25358,1851.1 MG,São João das Missões,11715,678.27 MG,São João del Rei,84469,1464.33 MG,São João do Manhuaçu,10245,143.1 MG,São João do Manteninha,5188,137.93 MG,São João do Oriente,7874,120.12 MG,São João do Pacuí,4060,415.92 MG,São João do Paraíso,22319,1925.58 MG,São João Evangelista,15553,478.18 MG,São João Nepomuceno,25057,407.43 MG,São Joaquim de Bicas,25537,71.56 MG,São José da Barra,6778,314.25 MG,São José da Lapa,19799,47.93 MG,São José da Safira,4075,213.88 MG,São José da Varginha,4198,205.5 MG,São José do Alegre,3996,88.79 MG,São José do Divino,3834,328.7 MG,São José do Goiabal,5636,184.51 MG,São José do Jacuri,6553,345.15 MG,São José do Mantimento,2592,54.7 MG,São Lourenço,41657,58.02 MG,São Miguel do Anta,6760,152.11 MG,São Pedro da União,5040,260.83 MG,São Pedro do Suaçuí,5570,308.11 MG,São Pedro dos Ferros,8356,402.76 MG,São Romão,10276,2434.0 MG,São Roque de Minas,6686,2098.87 MG,São Sebastião da Bela Vista,4948,167.16 MG,São Sebastião da Vargem Alegre,2798,73.63 MG,São Sebastião do Anta,5739,80.62 MG,São Sebastião do Maranhão,10647,517.83 MG,São Sebastião do Oeste,5805,408.09 MG,São Sebastião do Paraíso,64980,814.93 MG,São Sebastião do Rio Preto,1613,128.0 MG,São Sebastião do Rio Verde,2110,90.85 MG,São Thomé das Letras,6655,369.75 MG,São Tiago,10561,572.4 MG,São Tomás de Aquino,7093,277.93 MG,São Vicente de Minas,7008,392.65 MG,Sapucaí-Mirim,6241,285.08 MG,Sardoá,5594,141.9 MG,Sarzedo,25814,62.13 MG,Sem-Peixe,2847,176.63 MG,Senador Amaral,5219,151.1 MG,Senador Cortes,1988,98.34 MG,Senador Firmino,7230,166.5 MG,Senador José Bento,1868,93.89 MG,Senador Modestino Gonçalves,4574,952.06 MG,Senhora de Oliveira,5683,170.75 MG,Senhora do Porto,3497,381.33 MG,Senhora dos Remédios,10196,237.82 MG,Sericita,7128,166.01 MG,Seritinga,1789,114.77 MG,Serra Azul de Minas,4220,218.6 
MG,Serra da Saudade,815,335.66 MG,Serra do Salitre,10549,1295.27 MG,Serra dos Aimorés,8412,213.55 MG,Serrania,7542,209.27 MG,Serranópolis de Minas,4425,551.95 MG,Serranos,1995,213.17 MG,Serro,20835,1217.81 MG,Sete Lagoas,214152,537.64 MG,Setubinha,10885,534.66 MG,Silveirânia,2192,157.46 MG,Silvianópolis,6027,312.17 MG,Simão Pereira,2537,135.69 MG,Simonésia,18298,486.54 MG,Sobrália,5830,206.79 MG,Soledade de Minas,5676,196.87 MG,Tabuleiro,4079,211.08 MG,Taiobeiras,30917,1194.53 MG,Taparuba,3137,193.08 MG,Tapira,4112,1179.25 MG,Tapiraí,1873,407.92 MG,Taquaraçu de Minas,3794,329.24 MG,Tarumirim,14293,731.75 MG,Teixeiras,11355,166.74 MG,Teófilo Otoni,134745,3242.27 MG,Timóteo,81243,144.38 MG,Tiradentes,6961,83.05 MG,Tiros,6906,2091.77 MG,Tocantins,15823,173.87 MG,Tocos do Moji,3950,114.71 MG,Toledo,5764,136.78 MG,Tombos,9537,285.13 MG,Três Corações,72765,828.04 MG,Três Marias,28318,2678.25 MG,Três Pontas,53860,689.79 MG,Tumiritinga,6293,500.07 MG,Tupaciguara,24188,1823.96 MG,Turmalina,18055,1153.11 MG,Turvolândia,4658,221.0 MG,Ubá,101519,407.45 MG,Ubaí,11681,820.52 MG,Ubaporanga,12040,189.05 MG,Uberaba,295988,4523.96 MG,Uberlândia,604013,4115.21 MG,Umburatiba,2705,405.83 MG,Unaí,77565,8447.11 MG,União de Minas,4418,1147.41 MG,Uruana de Minas,3235,598.5 MG,Urucânia,10291,138.79 MG,Urucuia,13604,2076.94 MG,Vargem Alegre,6461,116.66 MG,Vargem Bonita,2163,409.89 MG,Vargem Grande do Rio Pardo,4733,491.51 MG,Varginha,123081,395.4 MG,Varjão de Minas,6054,651.56 MG,Várzea da Palma,35809,2220.28 MG,Varzelândia,19116,814.99 MG,Vazante,19723,1913.4 MG,Verdelândia,8346,1570.58 MG,Veredinha,5549,631.69 MG,Veríssimo,3483,1031.82 MG,Vermelho Novo,4689,115.24 MG,Vespasiano,104527,71.22 MG,Viçosa,72220,299.42 MG,Vieiras,3731,112.69 MG,Virgem da Lapa,13619,868.91 MG,Virgínia,8623,326.52 MG,Virginópolis,10572,439.88 MG,Virgolândia,5658,281.02 MG,Visconde do Rio Branco,37942,243.35 MG,Volta Grande,5070,208.13 MG,Wenceslau Braz,2553,102.49 PA,Abaetetuba,141100,1610.61 PA,Abel Figueiredo,6780,614.27 PA,Acará,53569,4343.81 PA,Afuá,35042,8372.8 PA,Água Azul do Norte,25057,7113.96 PA,Alenquer,52626,23645.45 PA,Almeirim,33614,72954.8 PA,Altamira,99075,159533.73 PA,Anajás,24759,6921.75 PA,Ananindeua,471980,190.5 PA,Anapu,20543,11895.51 PA,Augusto Corrêa,40497,1091.54 PA,Aurora do Pará,26546,1811.84 PA,Aveiro,15849,17074.04 PA,Bagre,23864,4397.32 PA,Baião,36882,3758.3 PA,Bannach,3431,2956.65 PA,Barcarena,99859,1310.34 PA,Belém,1393399,1059.41 PA,Belterra,16318,4398.42 PA,Benevides,51651,187.83 PA,Bom Jesus do Tocantins,15298,2816.48 PA,Bonito,13630,586.74 PA,Bragança,113227,2091.93 PA,Brasil Novo,15690,6362.58 PA,Brejo Grande do Araguaia,7317,1288.48 PA,Breu Branco,52493,3941.94 PA,Breves,92860,9550.51 PA,Bujaru,25695,1005.17 PA,Cachoeira do Arari,20443,3100.26 PA,Cachoeira do Piriá,26484,2461.97 PA,Cametá,120896,3081.37 PA,Canaã dos Carajás,26716,3146.41 PA,Capanema,63639,614.69 PA,Capitão Poço,51893,2899.55 PA,Castanhal,173149,1028.89 PA,Chaves,21005,13084.96 PA,Colares,11381,609.79 PA,Conceição do Araguaia,45557,5829.48 PA,Concórdia do Pará,28216,690.95 PA,Cumaru do Norte,10466,17085.0 PA,Curionópolis,18288,2368.74 PA,Curralinho,28549,3617.25 PA,Curuá,12254,1431.16 PA,Curuçá,34294,672.68 PA,Dom Eliseu,51319,5268.82 PA,Eldorado dos Carajás,31786,2956.73 PA,Faro,8177,11770.63 PA,Floresta do Araguaia,17768,3444.29 PA,Garrafão do Norte,25034,1599.03 PA,Goianésia do Pará,30436,7023.91 PA,Gurupá,29062,8540.11 PA,Igarapé-Açu,35887,785.98 PA,Igarapé-Miri,58077,1996.84 PA,Inhangapi,10037,471.45 PA,Ipixuna do Pará,51309,5215.56 
PA,Irituia,31364,1379.36 PA,Itaituba,97493,62040.71 PA,Itupiranga,51220,7880.11 PA,Jacareacanga,14103,53303.08 PA,Jacundá,51360,2008.32 PA,Juruti,47086,8306.27 PA,Limoeiro do Ajuru,25021,1490.19 PA,Mãe do Rio,27904,469.49 PA,Magalhães Barata,8115,325.27 PA,Marabá,233669,15128.42 PA,Maracanã,28376,855.66 PA,Marapanim,26605,795.99 PA,Marituba,108246,103.34 PA,Medicilândia,27328,8272.63 PA,Melgaço,24808,6774.02 PA,Mocajuba,26731,870.81 PA,Moju,70018,9094.14 PA,Monte Alegre,55462,18152.56 PA,Muaná,34204,3765.55 PA,Nova Esperança do Piriá,20158,2809.31 PA,Nova Ipixuna,14645,1564.18 PA,Nova Timboteua,13670,489.85 PA,Novo Progresso,25124,38162.13 PA,Novo Repartimento,62050,15398.71 PA,Óbidos,49333,28021.42 PA,Oeiras do Pará,28595,3852.29 PA,Oriximiná,62794,107603.29 PA,Ourém,16311,562.39 PA,Ourilândia do Norte,27359,14410.57 PA,Pacajá,39979,11832.33 PA,Palestina do Pará,7475,984.36 PA,Paragominas,97819,19342.25 PA,Parauapebas,153908,6886.21 PA,Pau d`Arco,6033,1671.42 PA,Peixe-Boi,7854,450.22 PA,Piçarra,12697,3312.66 PA,Placas,23934,7173.19 PA,Ponta de Pedras,25999,3365.15 PA,Portel,52172,25384.96 PA,Porto de Moz,33956,17423.02 PA,Prainha,29349,14786.99 PA,Primavera,10268,258.6 PA,Quatipuru,12411,326.11 PA,Redenção,75556,3823.81 PA,Rio Maria,17697,4114.61 PA,Rondon do Pará,46964,8246.44 PA,Rurópolis,40087,7021.32 PA,Salinópolis,37421,237.74 PA,Salvaterra,20183,1039.07 PA,Santa Bárbara do Pará,17141,278.15 PA,Santa Cruz do Arari,8155,1076.65 PA,Santa Isabel do Pará,59466,717.66 PA,Santa Luzia do Pará,19424,1356.12 PA,Santa Maria das Barreiras,17206,10330.21 PA,Santa Maria do Pará,23026,457.73 PA,Santana do Araguaia,56153,11591.56 PA,Santarém,294580,22886.62 PA,Santarém Novo,6141,229.51 PA,Santo Antônio do Tauá,26674,537.63 PA,São Caetano de Odivelas,16891,743.47 PA,São Domingos do Araguaia,23130,1392.46 PA,São Domingos do Capim,29846,1677.25 PA,São Félix do Xingu,91340,84213.28 PA,São Francisco do Pará,15060,479.56 PA,São Geraldo do Araguaia,25587,3168.38 PA,São João da Ponta,5265,195.92 PA,São João de Pirabas,20647,705.54 PA,São João do Araguaia,13155,1279.89 PA,São Miguel do Guamá,51567,1110.18 PA,São Sebastião da Boa Vista,22904,1632.25 PA,Sapucaia,5047,1298.19 PA,Senador José Porfírio,13045,14419.92 PA,Soure,23001,3517.32 PA,Tailândia,79297,4430.22 PA,Terra Alta,10262,206.41 PA,Terra Santa,16949,1896.51 PA,Tomé-Açu,56518,5145.36 PA,Tracuateua,27455,934.27 PA,Trairão,16875,11991.09 PA,Tucumã,33690,2512.59 PA,Tucuruí,97128,2086.19 PA,Ulianópolis,43341,5088.47 PA,Uruará,44789,10791.37 PA,Vigia,47889,539.08 PA,Viseu,56716,4915.07 PA,Vitória do Xingu,13431,3089.54 PA,Xinguara,40573,3779.36 PB,Água Branca,9449,236.61 PB,Aguiar,5530,344.71 PB,Alagoa Grande,28479,320.56 PB,Alagoa Nova,19681,122.26 PB,Alagoinha,13576,96.98 PB,Alcantil,5239,305.39 PB,Algodão de Jandaíra,2366,220.25 PB,Alhandra,18007,182.66 PB,Amparo,2088,121.98 PB,Aparecida,7676,295.71 PB,Araçagi,17224,231.16 PB,Arara,12653,99.11 PB,Araruna,18879,245.72 PB,Areia,23829,269.49 PB,Areia de Baraúnas,1927,96.34 PB,Areial,6470,33.14 PB,Aroeiras,19082,374.7 PB,Assunção,3522,126.43 PB,Baía da Traição,8012,102.37 PB,Bananeiras,21851,257.93 PB,Baraúna,4220,50.58 PB,Barra de Santa Rosa,14157,775.66 PB,Barra de Santana,8206,376.91 PB,Barra de São Miguel,5611,595.21 PB,Bayeux,99716,31.97 PB,Belém,17093,100.15 PB,Belém do Brejo do Cruz,7143,603.04 PB,Bernardino Batista,3075,50.63 PB,Boa Ventura,5751,170.58 PB,Boa Vista,6227,476.54 PB,Bom Jesus,2400,47.63 PB,Bom Sucesso,5035,184.1 PB,Bonito de Santa Fé,10804,228.33 PB,Boqueirão,16888,371.98 
PB,Borborema,5111,25.98 PB,Brejo do Cruz,13123,398.92 PB,Brejo dos Santos,6198,93.85 PB,Caaporã,20362,150.17 PB,Cabaceiras,5035,452.92 PB,Cabedelo,57944,31.92 PB,Cachoeira dos Índios,9546,193.07 PB,Cacimba de Areia,3557,220.38 PB,Cacimba de Dentro,16748,163.69 PB,Cacimbas,6814,126.54 PB,Caiçara,7220,127.91 PB,Cajazeiras,58446,565.9 PB,Cajazeirinhas,3033,287.89 PB,Caldas Brandão,5637,55.85 PB,Camalaú,5749,543.69 PB,Campina Grande,385213,594.18 PB,Capim,5601,78.17 PB,Caraúbas,3899,497.2 PB,Carrapateira,2378,54.52 PB,Casserengue,7058,201.38 PB,Catingueira,4812,529.45 PB,Catolé do Rocha,28759,552.11 PB,Caturité,4543,118.08 PB,Conceição,18363,579.44 PB,Condado,6584,280.92 PB,Conde,21400,172.95 PB,Congo,4687,333.47 PB,Coremas,15149,379.49 PB,Coxixola,1771,169.88 PB,Cruz do Espírito Santo,16257,195.6 PB,Cubati,6866,136.97 PB,Cuité,19978,741.84 PB,Cuité de Mamanguape,6202,108.45 PB,Cuitegi,6889,39.3 PB,Curral de Cima,5209,85.1 PB,Curral Velho,2505,222.96 PB,Damião,4900,185.69 PB,Desterro,7991,179.39 PB,Diamante,6616,269.11 PB,Dona Inês,10517,166.17 PB,Duas Estradas,3638,26.26 PB,Emas,3317,240.9 PB,Esperança,31095,163.78 PB,Fagundes,11405,189.03 PB,Frei Martinho,2933,244.32 PB,Gado Bravo,8376,192.41 PB,Guarabira,55326,165.74 PB,Gurinhém,13872,346.07 PB,Gurjão,3159,343.2 PB,Ibiara,6031,244.49 PB,Igaracy,6156,192.26 PB,Imaculada,11352,316.98 PB,Ingá,18180,287.99 PB,Itabaiana,24481,218.85 PB,Itaporanga,23192,468.06 PB,Itapororoca,16997,146.07 PB,Itatuba,10201,244.22 PB,Jacaraú,13942,253.01 PB,Jericó,7538,179.31 PB,João Pessoa,723515,211.48 PB,Joca Claudino,2615,74.01 PB,Juarez Távora,7459,70.84 PB,Juazeirinho,16776,467.53 PB,Junco do Seridó,6643,170.42 PB,Juripiranga,10237,78.85 PB,Juru,9826,403.28 PB,Lagoa,4681,177.9 PB,Lagoa de Dentro,7370,84.51 PB,Lagoa Seca,25900,107.59 PB,Lastro,2841,102.67 PB,Livramento,7164,260.22 PB,Logradouro,3942,38.0 PB,Lucena,11730,88.94 PB,Mãe d`Água,4019,243.75 PB,Malta,5613,156.24 PB,Mamanguape,42303,340.53 PB,Manaíra,10759,352.57 PB,Marcação,7609,122.9 PB,Mari,21176,154.82 PB,Marizópolis,6173,63.61 PB,Massaranduba,12902,205.96 PB,Mataraca,7407,184.3 PB,Matinhas,4321,38.12 PB,Mato Grosso,2702,83.52 PB,Maturéia,5939,83.69 PB,Mogeiro,12491,193.94 PB,Montadas,4990,31.59 PB,Monte Horebe,4508,116.17 PB,Monteiro,30852,986.36 PB,Mulungu,9469,195.31 PB,Natuba,10566,205.04 PB,Nazarezinho,7280,191.49 PB,Nova Floresta,10533,47.38 PB,Nova Olinda,6070,84.25 PB,Nova Palmeira,4361,310.35 PB,Olho d`Água,6931,596.13 PB,Olivedos,3627,317.92 PB,Ouro Velho,2928,129.4 PB,Parari,1256,128.48 PB,Passagem,2233,111.88 PB,Patos,100674,473.06 PB,Paulista,11788,576.9 PB,Pedra Branca,3721,112.93 PB,Pedra Lavrada,7475,351.68 PB,Pedras de Fogo,27032,400.39 PB,Pedro Régis,5765,73.56 PB,Piancó,15465,564.74 PB,Picuí,18222,661.66 PB,Pilar,11191,102.4 PB,Pilões,6978,64.45 PB,Pilõezinhos,5155,43.9 PB,Pirpirituba,10326,79.84 PB,Pitimbu,17024,136.44 PB,Pocinhos,17032,628.08 PB,Poço Dantas,3751,97.25 PB,Poço de José de Moura,3978,100.97 PB,Pombal,32110,888.81 PB,Prata,3854,192.01 PB,Princesa Isabel,21283,367.98 PB,Puxinanã,12923,72.68 PB,Queimadas,41049,401.78 PB,Quixabá,1699,156.68 PB,Remígio,17581,178.0 PB,Riachão,3266,90.15 PB,Riachão do Bacamarte,4264,38.37 PB,Riachão do Poço,4164,39.91 PB,Riacho de Santo Antônio,1722,91.32 PB,Riacho dos Cavalos,8314,264.03 PB,Rio Tinto,22976,464.89 PB,Salgadinho,3508,184.24 PB,Salgado de São Félix,11976,201.85 PB,Santa Cecília,6658,227.88 PB,Santa Cruz,6471,210.17 PB,Santa Helena,5369,210.32 PB,Santa Inês,3539,324.43 PB,Santa Luzia,14719,455.7 PB,Santa Rita,120310,726.85 
PB,Santa Teresinha,4581,357.95 PB,Santana de Mangueira,5331,402.15 PB,Santana dos Garrotes,7266,353.82 PB,Santo André,2638,225.17 PB,São Bentinho,4138,195.97 PB,São Bento,30879,248.2 PB,São Domingos,2855,169.11 PB,São Domingos do Cariri,2420,218.8 PB,São Francisco,3364,95.06 PB,São João do Cariri,4344,653.6 PB,São João do Rio do Peixe,18201,474.43 PB,São João do Tigre,4396,816.12 PB,São José da Lagoa Tapada,7564,341.81 PB,São José de Caiana,6010,176.33 PB,São José de Espinharas,4760,725.66 PB,São José de Piranhas,19096,677.31 PB,São José de Princesa,4219,158.02 PB,São José do Bonfim,3233,134.72 PB,São José do Brejo do Cruz,1684,253.02 PB,São José do Sabugi,4010,206.92 PB,São José dos Cordeiros,3985,417.75 PB,São José dos Ramos,5508,98.23 PB,São Mamede,7748,530.73 PB,São Miguel de Taipu,6696,92.53 PB,São Sebastião de Lagoa de Roça,11041,49.92 PB,São Sebastião do Umbuzeiro,3235,460.57 PB,São Vicente do Seridó,10230,276.47 PB,Sapé,50143,315.53 PB,Serra Branca,12973,686.92 PB,Serra da Raiz,3204,29.08 PB,Serra Grande,2975,83.47 PB,Serra Redonda,7050,55.91 PB,Serraria,6238,65.3 PB,Sertãozinho,4395,32.8 PB,Sobrado,7373,61.74 PB,Solânea,26693,232.1 PB,Soledade,13739,560.04 PB,Sossêgo,3169,154.75 PB,Sousa,65803,738.55 PB,Sumé,16060,838.07 PB,Tacima,10262,246.66 PB,Taperoá,14936,662.91 PB,Tavares,14103,237.33 PB,Teixeira,14153,160.9 PB,Tenório,2813,105.27 PB,Triunfo,9220,219.87 PB,Uiraúna,14584,294.5 PB,Umbuzeiro,9298,181.33 PB,Várzea,2504,190.45 PB,Vieirópolis,5045,146.78 PB,Vista Serrana,3512,61.36 PB,Zabelê,2075,109.39 PR,Abatiá,7764,228.72 PR,Adrianópolis,6376,1349.33 PR,Agudos do Sul,8270,192.26 PR,Almirante Tamandaré,103204,194.74 PR,Altamira do Paraná,4306,386.95 PR,Alto Paraíso,3206,967.77 PR,Alto Paraná,13663,407.72 PR,Alto Piquiri,10179,447.67 PR,Altônia,20516,661.56 PR,Alvorada do Sul,10283,424.25 PR,Amaporã,5443,384.74 PR,Ampére,17308,298.35 PR,Anahy,2874,102.65 PR,Andirá,20610,236.08 PR,Ângulo,2859,106.02 PR,Antonina,18891,882.32 PR,Antônio Olinto,7351,469.62 PR,Apucarana,120919,558.39 PR,Arapongas,104150,382.22 PR,Arapoti,25855,1360.49 PR,Arapuã,3561,217.97 PR,Araruna,13419,493.19 PR,Araucária,119123,469.24 PR,Ariranha do Ivaí,2453,239.56 PR,Assaí,16354,440.35 PR,Assis Chateaubriand,33025,969.59 PR,Astorga,24698,434.79 PR,Atalaia,3913,137.66 PR,Balsa Nova,11300,348.93 PR,Bandeirantes,32184,445.19 PR,Barbosa Ferraz,12656,538.64 PR,Barra do Jacaré,2727,115.72 PR,Barracão,9735,171.46 PR,Bela Vista da Caroba,3945,148.11 PR,Bela Vista do Paraíso,15079,242.69 PR,Bituruna,15880,1214.91 PR,Boa Esperança,4568,307.38 PR,Boa Esperança do Iguaçu,2764,151.8 PR,Boa Ventura de São Roque,6554,622.18 PR,Boa Vista da Aparecida,7911,256.3 PR,Bocaiúva do Sul,10987,826.34 PR,Bom Jesus do Sul,3796,173.97 PR,Bom Sucesso,6561,322.76 PR,Bom Sucesso do Sul,3293,195.93 PR,Borrazópolis,7878,334.38 PR,Braganey,5735,343.32 PR,Brasilândia do Sul,3209,291.04 PR,Cafeara,2695,185.8 PR,Cafelândia,14662,271.72 PR,Cafezal do Sul,4290,335.39 PR,Califórnia,8069,141.82 PR,Cambará,23886,366.17 PR,Cambé,96733,494.87 PR,Cambira,7236,163.39 PR,Campina da Lagoa,15394,796.61 PR,Campina do Simão,4076,448.42 PR,Campina Grande do Sul,38769,539.24 PR,Campo Bonito,4407,433.83 PR,Campo do Tenente,7125,304.49 PR,Campo Largo,112377,1249.67 PR,Campo Magro,24843,275.35 PR,Campo Mourão,87194,757.88 PR,Cândido de Abreu,16655,1510.16 PR,Candói,14983,1512.79 PR,Cantagalo,12952,583.54 PR,Capanema,18526,418.71 PR,Capitão Leônidas Marques,14970,275.75 PR,Carambeí,19163,649.68 PR,Carlópolis,13706,451.42 PR,Cascavel,286205,2100.83 
PR,Castro,67084,2531.5 PR,Catanduvas,10202,581.76 PR,Centenário do Sul,11190,371.83 PR,Cerro Azul,16938,1341.19 PR,Céu Azul,11032,1179.45 PR,Chopinzinho,19679,959.69 PR,Cianorte,69958,811.67 PR,Cidade Gaúcha,11062,403.05 PR,Clevelândia,17240,703.64 PR,Colombo,212967,197.79 PR,Colorado,22345,403.26 PR,Congonhinhas,8279,535.96 PR,Conselheiro Mairinck,3636,204.71 PR,Contenda,15891,299.04 PR,Corbélia,16312,529.38 PR,Cornélio Procópio,46928,635.1 PR,Coronel Domingos Soares,7238,1576.22 PR,Coronel Vivida,21749,684.42 PR,Corumbataí do Sul,4002,164.34 PR,Cruz Machado,18040,1478.35 PR,Cruzeiro do Iguaçu,4278,161.86 PR,Cruzeiro do Oeste,20416,779.22 PR,Cruzeiro do Sul,4563,259.1 PR,Cruzmaltina,3162,312.3 PR,Curitiba,1751907,435.04 PR,Curiúva,13923,576.26 PR,Diamante do Norte,5516,242.89 PR,Diamante do Sul,3510,359.95 PR,Diamante d`Oeste,5027,309.11 PR,Dois Vizinhos,36179,418.65 PR,Douradina,7445,419.85 PR,Doutor Camargo,5828,118.28 PR,Doutor Ulysses,5727,781.45 PR,Enéas Marques,6103,192.2 PR,Engenheiro Beltrão,13906,467.47 PR,Entre Rios do Oeste,3926,122.07 PR,Esperança Nova,1970,138.56 PR,Espigão Alto do Iguaçu,4677,326.44 PR,Farol,3472,289.23 PR,Faxinal,16314,715.94 PR,Fazenda Rio Grande,81675,116.68 PR,Fênix,4802,234.1 PR,Fernandes Pinheiro,5932,406.5 PR,Figueira,8293,129.77 PR,Flor da Serra do Sul,4726,238.91 PR,Floraí,5050,191.13 PR,Floresta,5931,158.23 PR,Florestópolis,11222,246.33 PR,Flórida,2543,83.05 PR,Formosa do Oeste,7541,275.71 PR,Foz do Iguaçu,256088,617.7 PR,Foz do Jordão,5420,235.38 PR,Francisco Alves,6418,321.9 PR,Francisco Beltrão,78943,735.11 PR,General Carneiro,13669,1071.18 PR,Godoy Moreira,3337,131.01 PR,Goioerê,29018,564.16 PR,Goioxim,7503,702.47 PR,Grandes Rios,6625,314.2 PR,Guaíra,30704,560.49 PR,Guairaçá,6197,493.94 PR,Guamiranga,7900,244.8 PR,Guapirama,3891,189.1 PR,Guaporema,2219,200.19 PR,Guaraci,5227,211.72 PR,Guaraniaçu,14582,1225.61 PR,Guarapuava,167328,3117.01 PR,Guaraqueçaba,7871,2020.09 PR,Guaratuba,32095,1326.79 PR,Honório Serpa,5955,502.24 PR,Ibaiti,28751,897.74 PR,Ibema,6066,145.45 PR,Ibiporã,48198,297.74 PR,Icaraíma,8839,675.24 PR,Iguaraçu,3982,164.98 PR,Iguatu,2234,106.94 PR,Imbaú,11274,330.7 PR,Imbituva,28455,756.54 PR,Inácio Martins,10943,936.21 PR,Inajá,2988,194.7 PR,Indianópolis,4299,122.62 PR,Ipiranga,14150,927.09 PR,Iporã,14981,647.89 PR,Iracema do Oeste,2578,81.54 PR,Irati,56207,999.52 PR,Iretama,10622,570.46 PR,Itaguajé,4568,190.37 PR,Itaipulândia,9026,331.29 PR,Itambaracá,6759,207.34 PR,Itambé,5979,243.82 PR,Itapejara d`Oeste,10531,254.01 PR,Itaperuçu,23887,314.46 PR,Itaúna do Sul,3583,128.87 PR,Ivaí,12815,607.85 PR,Ivaiporã,31816,431.5 PR,Ivaté,7514,410.91 PR,Ivatuba,3010,96.66 PR,Jaboti,4902,139.28 PR,Jacarezinho,39121,602.53 PR,Jaguapitã,12225,475.0 PR,Jaguariaíva,32606,1453.07 PR,Jandaia do Sul,20269,187.6 PR,Janiópolis,6532,335.65 PR,Japira,4903,188.29 PR,Japurá,8549,165.19 PR,Jardim Alegre,12324,405.55 PR,Jardim Olinda,1409,128.52 PR,Jataizinho,11875,159.18 PR,Jesuítas,9001,247.5 PR,Joaquim Távora,10736,289.17 PR,Jundiaí do Sul,3433,320.82 PR,Juranda,7641,349.72 PR,Jussara,6610,210.87 PR,Kaloré,4506,193.3 PR,Lapa,44932,2093.86 PR,Laranjal,6360,559.44 PR,Laranjeiras do Sul,30777,672.08 PR,Leópolis,4145,344.92 PR,Lidianópolis,3973,158.69 PR,Lindoeste,5361,361.37 PR,Loanda,21201,722.5 PR,Lobato,4401,240.9 PR,Londrina,506701,1653.08 PR,Luiziana,7315,908.6 PR,Lunardelli,5160,199.21 PR,Lupionópolis,4592,121.07 PR,Mallet,12973,723.02 PR,Mamborê,13961,788.06 PR,Mandaguaçu,19781,294.02 PR,Mandaguari,32658,335.81 PR,Mandirituba,22220,379.18 
PR,Manfrinópolis,3127,216.42 PR,Mangueirinha,17048,1055.46 PR,Manoel Ribas,13169,571.14 PR,Marechal Cândido Rondon,46819,748.0 PR,Maria Helena,5956,486.22 PR,Marialva,31959,475.56 PR,Marilândia do Sul,8863,384.42 PR,Marilena,6858,232.37 PR,Mariluz,10224,433.17 PR,Maringá,357077,487.05 PR,Mariópolis,6268,230.36 PR,Maripá,5684,283.79 PR,Marmeleiro,13900,387.38 PR,Marquinho,4981,511.15 PR,Marumbi,4603,208.47 PR,Matelândia,16078,639.75 PR,Matinhos,29428,117.74 PR,Mato Rico,3818,394.53 PR,Mauá da Serra,8555,108.32 PR,Medianeira,41817,328.73 PR,Mercedes,5046,200.86 PR,Mirador,2327,221.71 PR,Miraselva,1862,90.29 PR,Missal,10474,324.4 PR,Moreira Sales,12606,353.77 PR,Morretes,15718,684.58 PR,Munhoz de Melo,3672,137.02 PR,Nossa Senhora das Graças,3836,185.73 PR,Nova Aliança do Ivaí,1431,131.27 PR,Nova América da Colina,3478,129.48 PR,Nova Aurora,11866,474.01 PR,Nova Cantu,7425,555.49 PR,Nova Esperança,26615,401.59 PR,Nova Esperança do Sudoeste,5098,208.47 PR,Nova Fátima,8147,283.42 PR,Nova Laranjeiras,11241,1145.49 PR,Nova Londrina,13067,269.39 PR,Nova Olímpia,5503,136.35 PR,Nova Prata do Iguaçu,10377,352.57 PR,Nova Santa Bárbara,3908,71.76 PR,Nova Santa Rosa,7626,204.67 PR,Nova Tebas,7398,545.69 PR,Novo Itacolomi,2827,161.41 PR,Ortigueira,23380,2429.56 PR,Ourizona,3380,176.46 PR,Ouro Verde do Oeste,5692,293.04 PR,Paiçandu,35936,171.38 PR,Palmas,42888,1557.89 PR,Palmeira,32123,1457.26 PR,Palmital,14865,817.65 PR,Palotina,28683,651.24 PR,Paraíso do Norte,11772,204.56 PR,Paranacity,10250,348.63 PR,Paranaguá,140469,826.67 PR,Paranapoema,2791,175.88 PR,Paranavaí,81590,1202.27 PR,Pato Bragado,4822,135.29 PR,Pato Branco,72370,539.09 PR,Paula Freitas,5434,421.41 PR,Paulo Frontin,6913,369.86 PR,Peabiru,13624,468.6 PR,Perobal,5653,407.58 PR,Pérola,10208,240.64 PR,Pérola d`Oeste,6761,206.05 PR,Piên,11236,254.79 PR,Pinhais,117008,60.87 PR,Pinhal de São Bento,2625,97.46 PR,Pinhalão,6215,220.63 PR,Pinhão,30208,2001.59 PR,Piraí do Sul,23424,1403.07 PR,Piraquara,93207,227.05 PR,Pitanga,32638,1663.75 PR,Pitangueiras,2814,123.23 PR,Planaltina do Paraná,4095,356.19 PR,Planalto,13654,345.74 PR,Ponta Grossa,311611,2067.55 PR,Pontal do Paraná,20920,199.87 PR,Porecatu,14189,291.67 PR,Porto Amazonas,4514,186.58 PR,Porto Barreiro,3663,361.02 PR,Porto Rico,2530,217.68 PR,Porto Vitória,4020,213.01 PR,Prado Ferreira,3434,153.4 PR,Pranchita,5628,225.84 PR,Presidente Castelo Branco,4784,155.73 PR,Primeiro de Maio,10832,414.44 PR,Prudentópolis,48792,2308.5 PR,Quarto Centenário,4856,321.88 PR,Quatiguá,7045,112.69 PR,Quatro Barras,19851,180.47 PR,Quatro Pontes,3803,114.39 PR,Quedas do Iguaçu,30605,821.5 PR,Querência do Norte,11729,914.76 PR,Quinta do Sol,5088,326.18 PR,Quitandinha,17089,447.02 PR,Ramilândia,4134,237.2 PR,Rancho Alegre,3955,167.65 PR,Rancho Alegre d`Oeste,2847,241.39 PR,Realeza,16338,353.42 PR,Rebouças,14176,481.84 PR,Renascença,6812,425.27 PR,Reserva,25172,1635.52 PR,Reserva do Iguaçu,7307,834.23 PR,Ribeirão Claro,10678,629.22 PR,Ribeirão do Pinhal,13524,374.73 PR,Rio Azul,14093,629.75 PR,Rio Bom,3334,177.84 PR,Rio Bonito do Iguaçu,13661,746.12 PR,Rio Branco do Ivaí,3898,382.33 PR,Rio Branco do Sul,30650,812.29 PR,Rio Negro,31274,604.14 PR,Rolândia,57862,459.02 PR,Roncador,11537,742.12 PR,Rondon,8996,556.09 PR,Rosário do Ivaí,5588,371.25 PR,Sabáudia,6096,190.33 PR,Salgado Filho,4403,189.32 PR,Salto do Itararé,5178,200.52 PR,Salto do Lontra,13689,312.72 PR,Santa Amélia,3803,78.05 PR,Santa Cecília do Pavão,3646,110.2 PR,Santa Cruz de Monte Castelo,8092,442.01 PR,Santa Fé,10432,276.24 PR,Santa Helena,23413,758.23 
PR,Santa Inês,1818,138.48 PR,Santa Isabel do Ivaí,8760,349.5 PR,Santa Izabel do Oeste,13132,321.18 PR,Santa Lúcia,3925,116.86 PR,Santa Maria do Oeste,11500,847.14 PR,Santa Mariana,12435,427.19 PR,Santa Mônica,3571,259.96 PR,Santa Tereza do Oeste,10332,326.19 PR,Santa Terezinha de Itaipu,20841,259.39 PR,Santana do Itararé,5249,251.27 PR,Santo Antônio da Platina,42707,721.47 PR,Santo Antônio do Caiuá,2727,219.07 PR,Santo Antônio do Paraíso,2408,165.9 PR,Santo Antônio do Sudoeste,18893,325.74 PR,Santo Inácio,5269,306.87 PR,São Carlos do Ivaí,6354,225.08 PR,São Jerônimo da Serra,11337,823.77 PR,São João,10599,388.06 PR,São João do Caiuá,5911,304.41 PR,São João do Ivaí,11525,353.33 PR,São João do Triunfo,13704,720.41 PR,São Jorge do Ivaí,5517,315.09 PR,São Jorge do Patrocínio,6041,404.69 PR,São Jorge d`Oeste,9085,379.55 PR,São José da Boa Vista,6511,399.67 PR,São José das Palmeiras,3830,182.42 PR,São José dos Pinhais,264210,946.44 PR,São Manoel do Paraná,2098,95.38 PR,São Mateus do Sul,41257,1341.71 PR,São Miguel do Iguaçu,25769,851.3 PR,São Pedro do Iguaçu,6491,308.32 PR,São Pedro do Ivaí,10167,322.69 PR,São Pedro do Paraná,2491,250.65 PR,São Sebastião da Amoreira,8626,227.98 PR,São Tomé,5349,218.62 PR,Sapopema,6736,677.61 PR,Sarandi,82847,103.46 PR,Saudade do Iguaçu,5028,152.09 PR,Sengés,18414,1437.36 PR,Serranópolis do Iguaçu,4568,483.66 PR,Sertaneja,5817,444.49 PR,Sertanópolis,15638,505.53 PR,Siqueira Campos,18454,278.04 PR,Sulina,3394,170.76 PR,Tamarana,12262,472.16 PR,Tamboara,4664,193.35 PR,Tapejara,14598,591.4 PR,Tapira,5836,434.37 PR,Teixeira Soares,10283,902.79 PR,Telêmaco Borba,69872,1382.86 PR,Terra Boa,15776,320.85 PR,Terra Rica,15221,700.59 PR,Terra Roxa,16759,800.81 PR,Tibagi,19344,2951.57 PR,Tijucas do Sul,14537,671.89 PR,Toledo,119313,1197.0 PR,Tomazina,8791,591.44 PR,Três Barras do Paraná,11824,504.17 PR,Tunas do Paraná,6256,668.48 PR,Tuneiras do Oeste,8695,698.87 PR,Tupãssi,7997,310.91 PR,Turvo,13811,916.49 PR,Ubiratã,21558,652.58 PR,Umuarama,100676,1232.77 PR,União da Vitória,52735,720.0 PR,Uniflor,2466,94.82 PR,Uraí,11472,237.81 PR,Ventania,9957,759.37 PR,Vera Cruz do Oeste,8973,327.09 PR,Verê,7878,311.8 PR,Virmond,3950,243.17 PR,Vitorino,6513,308.22 PR,Wenceslau Braz,19298,397.92 PR,Xambrê,6012,359.71 PE,Abreu e Lima,94429,126.19 PE,Afogados da Ingazeira,35088,377.7 PE,Afrânio,17586,1490.6 PE,Agrestina,22679,201.45 PE,Água Preta,33095,533.33 PE,Águas Belas,40235,885.99 PE,Alagoinha,13759,217.83 PE,Aliança,37415,272.79 PE,Altinho,22353,454.48 PE,Amaraji,21939,234.96 PE,Angelim,10202,118.04 PE,Araçoiaba,18156,96.38 PE,Araripina,77302,1892.6 PE,Arcoverde,68793,350.9 PE,Barra de Guabiraba,12776,114.65 PE,Barreiros,40732,233.37 PE,Belém de Maria,11353,73.74 PE,Belém de São Francisco,20253,1830.8 PE,Belo Jardim,72432,647.7 PE,Betânia,12003,1244.07 PE,Bezerros,58668,490.82 PE,Bodocó,35158,1616.5 PE,Bom Conselho,45503,792.19 PE,Bom Jardim,37826,223.18 PE,Bonito,37566,395.61 PE,Brejão,8844,159.79 PE,Brejinho,7307,106.28 PE,Brejo da Madre de Deus,45180,762.35 PE,Buenos Aires,12537,93.19 PE,Buíque,52105,1329.74 PE,Cabo de Santo Agostinho,185025,448.74 PE,Cabrobó,30873,1657.71 PE,Cachoeirinha,18819,179.26 PE,Caetés,26577,329.48 PE,Calçado,11125,121.95 PE,Calumbi,5648,179.31 PE,Camaragibe,144466,51.26 PE,Camocim de São Félix,17104,72.48 PE,Camutanga,8156,37.52 PE,Canhotinho,24521,423.08 PE,Capoeiras,19593,336.33 PE,Carnaíba,18574,427.8 PE,Carnaubeira da Penha,11782,1004.67 PE,Carpina,74858,144.93 PE,Caruaru,314912,920.61 PE,Casinhas,13766,115.87 PE,Catende,37820,207.24 
PE,Cedro,10778,148.76 PE,Chã de Alegria,12404,48.55 PE,Chã Grande,20137,84.85 PE,Condado,24282,89.65 PE,Correntes,17419,328.66 PE,Cortês,12452,101.32 PE,Cumaru,17183,292.23 PE,Cupira,23390,105.56 PE,Custódia,33855,1404.13 PE,Dormentes,16917,1537.64 PE,Escada,63517,346.96 PE,Exu,31636,1337.5 PE,Feira Nova,20571,107.73 PE,Fernando de Noronha,2630,17.02 PE,Ferreiros,11430,89.35 PE,Flores,22169,995.56 PE,Floresta,29285,3644.17 PE,Frei Miguelinho,14293,212.71 PE,Gameleira,27912,255.96 PE,Garanhuns,129408,458.55 PE,Glória do Goitá,29019,231.83 PE,Goiana,75644,501.88 PE,Granito,6855,521.94 PE,Gravatá,76458,506.79 PE,Iati,18360,635.14 PE,Ibimirim,26954,1906.44 PE,Ibirajuba,7534,189.6 PE,Igarassu,102021,305.56 PE,Iguaraci,11779,838.13 PE,Ilha de Itamaracá,21884,66.68 PE,Inajá,19081,1182.55 PE,Ingazeira,4496,243.67 PE,Ipojuca,80637,527.11 PE,Ipubi,28120,861.42 PE,Itacuruba,4369,430.03 PE,Itaíba,26256,1084.78 PE,Itambé,35398,304.81 PE,Itapetim,13881,404.85 PE,Itapissuma,23769,74.24 PE,Itaquitinga,15692,103.42 PE,Jaboatão dos Guararapes,644620,258.69 PE,Jaqueira,11501,87.21 PE,Jataúba,15819,672.18 PE,Jatobá,13963,277.86 PE,João Alfredo,30743,135.12 PE,Joaquim Nabuco,15773,121.9 PE,Jucati,10604,120.6 PE,Jupi,13705,104.99 PE,Jurema,14541,148.25 PE,Lagoa do Carro,16007,69.67 PE,Lagoa do Itaenga,20659,57.28 PE,Lagoa do Ouro,12132,198.76 PE,Lagoa dos Gatos,15615,222.87 PE,Lagoa Grande,22760,1848.9 PE,Lajedo,36628,189.1 PE,Limoeiro,55439,273.74 PE,Macaparana,23925,108.05 PE,Machados,13596,60.04 PE,Manari,18083,380.23 PE,Maraial,12230,199.87 PE,Mirandiba,14308,821.68 PE,Moreilândia,11132,404.57 PE,Moreno,56696,196.07 PE,Nazaré da Mata,30796,150.26 PE,Olinda,377779,41.68 PE,Orobó,22878,138.66 PE,Orocó,13180,554.76 PE,Ouricuri,64358,2422.89 PE,Palmares,59526,339.29 PE,Palmeirina,8189,158.02 PE,Panelas,25645,370.94 PE,Paranatama,11001,230.89 PE,Parnamirim,20224,2597.11 PE,Passira,28628,326.76 PE,Paudalho,51357,277.51 PE,Paulista,300466,97.31 PE,Pedra,20944,803.22 PE,Pesqueira,62931,995.54 PE,Petrolândia,32492,1056.6 PE,Petrolina,293962,4561.87 PE,Poção,11242,246.75 PE,Pombos,24046,203.18 PE,Primavera,13439,110.19 PE,Quipapá,24186,230.62 PE,Quixaba,6739,210.71 PE,Recife,1537704,218.44 PE,Riacho das Almas,19162,314.0 PE,Ribeirão,44439,287.9 PE,Rio Formoso,22151,227.46 PE,Sairé,11240,189.37 PE,Salgadinho,9312,87.22 PE,Salgueiro,56629,1686.81 PE,Saloá,15309,252.08 PE,Sanharó,21955,268.69 PE,Santa Cruz,13594,1255.94 PE,Santa Cruz da Baixa Verde,11768,114.93 PE,Santa Cruz do Capibaribe,87582,335.31 PE,Santa Filomena,13371,1005.05 PE,Santa Maria da Boa Vista,39435,3001.18 PE,Santa Maria do Cambucá,13021,92.15 PE,Santa Terezinha,10991,195.59 PE,São Benedito do Sul,13941,160.48 PE,São Bento do Una,53242,719.15 PE,São Caitano,35274,382.47 PE,São João,21312,258.33 PE,São Joaquim do Monte,20488,231.8 PE,São José da Coroa Grande,18180,69.34 PE,São José do Belmonte,32617,1474.09 PE,São José do Egito,31829,798.88 PE,São Lourenço da Mata,102895,262.11 PE,São Vicente Ferrer,17000,113.99 PE,Serra Talhada,79232,2980.01 PE,Serrita,18331,1537.26 PE,Sertânia,33787,2421.53 PE,Sirinhaém,40296,374.61 PE,Solidão,5744,138.4 PE,Surubim,58515,252.86 PE,Tabira,26427,388.01 PE,Tacaimbó,12725,227.6 PE,Tacaratu,22068,1264.53 PE,Tamandaré,20715,214.31 PE,Taquaritinga do Norte,24903,475.18 PE,Terezinha,6737,151.45 PE,Terra Nova,9278,320.5 PE,Timbaúba,53825,291.52 PE,Toritama,35554,25.7 PE,Tracunhaém,13055,118.39 PE,Trindade,26116,229.54 PE,Triunfo,15006,191.52 PE,Tupanatinga,24425,950.47 PE,Tuparetama,7925,178.57 PE,Venturosa,16052,320.73 
PE,Verdejante,9142,476.04 PE,Vertente do Lério,7873,73.63 PE,Vertentes,18222,196.33 PE,Vicência,30732,228.02 PE,Vitória de Santo Antão,129974,372.64 PE,Xexéu,14093,110.81 PI,Acauã,6749,1279.59 PI,Agricolândia,5098,112.43 PI,Água Branca,16451,97.04 PI,Alagoinha do Piauí,7341,532.98 PI,Alegrete do Piauí,5153,282.71 PI,Alto Longá,13646,1737.84 PI,Altos,38822,957.66 PI,Alvorada do Gurguéia,5050,2131.92 PI,Amarante,17135,1155.2 PI,Angical do Piauí,6672,223.44 PI,Anísio de Abreu,9098,337.88 PI,Antônio Almeida,3039,645.75 PI,Aroazes,5779,821.66 PI,Aroeiras do Itaim,2440,257.14 PI,Arraial,4688,682.76 PI,Assunção do Piauí,7503,1690.7 PI,Avelino Lopes,11067,1305.52 PI,Baixa Grande do Ribeiro,10516,7808.91 PI,Barra d`Alcântara,3852,263.38 PI,Barras,44850,1719.8 PI,Barreiras do Piauí,3234,2028.29 PI,Barro Duro,6607,131.12 PI,Batalha,25774,1588.9 PI,Bela Vista do Piauí,3778,499.39 PI,Belém do Piauí,3284,243.28 PI,Beneditinos,9911,788.58 PI,Bertolínia,5319,1225.34 PI,Betânia do Piauí,6015,564.71 PI,Boa Hora,6296,337.57 PI,Bocaina,4369,268.58 PI,Bom Jesus,22629,5469.18 PI,Bom Princípio do Piauí,5304,521.57 PI,Bonfim do Piauí,5393,289.21 PI,Boqueirão do Piauí,6193,278.3 PI,Brasileira,7966,880.91 PI,Brejo do Piauí,3850,2183.36 PI,Buriti dos Lopes,19074,691.18 PI,Buriti dos Montes,7974,2652.11 PI,Cabeceiras do Piauí,9928,608.53 PI,Cajazeiras do Piauí,3343,514.36 PI,Cajueiro da Praia,7163,271.71 PI,Caldeirão Grande do Piauí,5671,494.89 PI,Campinas do Piauí,5408,831.2 PI,Campo Alegre do Fidalgo,4693,657.8 PI,Campo Grande do Piauí,5592,311.83 PI,Campo Largo do Piauí,6803,477.8 PI,Campo Maior,45177,1675.71 PI,Canavieira,3921,2162.87 PI,Canto do Buriti,20020,4325.64 PI,Capitão de Campos,10953,592.17 PI,Capitão Gervásio Oliveira,3878,1134.17 PI,Caracol,10212,1610.96 PI,Caraúbas do Piauí,5525,471.45 PI,Caridade do Piauí,4826,501.36 PI,Castelo do Piauí,18336,2035.19 PI,Caxingó,5039,488.17 PI,Cocal,26036,1269.5 PI,Cocal de Telha,4525,282.11 PI,Cocal dos Alves,5572,357.69 PI,Coivaras,3811,485.5 PI,Colônia do Gurguéia,6036,430.62 PI,Colônia do Piauí,7433,947.87 PI,Conceição do Canindé,4475,831.41 PI,Coronel José Dias,4541,1914.82 PI,Corrente,25407,3048.45 PI,Cristalândia do Piauí,7831,1202.9 PI,Cristino Castro,9981,1846.34 PI,Curimatá,10761,2337.54 PI,Currais,4704,3156.66 PI,Curral Novo do Piauí,4869,752.31 PI,Curralinhos,4183,345.85 PI,Demerval Lobão,13278,216.81 PI,Dirceu Arcoverde,6675,1017.06 PI,Dom Expedito Lopes,6569,219.07 PI,Dom Inocêncio,9245,3870.17 PI,Domingos Mourão,4264,846.84 PI,Elesbão Veloso,14512,1347.48 PI,Eliseu Martins,4665,1090.45 PI,Esperantina,37767,911.22 PI,Fartura do Piauí,5074,712.92 PI,Flores do Piauí,4366,946.73 PI,Floresta do Piauí,2482,194.7 PI,Floriano,57690,3409.65 PI,Francinópolis,5235,268.7 PI,Francisco Ayres,4477,656.48 PI,Francisco Macedo,2879,155.28 PI,Francisco Santos,8592,491.86 PI,Fronteiras,11117,775.68 PI,Geminiano,5475,462.52 PI,Gilbués,10402,3494.96 PI,Guadalupe,10268,1023.59 PI,Guaribas,4401,3118.23 PI,Hugo Napoleão,3771,224.46 PI,Ilha Grande,8914,134.32 PI,Inhuma,14845,978.22 PI,Ipiranga do Piauí,9327,527.73 PI,Isaías Coelho,8221,776.05 PI,Itainópolis,11109,828.15 PI,Itaueira,10678,2554.18 PI,Jacobina do Piauí,5722,1370.7 PI,Jaicós,18035,865.14 PI,Jardim do Mulato,4309,509.85 PI,Jatobá do Piauí,4656,653.23 PI,Jerumenha,4390,1867.31 PI,João Costa,2960,1800.24 PI,Joaquim Pires,13817,739.58 PI,Joca Marques,5100,166.44 PI,José de Freitas,37085,1538.18 PI,Juazeiro do Piauí,4757,827.24 PI,Júlio Borges,5373,1297.11 PI,Jurema,4517,1271.89 PI,Lagoa Alegre,8008,394.66 PI,Lagoa de 
São Francisco,6422,155.64 PI,Lagoa do Barro do Piauí,4523,1261.94 PI,Lagoa do Piauí,3863,426.63 PI,Lagoa do Sítio,4850,804.7 PI,Lagoinha do Piauí,2656,67.5 PI,Landri Sales,5281,1088.58 PI,Luís Correia,28406,1070.93 PI,Luzilândia,24721,704.35 PI,Madeiro,7816,177.15 PI,Manoel Emídio,5213,1618.98 PI,Marcolândia,7812,143.88 PI,Marcos Parente,4456,677.41 PI,Massapê do Piauí,6220,521.13 PI,Matias Olímpio,10473,226.37 PI,Miguel Alves,32289,1393.71 PI,Miguel Leão,1253,93.52 PI,Milton Brandão,6769,1371.74 PI,Monsenhor Gil,10333,568.73 PI,Monsenhor Hipólito,7391,401.43 PI,Monte Alegre do Piauí,10345,2417.93 PI,Morro Cabeça no Tempo,4068,2116.94 PI,Morro do Chapéu do Piauí,6499,328.29 PI,Murici dos Portelas,8464,481.71 PI,Nazaré do Piauí,7321,1315.84 PI,Nazária,8068,363.59 PI,Nossa Senhora de Nazaré,4556,356.26 PI,Nossa Senhora dos Remédios,8206,358.49 PI,Nova Santa Rita,4187,909.74 PI,Novo Oriente do Piauí,6498,525.33 PI,Novo Santo Antônio,3260,481.71 PI,Oeiras,35640,2702.49 PI,Olho d`Água do Piauí,2626,219.6 PI,Padre Marcos,6657,272.04 PI,Paes Landim,4059,401.38 PI,Pajeú do Piauí,3363,1079.17 PI,Palmeira do Piauí,4993,2023.51 PI,Palmeirais,13745,1499.18 PI,Paquetá,4147,448.46 PI,Parnaguá,10276,3429.28 PI,Parnaíba,145705,435.57 PI,Passagem Franca do Piauí,4546,849.61 PI,Patos do Piauí,6105,751.6 PI,Pau d`Arco do Piauí,3757,430.82 PI,Paulistana,19785,1969.96 PI,Pavussu,3663,1090.7 PI,Pedro II,37496,1518.23 PI,Pedro Laurentino,2407,870.34 PI,Picos,73414,534.72 PI,Pimenteiras,11733,4563.13 PI,Pio IX,17671,1947.16 PI,Piracuruca,27553,2380.41 PI,Piripiri,61834,1408.93 PI,Porto,11897,252.72 PI,Porto Alegre do Piauí,2559,1169.44 PI,Prata do Piauí,3085,196.33 PI,Queimada Nova,8553,1352.4 PI,Redenção do Gurguéia,8400,2468.01 PI,Regeneração,17556,1251.04 PI,Riacho Frio,4241,2222.1 PI,Ribeira do Piauí,4263,1004.23 PI,Ribeiro Gonçalves,6845,3978.96 PI,Rio Grande do Piauí,6273,635.95 PI,Santa Cruz do Piauí,6027,611.62 PI,Santa Cruz dos Milagres,3794,979.66 PI,Santa Filomena,6096,5285.44 PI,Santa Luz,5513,1186.84 PI,Santa Rosa do Piauí,5149,340.2 PI,Santana do Piauí,4917,141.12 PI,Santo Antônio de Lisboa,6007,387.4 PI,Santo Antônio dos Milagres,2059,33.15 PI,Santo Inácio do Piauí,3648,852.89 PI,São Braz do Piauí,4313,656.36 PI,São Félix do Piauí,3069,657.24 PI,São Francisco de Assis do Piauí,5567,1100.4 PI,São Francisco do Piauí,6298,1340.67 PI,São Gonçalo do Gurguéia,2825,1385.3 PI,São Gonçalo do Piauí,4754,150.22 PI,São João da Canabrava,4445,480.28 PI,São João da Fronteira,5608,764.87 PI,São João da Serra,6157,1006.5 PI,São João da Varjota,4651,395.31 PI,São João do Arraial,7336,213.36 PI,São João do Piauí,19548,1527.77 PI,São José do Divino,5148,319.13 PI,São José do Peixe,3700,1287.17 PI,São José do Piauí,6591,364.95 PI,São Julião,5675,257.19 PI,São Lourenço do Piauí,4427,672.71 PI,São Luis do Piauí,2561,220.38 PI,São Miguel da Baixa Grande,2110,384.19 PI,São Miguel do Fidalgo,2976,813.44 PI,São Miguel do Tapuio,18134,5207.02 PI,São Pedro do Piauí,13639,518.29 PI,São Raimundo Nonato,32327,2415.6 PI,Sebastião Barros,3560,893.72 PI,Sebastião Leal,4116,3151.59 PI,Sigefredo Pacheco,9619,966.99 PI,Simões,14180,1071.54 PI,Simplício Mendes,12077,1345.79 PI,Socorro do Piauí,4522,761.85 PI,Sussuapara,6229,209.7 PI,Tamboril do Piauí,2753,1587.3 PI,Tanque do Piauí,2620,398.72 PI,Teresina,814230,1391.98 PI,União,42654,1173.45 PI,Uruçuí,20149,8411.91 PI,Valença do Piauí,20326,1334.63 PI,Várzea Branca,4913,450.76 PI,Várzea Grande,4336,237.01 PI,Vera Mendes,2986,341.97 PI,Vila Nova do Piauí,3076,218.32 PI,Wall 
Ferraz,4280,269.99 RJ,Angra dos Reis,169511,825.09 RJ,Aperibé,10213,94.64 RJ,Araruama,112008,638.02 RJ,Areal,11423,110.92 RJ,Armação dos Búzios,27560,70.28 RJ,Arraial do Cabo,27715,160.29 RJ,Barra do Piraí,94778,578.97 RJ,Barra Mansa,177813,547.23 RJ,Belford Roxo,469332,77.82 RJ,Bom Jardim,25333,384.64 RJ,Bom Jesus do Itabapoana,35411,598.83 RJ,Cabo Frio,186227,410.42 RJ,Cachoeiras de Macacu,54273,953.8 RJ,Cambuci,14827,561.7 RJ,Campos dos Goytacazes,463731,4026.7 RJ,Cantagalo,19830,749.28 RJ,Carapebus,13359,308.13 RJ,Cardoso Moreira,12600,524.63 RJ,Carmo,17434,321.94 RJ,Casimiro de Abreu,35347,460.77 RJ,Comendador Levy Gasparian,8180,106.89 RJ,Conceição de Macabu,21211,347.27 RJ,Cordeiro,20430,116.35 RJ,Duas Barras,10930,375.13 RJ,Duque de Caxias,855048,467.62 RJ,Engenheiro Paulo de Frontin,13237,132.94 RJ,Guapimirim,51483,360.77 RJ,Iguaba Grande,22851,51.95 RJ,Itaboraí,218008,430.37 RJ,Itaguaí,109091,275.87 RJ,Italva,14063,293.82 RJ,Itaocara,22899,431.34 RJ,Itaperuna,95841,1105.34 RJ,Itatiaia,28783,245.15 RJ,Japeri,95492,81.87 RJ,Laje do Muriaé,7487,249.97 RJ,Macaé,206728,1216.85 RJ,Macuco,5269,77.72 RJ,Magé,227322,388.5 RJ,Mangaratiba,36456,356.41 RJ,Maricá,127461,362.57 RJ,Mendes,17935,97.04 RJ,Mesquita,168376,39.06 RJ,Miguel Pereira,24642,289.18 RJ,Miracema,26843,304.51 RJ,Natividade,15082,386.74 RJ,Nilópolis,157425,19.39 RJ,Niterói,487562,133.92 RJ,Nova Friburgo,182082,933.41 RJ,Nova Iguaçu,796257,521.25 RJ,Paracambi,47124,179.68 RJ,Paraíba do Sul,41084,580.53 RJ,Paraty,37533,925.05 RJ,Paty do Alferes,26359,318.8 RJ,Petrópolis,295917,795.8 RJ,Pinheiral,22719,76.53 RJ,Piraí,26314,505.38 RJ,Porciúncula,17760,302.03 RJ,Porto Real,16592,50.75 RJ,Quatis,12793,286.09 RJ,Queimados,137962,75.7 RJ,Quissamã,20242,712.87 RJ,Resende,119769,1095.25 RJ,Rio Bonito,55551,456.46 RJ,Rio Claro,17425,837.27 RJ,Rio das Flores,8561,478.31 RJ,Rio das Ostras,105676,229.04 RJ,Rio de Janeiro,6320446,1200.28 RJ,Santa Maria Madalena,10321,814.76 RJ,Santo Antônio de Pádua,40589,603.36 RJ,São Fidélis,37543,1031.56 RJ,São Francisco de Itabapoana,41354,1122.44 RJ,São Gonçalo,999728,247.71 RJ,São João da Barra,32747,455.04 RJ,São João de Meriti,458673,35.22 RJ,São José de Ubá,7003,250.28 RJ,São José do Vale do Rio Preto,20251,220.43 RJ,São Pedro da Aldeia,87875,332.79 RJ,São Sebastião do Alto,8895,397.9 RJ,Sapucaia,17525,541.71 RJ,Saquarema,74234,353.57 RJ,Seropédica,78186,283.76 RJ,Silva Jardim,21349,937.55 RJ,Sumidouro,14900,395.52 RJ,Tanguá,30732,145.5 RJ,Teresópolis,163746,770.6 RJ,Trajano de Moraes,10289,589.81 RJ,Três Rios,77432,326.14 RJ,Valença,71843,1304.81 RJ,Varre-Sai,9475,190.06 RJ,Vassouras,34410,538.13 RJ,Volta Redonda,257803,182.48 RN,Acari,11035,608.57 RN,Açu,53227,1303.44 RN,Afonso Bezerra,10844,576.18 RN,Água Nova,2980,50.68 RN,Alexandria,13507,381.21 RN,Almino Afonso,4871,128.04 RN,Alto do Rodrigues,12305,191.33 RN,Angicos,11549,741.58 RN,Antônio Martins,6907,244.62 RN,Apodi,34763,1602.48 RN,Areia Branca,25315,357.63 RN,Arês,12924,115.51 RN,Augusto Severo,9289,896.95 RN,Baía Formosa,8573,245.66 RN,Baraúna,24182,825.68 RN,Barcelona,3950,152.63 RN,Bento Fernandes,5113,301.07 RN,Bodó,2425,253.52 RN,Bom Jesus,9440,122.04 RN,Brejinho,11577,61.56 RN,Caiçara do Norte,6016,189.55 RN,Caiçara do Rio do Vento,3308,261.19 RN,Caicó,62709,1228.58 RN,Campo Redondo,10266,213.73 RN,Canguaretama,30916,245.41 RN,Caraúbas,19576,1095.01 RN,Carnaúba dos Dantas,7429,245.65 RN,Carnaubais,9762,542.53 RN,Ceará-Mirim,68141,724.38 RN,Cerro Corá,10916,393.57 RN,Coronel Ezequiel,5405,185.75 RN,Coronel João Pessoa,4772,117.14 
RN,Cruzeta,7967,295.83 RN,Currais Novos,42652,864.35 RN,Doutor Severiano,6492,108.28 RN,Encanto,5231,125.75 RN,Equador,5822,264.99 RN,Espírito Santo,10475,135.84 RN,Extremoz,24569,139.58 RN,Felipe Guerra,5734,268.59 RN,Fernando Pedroza,2854,322.63 RN,Florânia,8959,504.8 RN,Francisco Dantas,2874,181.56 RN,Frutuoso Gomes,4233,63.28 RN,Galinhos,2159,342.22 RN,Goianinha,22481,192.28 RN,Governador Dix-Sept Rosado,12374,1129.38 RN,Grossos,9393,126.46 RN,Guamaré,12404,258.96 RN,Ielmo Marinho,12171,312.03 RN,Ipanguaçu,13856,374.25 RN,Ipueira,2077,127.35 RN,Itajá,6932,203.62 RN,Itaú,5564,133.03 RN,Jaçanã,7925,54.56 RN,Jandaíra,6801,435.95 RN,Janduís,5345,304.9 RN,Januário Cicco,9011,187.21 RN,Japi,5522,188.99 RN,Jardim de Angicos,2607,254.02 RN,Jardim de Piranhas,13506,330.53 RN,Jardim do Seridó,12113,368.65 RN,João Câmara,32227,714.96 RN,João Dias,2601,88.17 RN,José da Penha,5868,117.64 RN,Jucurutu,17692,933.73 RN,Jundiá,3582,44.64 RN,Lagoa d`Anta,6227,105.65 RN,Lagoa de Pedras,6989,117.66 RN,Lagoa de Velhos,2668,112.85 RN,Lagoa Nova,13983,176.3 RN,Lagoa Salgada,7564,79.33 RN,Lajes,10381,676.62 RN,Lajes Pintadas,4612,130.21 RN,Lucrécia,3633,30.93 RN,Luís Gomes,9610,166.64 RN,Macaíba,69467,510.77 RN,Macau,28954,788.04 RN,Major Sales,3536,31.97 RN,Marcelino Vieira,8265,345.71 RN,Martins,8218,169.46 RN,Maxaranguape,10441,131.32 RN,Messias Targino,4188,135.1 RN,Montanhas,11413,82.21 RN,Monte Alegre,20685,210.92 RN,Monte das Gameleiras,2261,71.95 RN,Mossoró,259815,2099.33 RN,Natal,803739,167.26 RN,Nísia Floresta,23784,307.84 RN,Nova Cruz,35490,277.66 RN,Olho-d`Água do Borges,4295,141.17 RN,Ouro Branco,4699,253.3 RN,Paraná,3952,81.39 RN,Paraú,3859,383.21 RN,Parazinho,4845,274.67 RN,Parelhas,20354,513.06 RN,Parnamirim,202456,123.47 RN,Passa e Fica,11100,42.14 RN,Passagem,2895,41.22 RN,Patu,11964,319.13 RN,Pau dos Ferros,27745,259.96 RN,Pedra Grande,3521,221.42 RN,Pedra Preta,2590,294.99 RN,Pedro Avelino,7171,952.76 RN,Pedro Velho,14114,192.71 RN,Pendências,13432,419.14 RN,Pilões,3453,82.69 RN,Poço Branco,13949,230.4 RN,Portalegre,7320,110.05 RN,Porto do Mangue,5217,318.97 RN,Presidente Juscelino,8768,167.35 RN,Pureza,8424,504.3 RN,Rafael Fernandes,4692,78.23 RN,Rafael Godeiro,3063,100.07 RN,Riacho da Cruz,3165,127.22 RN,Riacho de Santana,4156,128.11 RN,Riachuelo,7067,262.89 RN,Rio do Fogo,10059,150.26 RN,Rodolfo Fernandes,4418,154.84 RN,Ruy Barbosa,3595,125.81 RN,Santa Cruz,35797,624.36 RN,Santa Maria,4762,219.57 RN,Santana do Matos,13809,1419.54 RN,Santana do Seridó,2526,188.4 RN,Santo Antônio,22216,301.08 RN,São Bento do Norte,2975,288.73 RN,São Bento do Trairí,3905,190.82 RN,São Fernando,3401,404.43 RN,São Francisco do Oeste,3874,75.59 RN,São Gonçalo do Amarante,87668,249.12 RN,São João do Sabugi,5922,277.01 RN,São José de Mipibu,39776,290.33 RN,São José do Campestre,12356,341.12 RN,São José do Seridó,4231,174.51 RN,São Miguel,22157,171.69 RN,São Miguel do Gostoso,8670,343.75 RN,São Paulo do Potengi,15843,240.43 RN,São Pedro,6235,195.24 RN,São Rafael,8111,469.1 RN,São Tomé,10827,862.59 RN,São Vicente,6028,197.82 RN,Senador Elói de Souza,5637,167.61 RN,Senador Georgino Avelino,3924,25.93 RN,Serra de São Bento,5743,96.63 RN,Serra do Mel,10287,616.51 RN,Serra Negra do Norte,7770,562.4 RN,Serrinha,6581,193.35 RN,Serrinha dos Pintos,4540,122.65 RN,Severiano Melo,5752,157.85 RN,Sítio Novo,5020,213.46 RN,Taboleiro Grande,2317,124.09 RN,Taipu,11836,352.82 RN,Tangará,14175,356.83 RN,Tenente Ananias,9883,223.67 RN,Tenente Laurentino Cruz,5406,74.38 RN,Tibau,3687,169.24 RN,Tibau do Sul,11385,101.82 RN,Timbaúba 
dos Batistas,2295,135.45 RN,Touros,31089,838.67 RN,Triunfo Potiguar,3368,268.73 RN,Umarizal,10659,213.58 RN,Upanema,12992,873.93 RN,Várzea,5236,72.68 RN,Venha-Ver,3821,71.62 RN,Vera Cruz,10719,83.89 RN,Viçosa,1618,37.91 RN,Vila Flor,2872,47.66 RS,Aceguá,4394,1549.38 RS,Água Santa,3722,291.79 RS,Agudo,16722,536.11 RS,Ajuricaba,7255,323.24 RS,Alecrim,7045,314.74 RS,Alegrete,77653,7803.95 RS,Alegria,4301,172.69 RS,Almirante Tamandaré do Sul,2067,265.37 RS,Alpestre,8027,324.64 RS,Alto Alegre,1848,114.45 RS,Alto Feliz,2917,79.17 RS,Alvorada,195673,71.31 RS,Amaral Ferrador,6353,506.46 RS,Ametista do Sul,7323,93.49 RS,André da Rocha,1216,324.33 RS,Anta Gorda,6073,242.96 RS,Antônio Prado,12833,347.62 RS,Arambaré,3693,519.12 RS,Araricá,4864,35.29 RS,Aratiba,6565,342.5 RS,Arroio do Meio,18783,157.96 RS,Arroio do Padre,2730,124.32 RS,Arroio do Sal,7740,120.91 RS,Arroio do Tigre,12648,318.23 RS,Arroio dos Ratos,13606,425.93 RS,Arroio Grande,18470,2513.6 RS,Arvorezinha,10225,271.64 RS,Augusto Pestana,7096,347.44 RS,Áurea,3665,158.29 RS,Bagé,116794,4095.53 RS,Balneário Pinhal,10856,103.76 RS,Barão,5741,124.49 RS,Barão de Cotegipe,6529,260.13 RS,Barão do Triunfo,7018,436.4 RS,Barra do Guarita,3089,64.38 RS,Barra do Quaraí,4012,1056.14 RS,Barra do Ribeiro,12572,728.95 RS,Barra do Rio Azul,2003,147.14 RS,Barra Funda,2367,60.03 RS,Barracão,5357,516.73 RS,Barros Cassal,11133,648.9 RS,Benjamin Constant do Sul,2307,132.4 RS,Bento Gonçalves,107278,381.96 RS,Boa Vista das Missões,2114,194.82 RS,Boa Vista do Buricá,6574,108.73 RS,Boa Vista do Cadeado,2441,701.1 RS,Boa Vista do Incra,2425,503.47 RS,Boa Vista do Sul,2776,94.35 RS,Bom Jesus,11519,2624.67 RS,Bom Princípio,11789,88.5 RS,Bom Progresso,2328,88.74 RS,Bom Retiro do Sul,11472,102.33 RS,Boqueirão do Leão,7673,265.43 RS,Bossoroca,6884,1610.57 RS,Bozano,2200,201.04 RS,Braga,3702,128.99 RS,Brochier,4675,106.73 RS,Butiá,20406,752.25 RS,Caçapava do Sul,33690,3047.11 RS,Cacequi,13676,2369.95 RS,Cachoeira do Sul,83827,3735.16 RS,Cachoeirinha,118278,44.02 RS,Cacique Doble,4868,203.91 RS,Caibaté,4954,259.66 RS,Caiçara,5071,189.2 RS,Camaquã,62764,1679.43 RS,Camargo,2592,138.07 RS,Cambará do Sul,6542,1208.65 RS,Campestre da Serra,3247,538.0 RS,Campina das Missões,6117,225.76 RS,Campinas do Sul,5506,276.16 RS,Campo Bom,60074,60.51 RS,Campo Novo,5459,222.07 RS,Campos Borges,3494,226.58 RS,Candelária,30171,943.95 RS,Cândido Godói,6535,246.28 RS,Candiota,8771,933.83 RS,Canela,39229,253.77 RS,Canguçu,53259,3525.29 RS,Canoas,323827,131.1 RS,Canudos do Vale,1807,81.91 RS,Capão Bonito do Sul,1754,527.12 RS,Capão da Canoa,42040,97.1 RS,Capão do Cipó,3104,1008.65 RS,Capão do Leão,24298,785.37 RS,Capela de Santana,11612,183.76 RS,Capitão,2636,73.97 RS,Capivari do Sul,3890,412.79 RS,Caraá,7312,294.32 RS,Carazinho,59317,665.09 RS,Carlos Barbosa,25192,228.67 RS,Carlos Gomes,1607,83.16 RS,Casca,8651,271.75 RS,Caseiros,3007,235.71 RS,Catuípe,9323,583.26 RS,Caxias do Sul,435564,1644.3 RS,Centenário,2965,134.33 RS,Cerrito,6402,451.7 RS,Cerro Branco,4454,158.77 RS,Cerro Grande,2417,73.44 RS,Cerro Grande do Sul,10268,324.79 RS,Cerro Largo,13289,177.68 RS,Chapada,9377,684.04 RS,Charqueadas,35320,216.51 RS,Charrua,3471,198.12 RS,Chiapetta,4044,396.55 RS,Chuí,5917,202.55 RS,Chuvisca,4944,220.47 RS,Cidreira,12668,245.89 RS,Ciríaco,4922,273.87 RS,Colinas,2420,58.37 RS,Colorado,3550,285.26 RS,Condor,6552,465.19 RS,Constantina,9752,203.0 RS,Coqueiro Baixo,1528,112.28 RS,Coqueiros do Sul,2457,275.55 RS,Coronel Barros,2459,162.95 RS,Coronel Bicaco,7748,492.12 RS,Coronel Pilar,1725,105.45 
RS,Cotiporã,3917,172.38 RS,Coxilha,2826,422.79 RS,Crissiumal,14084,362.15 RS,Cristal,7280,681.63 RS,Cristal do Sul,2826,97.72 RS,Cruz Alta,62821,1360.37 RS,Cruzaltense,2141,166.88 RS,Cruzeiro do Sul,12320,155.55 RS,David Canabarro,4683,174.94 RS,Derrubadas,3190,361.2 RS,Dezesseis de Novembro,2866,216.85 RS,Dilermando de Aguiar,3064,600.55 RS,Dois Irmãos,27572,65.16 RS,Dois Irmãos das Missões,2157,225.68 RS,Dois Lajeados,3278,133.37 RS,Dom Feliciano,14380,1356.17 RS,Dom Pedrito,38898,5192.1 RS,Dom Pedro de Alcântara,2550,78.16 RS,Dona Francisca,3401,114.35 RS,Doutor Maurício Cardoso,5313,252.69 RS,Doutor Ricardo,2030,108.43 RS,Eldorado do Sul,34343,509.73 RS,Encantado,20510,139.16 RS,Encruzilhada do Sul,24534,3348.32 RS,Engenho Velho,1527,71.19 RS,Entre-Ijuís,8938,552.6 RS,Entre Rios do Sul,3080,120.07 RS,Erebango,2970,153.12 RS,Erechim,96087,430.67 RS,Ernestina,3088,239.15 RS,Erval Grande,5163,285.73 RS,Erval Seco,7878,363.89 RS,Esmeralda,3168,829.77 RS,Esperança do Sul,3272,148.38 RS,Espumoso,15240,783.07 RS,Estação,6011,100.27 RS,Estância Velha,42574,52.15 RS,Esteio,80755,27.68 RS,Estrela,30619,184.18 RS,Estrela Velha,3628,281.67 RS,Eugênio de Castro,2798,419.32 RS,Fagundes Varela,2579,134.3 RS,Farroupilha,63635,360.39 RS,Faxinal do Soturno,6672,169.9 RS,Faxinalzinho,2567,143.38 RS,Fazenda Vilanova,3697,84.79 RS,Feliz,12359,95.37 RS,Flores da Cunha,27126,273.45 RS,Floriano Peixoto,2018,168.43 RS,Fontoura Xavier,10719,583.47 RS,Formigueiro,7014,581.99 RS,Forquetinha,2479,93.57 RS,Fortaleza dos Valos,4575,650.33 RS,Frederico Westphalen,28843,264.98 RS,Garibaldi,30689,169.24 RS,Garruchos,3234,799.85 RS,Gaurama,5862,204.26 RS,General Câmara,8447,510.01 RS,Gentil,1677,184.01 RS,Getúlio Vargas,16154,286.57 RS,Giruá,17075,855.92 RS,Glorinha,6891,323.64 RS,Gramado,32273,237.83 RS,Gramado dos Loureiros,2269,131.4 RS,Gramado Xavier,3970,217.53 RS,Gravataí,255660,463.5 RS,Guabiju,1598,148.39 RS,Guaíba,95204,376.95 RS,Guaporé,22814,297.66 RS,Guarani das Missões,8115,290.5 RS,Harmonia,4254,44.76 RS,Herval,6753,1757.84 RS,Herveiras,2954,118.28 RS,Horizontina,18348,232.48 RS,Hulha Negra,6043,822.9 RS,Humaitá,4919,134.51 RS,Ibarama,4371,193.11 RS,Ibiaçá,4710,348.82 RS,Ibiraiaras,7171,300.65 RS,Ibirapuitã,4061,307.03 RS,Ibirubá,19310,607.45 RS,Igrejinha,31660,135.86 RS,Ijuí,78915,689.13 RS,Ilópolis,4102,116.48 RS,Imbé,17670,39.4 RS,Imigrante,3023,73.36 RS,Independência,6618,357.44 RS,Inhacorá,2267,114.11 RS,Ipê,6016,599.25 RS,Ipiranga do Sul,1944,157.88 RS,Iraí,8078,180.96 RS,Itaara,5010,172.99 RS,Itacurubi,3441,1120.87 RS,Itapuca,2344,184.25 RS,Itaqui,38159,3404.04 RS,Itati,2584,206.91 RS,Itatiba do Sul,4171,212.24 RS,Ivorá,2156,122.93 RS,Ivoti,19874,63.15 RS,Jaboticaba,4098,128.05 RS,Jacuizinho,2507,338.54 RS,Jacutinga,3633,179.3 RS,Jaguarão,27931,2054.38 RS,Jaguari,11473,673.4 RS,Jaquirana,4177,907.94 RS,Jari,3575,856.46 RS,Jóia,8331,1235.88 RS,Júlio de Castilhos,19579,1929.38 RS,Lagoa Bonita do Sul,2662,108.5 RS,Lagoa dos Três Cantos,1598,138.64 RS,Lagoa Vermelha,27525,1263.5 RS,Lagoão,6185,383.6 RS,Lajeado,71445,90.09 RS,Lajeado do Bugre,2487,67.93 RS,Lavras do Sul,7679,2600.6 RS,Liberato Salzano,5780,245.63 RS,Lindolfo Collor,5227,32.99 RS,Linha Nova,1624,63.73 RS,Maçambara,4738,1682.82 RS,Machadinho,5510,335.03 RS,Mampituba,3003,157.92 RS,Manoel Viana,7072,1390.7 RS,Maquiné,6905,621.69 RS,Maratá,2527,81.18 RS,Marau,36364,649.3 RS,Marcelino Ramos,5134,229.76 RS,Mariana Pimentel,3768,337.79 RS,Mariano Moro,2210,98.98 RS,Marques de Souza,4068,125.18 RS,Mata,5111,311.88 RS,Mato Castelhano,2470,238.37 
RS,Mato Leitão,3865,45.9 RS,Mato Queimado,1799,114.64 RS,Maximiliano de Almeida,4911,208.44 RS,Minas do Leão,7631,424.34 RS,Miraguaí,4855,130.39 RS,Montauri,1542,82.08 RS,Monte Alegre dos Campos,3102,549.74 RS,Monte Belo do Sul,2670,68.37 RS,Montenegro,59415,424.01 RS,Mormaço,2749,146.11 RS,Morrinhos do Sul,3182,165.44 RS,Morro Redondo,6227,244.65 RS,Morro Reuter,5676,87.64 RS,Mostardas,12124,1982.99 RS,Muçum,4791,110.89 RS,Muitos Capões,2988,1197.93 RS,Muliterno,1813,111.13 RS,Não-Me-Toque,15936,361.67 RS,Nicolau Vergueiro,1721,155.82 RS,Nonoai,12074,468.91 RS,Nova Alvorada,3182,149.36 RS,Nova Araçá,4001,74.36 RS,Nova Bassano,8840,211.61 RS,Nova Boa Vista,1960,94.24 RS,Nova Bréscia,3184,102.82 RS,Nova Candelária,2751,97.83 RS,Nova Esperança do Sul,4671,191.0 RS,Nova Hartz,18346,62.56 RS,Nova Pádua,2450,103.24 RS,Nova Palma,6342,313.51 RS,Nova Petrópolis,19045,291.3 RS,Nova Prata,22830,258.74 RS,Nova Ramada,2437,254.76 RS,Nova Roma do Sul,3343,149.05 RS,Nova Santa Rita,22716,217.87 RS,Novo Barreiro,3978,123.58 RS,Novo Cabrais,3855,192.29 RS,Novo Hamburgo,238940,223.82 RS,Novo Machado,3925,218.67 RS,Novo Tiradentes,2277,75.4 RS,Novo Xingu,1757,80.59 RS,Osório,40906,663.55 RS,Paim Filho,4243,182.18 RS,Palmares do Sul,10969,949.21 RS,Palmeira das Missões,34328,1419.43 RS,Palmitinho,6920,144.05 RS,Panambi,38058,490.86 RS,Pantano Grande,9895,841.23 RS,Paraí,6812,120.42 RS,Paraíso do Sul,7336,337.84 RS,Pareci Novo,3511,57.41 RS,Parobé,51502,108.65 RS,Passa Sete,5154,304.54 RS,Passo do Sobrado,6011,265.11 RS,Passo Fundo,184826,783.42 RS,Paulo Bento,2196,148.36 RS,Paverama,8044,171.86 RS,Pedras Altas,2212,1377.37 RS,Pedro Osório,7811,608.79 RS,Pejuçara,3973,414.24 RS,Pelotas,328275,1610.08 RS,Picada Café,5182,85.15 RS,Pinhal,2513,68.21 RS,Pinhal da Serra,2130,438.0 RS,Pinhal Grande,4471,477.13 RS,Pinheirinho do Vale,4497,105.61 RS,Pinheiro Machado,12780,2249.56 RS,Pirapó,2757,291.74 RS,Piratini,19841,3539.69 RS,Planalto,10524,230.42 RS,Poço das Antas,2017,65.06 RS,Pontão,3857,505.71 RS,Ponte Preta,1750,99.87 RS,Portão,30920,159.89 RS,Porto Alegre,1409351,496.68 RS,Porto Lucena,5413,250.08 RS,Porto Mauá,2542,105.56 RS,Porto Vera Cruz,1852,113.65 RS,Porto Xavier,10558,280.51 RS,Pouso Novo,1875,106.53 RS,Presidente Lucena,2484,49.43 RS,Progresso,6163,255.86 RS,Protásio Alves,2000,172.82 RS,Putinga,4141,205.05 RS,Quaraí,23021,3147.63 RS,Quatro Irmãos,1775,267.99 RS,Quevedos,2710,543.36 RS,Quinze de Novembro,3653,223.64 RS,Redentora,10222,302.68 RS,Relvado,2155,123.44 RS,Restinga Seca,15849,956.05 RS,Rio dos Índios,3616,235.32 RS,Rio Grande,197228,2709.52 RS,Rio Pardo,37591,2050.59 RS,Riozinho,4330,239.56 RS,Roca Sales,10284,208.63 RS,Rodeio Bonito,5743,83.2 RS,Rolador,2546,295.01 RS,Rolante,19485,295.64 RS,Ronda Alta,10221,419.34 RS,Rondinha,5518,252.21 RS,Roque Gonzales,7203,346.62 RS,Rosário do Sul,39707,4369.65 RS,Sagrada Família,2595,78.25 RS,Saldanha Marinho,2869,221.61 RS,Salto do Jacuí,11880,507.42 RS,Salvador das Missões,2669,94.04 RS,Salvador do Sul,6747,99.82 RS,Sananduva,15373,504.55 RS,Santa Bárbara do Sul,8829,975.51 RS,Santa Cecília do Sul,1655,199.4 RS,Santa Clara do Sul,5697,86.64 RS,Santa Cruz do Sul,118374,733.41 RS,Santa Margarida do Sul,2352,955.3 RS,Santa Maria,261031,1788.12 RS,Santa Maria do Herval,6053,139.6 RS,Santa Rosa,68587,489.8 RS,Santa Tereza,1720,72.39 RS,Santa Vitória do Palmar,30990,5244.35 RS,Santana da Boa Vista,8242,1420.62 RS,Santana do Livramento,82464,6950.35 RS,Santiago,49071,2413.13 RS,Santo Ângelo,76275,680.5 RS,Santo Antônio da Patrulha,39685,1049.81 
RS,Santo Antônio das Missões,11210,1710.87 RS,Santo Antônio do Palma,2139,126.09 RS,Santo Antônio do Planalto,1987,203.44 RS,Santo Augusto,13968,468.1 RS,Santo Cristo,14378,366.89 RS,Santo Expedito do Sul,2461,125.74 RS,São Borja,61671,3616.02 RS,São Domingos do Sul,2926,78.95 RS,São Francisco de Assis,19254,2508.45 RS,São Francisco de Paula,20537,3272.98 RS,São Gabriel,60425,5023.82 RS,São Jerônimo,22134,936.38 RS,São João da Urtiga,4726,171.18 RS,São João do Polêsine,2635,85.17 RS,São Jorge,2774,118.05 RS,São José das Missões,2720,98.07 RS,São José do Herval,2204,103.09 RS,São José do Hortêncio,4094,64.11 RS,São José do Inhacorá,2200,77.81 RS,São José do Norte,25503,1118.1 RS,São José do Ouro,6904,334.77 RS,São José do Sul,2082,59.03 RS,São José dos Ausentes,3290,1173.95 RS,São Leopoldo,214087,102.74 RS,São Lourenço do Sul,43111,2036.13 RS,São Luiz Gonzaga,34556,1295.68 RS,São Marcos,20103,256.25 RS,São Martinho,5773,171.66 RS,São Martinho da Serra,3201,669.55 RS,São Miguel das Missões,7421,1229.84 RS,São Nicolau,5727,485.32 RS,São Paulo das Missões,6364,223.89 RS,São Pedro da Serra,3315,35.39 RS,São Pedro das Missões,1886,79.97 RS,São Pedro do Butiá,2873,107.63 RS,São Pedro do Sul,16368,873.59 RS,São Sebastião do Caí,21932,111.44 RS,São Sepé,23798,2200.69 RS,São Valentim,3632,154.19 RS,São Valentim do Sul,2168,92.24 RS,São Valério do Sul,2647,107.97 RS,São Vendelino,1944,32.09 RS,São Vicente do Sul,8440,1175.23 RS,Sapiranga,74985,138.31 RS,Sapucaia do Sul,130957,58.31 RS,Sarandi,21285,353.39 RS,Seberi,10897,301.42 RS,Sede Nova,3011,119.3 RS,Segredo,7158,247.44 RS,Selbach,4929,177.64 RS,Senador Salgado Filho,2814,147.21 RS,Sentinela do Sul,5198,281.96 RS,Serafina Corrêa,14253,163.28 RS,Sério,2281,99.63 RS,Sertão,6294,439.47 RS,Sertão Santana,5850,251.85 RS,Sete de Setembro,2124,129.99 RS,Severiano de Almeida,3842,167.6 RS,Silveira Martins,2449,118.42 RS,Sinimbu,10068,510.12 RS,Sobradinho,14283,130.39 RS,Soledade,30044,1213.41 RS,Tabaí,4131,94.75 RS,Tapejara,19250,238.8 RS,Tapera,10448,179.66 RS,Tapes,16629,806.3 RS,Taquara,54643,457.86 RS,Taquari,26092,349.97 RS,Taquaruçu do Sul,2966,76.85 RS,Tavares,5351,604.25 RS,Tenente Portela,13719,338.08 RS,Terra de Areia,9878,141.77 RS,Teutônia,27272,178.62 RS,Tio Hugo,2724,114.24 RS,Tiradentes do Sul,6461,234.48 RS,Toropi,2952,202.98 RS,Torres,34656,160.57 RS,Tramandaí,41585,144.41 RS,Travesseiro,2314,81.12 RS,Três Arroios,2855,148.58 RS,Três Cachoeiras,10217,251.06 RS,Três Coroas,23848,185.54 RS,Três de Maio,23726,422.2 RS,Três Forquilhas,2914,217.26 RS,Três Palmeiras,4381,180.6 RS,Três Passos,23965,268.4 RS,Trindade do Sul,5787,268.42 RS,Triunfo,25793,818.8 RS,Tucunduva,5898,180.81 RS,Tunas,4395,218.07 RS,Tupanci do Sul,1573,135.12 RS,Tupanciretã,22281,2251.86 RS,Tupandi,3924,59.54 RS,Tuparendi,8557,307.68 RS,Turuçu,3522,253.64 RS,Ubiretama,2296,126.69 RS,União da Serra,1487,130.99 RS,Unistalda,2450,602.39 RS,Uruguaiana,125435,5715.76 RS,Vacaria,61342,2124.58 RS,Vale do Sol,11077,328.23 RS,Vale Real,5118,45.09 RS,Vale Verde,3253,329.73 RS,Vanini,1984,64.87 RS,Venâncio Aires,65946,773.24 RS,Vera Cruz,23983,309.62 RS,Veranópolis,22810,289.34 RS,Vespasiano Correa,1974,113.89 RS,Viadutos,5311,268.36 RS,Viamão,239384,1497.02 RS,Vicente Dutra,5285,193.06 RS,Victor Graeff,3036,238.27 RS,Vila Flores,3207,107.91 RS,Vila Lângaro,2152,152.17 RS,Vila Maria,4221,181.44 RS,Vila Nova do Sul,4221,507.94 RS,Vista Alegre,2832,77.46 RS,Vista Alegre do Prata,1569,119.33 RS,Vista Gaúcha,2759,88.72 RS,Vitória das Missões,3485,259.61 RS,Westfália,2793,64.0 
RS,Xangri-lá,12434,60.69 RO,Alta Floresta d`Oeste,24392,7067.03 RO,Alto Alegre dos Parecis,12816,3958.27 RO,Alto Paraíso,17135,2651.82 RO,Alvorada d`Oeste,16853,3029.19 RO,Ariquemes,90353,4426.57 RO,Buritis,32383,3265.81 RO,Cabixi,6313,1314.36 RO,Cacaulândia,5736,1961.78 RO,Cacoal,78574,3792.8 RO,Campo Novo de Rondônia,12665,3442.01 RO,Candeias do Jamari,19779,6843.87 RO,Castanheiras,3575,892.84 RO,Cerejeiras,17029,2783.3 RO,Chupinguaia,8301,5126.72 RO,Colorado do Oeste,18591,1451.06 RO,Corumbiara,8783,3060.32 RO,Costa Marques,13678,4987.18 RO,Cujubim,15854,3863.94 RO,Espigão d`Oeste,28729,4518.03 RO,Governador Jorge Teixeira,10512,5067.38 RO,Guajará-Mirim,41656,24855.72 RO,Itapuã do Oeste,8566,4081.58 RO,Jaru,52005,2944.13 RO,Ji-Paraná,116610,6896.74 RO,Machadinho d`Oeste,31135,8509.31 RO,Ministro Andreazza,10352,798.08 RO,Mirante da Serra,11878,1191.88 RO,Monte Negro,14091,1931.38 RO,Nova Brasilândia d`Oeste,19874,1703.01 RO,Nova Mamoré,22546,10071.64 RO,Nova União,7493,807.13 RO,Novo Horizonte do Oeste,10240,843.45 RO,Ouro Preto do Oeste,37928,1969.85 RO,Parecis,4810,2548.68 RO,Pimenta Bueno,33822,6240.93 RO,Pimenteiras do Oeste,2315,6014.73 RO,Porto Velho,428527,34096.39 RO,Presidente Médici,22319,1758.47 RO,Primavera de Rondônia,3524,605.69 RO,Rio Crespo,3316,1717.64 RO,Rolim de Moura,50648,1457.89 RO,Santa Luzia d`Oeste,8886,1197.8 RO,São Felipe d`Oeste,6018,541.65 RO,São Francisco do Guaporé,16035,10959.77 RO,São Miguel do Guaporé,21828,7460.22 RO,Seringueiras,11629,3773.51 RO,Teixeirópolis,4888,459.98 RO,Theobroma,10649,2197.41 RO,Urupá,12974,831.86 RO,Vale do Anari,9384,3135.14 RO,Vale do Paraíso,8210,965.68 RO,Vilhena,76202,11518.94 RR,Alto Alegre,16448,25567.02 RR,Amajari,9327,28472.33 RR,Boa Vista,284313,5687.04 RR,Bonfim,10943,8095.42 RR,Cantá,13902,7664.83 RR,Caracaraí,18398,47411.03 RR,Caroebe,8114,12065.75 RR,Iracema,8696,14409.58 RR,Mucajaí,14792,12461.21 RR,Normandia,8940,6966.81 RR,Pacaraima,10433,8028.48 RR,Rorainópolis,24279,33594.05 RR,São João da Baliza,6769,4284.51 RR,São Luiz,6750,1526.89 RR,Uiramutã,8375,8065.56 SC,Abdon Batista,2653,235.83 SC,Abelardo Luz,17100,953.06 SC,Agrolândia,9323,207.55 SC,Agronômica,4904,130.53 SC,Água Doce,6961,1314.27 SC,Águas de Chapecó,6110,139.83 SC,Águas Frias,2424,76.14 SC,Águas Mornas,5548,327.36 SC,Alfredo Wagner,9410,732.77 SC,Alto Bela Vista,2005,103.98 SC,Anchieta,6380,228.34 SC,Angelina,5250,500.04 SC,Anita Garibaldi,8623,587.77 SC,Anitápolis,3214,542.12 SC,Antônio Carlos,7458,228.65 SC,Apiúna,9600,493.34 SC,Arabutã,4193,132.84 SC,Araquari,24810,383.99 SC,Araranguá,61310,303.3 SC,Armazém,7753,173.58 SC,Arroio Trinta,3502,94.3 SC,Arvoredo,2260,90.77 SC,Ascurra,7412,110.9 SC,Atalanta,3300,94.19 SC,Aurora,5549,206.61 SC,Balneário Arroio do Silva,9586,95.26 SC,Balneário Barra do Sul,8430,111.27 SC,Balneário Camboriú,108089,46.24 SC,Balneário Gaivota,8234,145.76 SC,Balneário Piçarras,17078,99.41 SC,Bandeirante,2906,147.37 SC,Barra Bonita,1878,93.48 SC,Barra Velha,22386,140.1 SC,Bela Vista do Toldo,6004,538.13 SC,Belmonte,2635,92.39 SC,Benedito Novo,10336,388.8 SC,Biguaçu,58206,370.87 SC,Blumenau,309011,518.5 SC,Bocaina do Sul,3290,512.85 SC,Bom Jardim da Serra,4395,935.87 SC,Bom Jesus,2526,63.47 SC,Bom Jesus do Oeste,2132,67.09 SC,Bom Retiro,8942,1055.55 SC,Bombinhas,14293,35.91 SC,Botuverá,4468,296.19 SC,Braço do Norte,29018,211.86 SC,Braço do Trombudo,3457,90.32 SC,Brunópolis,2850,337.04 SC,Brusque,105503,283.22 SC,Caçador,70762,984.29 SC,Caibi,6219,174.84 SC,Calmon,3387,638.18 SC,Camboriú,62361,212.34 SC,Campo 
Alegre,11748,499.07 SC,Campo Belo do Sul,7483,1027.65 SC,Campo Erê,9370,479.09 SC,Campos Novos,32824,1719.37 SC,Canelinha,10603,152.56 SC,Canoinhas,52765,1140.4 SC,Capão Alto,2753,1335.84 SC,Capinzal,20769,244.2 SC,Capivari de Baixo,21674,53.34 SC,Catanduvas,9555,197.3 SC,Caxambu do Sul,4411,140.71 SC,Celso Ramos,2771,208.27 SC,Cerro Negro,3581,417.34 SC,Chapadão do Lageado,2762,124.76 SC,Chapecó,183530,626.06 SC,Cocal do Sul,15159,71.13 SC,Concórdia,68621,799.88 SC,Cordilheira Alta,3767,82.86 SC,Coronel Freitas,10213,233.97 SC,Coronel Martins,2458,107.3 SC,Correia Pinto,14785,651.12 SC,Corupá,13852,402.79 SC,Criciúma,192308,235.71 SC,Cunha Porã,10613,217.92 SC,Cunhataí,1882,55.77 SC,Curitibanos,37748,948.74 SC,Descanso,8634,286.14 SC,Dionísio Cerqueira,14811,379.19 SC,Dona Emma,3721,181.17 SC,Doutor Pedrinho,3604,374.63 SC,Entre Rios,3018,104.55 SC,Ermo,2050,63.44 SC,Erval Velho,4352,207.36 SC,Faxinal dos Guedes,10661,339.7 SC,Flor do Sertão,1588,58.89 SC,Florianópolis,421240,675.41 SC,Formosa do Sul,2601,100.11 SC,Forquilhinha,22548,183.13 SC,Fraiburgo,34553,547.85 SC,Frei Rogério,2474,159.22 SC,Galvão,3472,121.96 SC,Garopaba,18138,115.41 SC,Garuva,14761,501.97 SC,Gaspar,57981,386.78 SC,Governador Celso Ramos,12999,117.18 SC,Grão Pará,6223,338.16 SC,Gravatal,10635,164.75 SC,Guabiruba,18430,174.68 SC,Guaraciaba,10498,330.37 SC,Guaramirim,35172,268.5 SC,Guarujá do Sul,4908,100.22 SC,Guatambú,4679,205.88 SC,Herval d`Oeste,21239,217.33 SC,Ibiam,1945,146.72 SC,Ibicaré,3373,155.79 SC,Ibirama,17330,247.35 SC,Içara,58833,293.55 SC,Ilhota,12355,252.88 SC,Imaruí,11672,542.63 SC,Imbituba,40170,182.93 SC,Imbuia,5707,123.04 SC,Indaial,54854,430.79 SC,Iomerê,2739,113.75 SC,Ipira,4752,154.57 SC,Iporã do Oeste,8409,199.72 SC,Ipuaçu,6798,260.89 SC,Ipumirim,7220,247.37 SC,Iraceminha,4253,163.23 SC,Irani,9531,325.74 SC,Irati,2096,78.28 SC,Irineópolis,10448,589.56 SC,Itá,6426,165.84 SC,Itaiópolis,20301,1295.43 SC,Itajaí,183373,288.27 SC,Itapema,45797,57.8 SC,Itapiranga,15409,282.7 SC,Itapoá,14763,248.41 SC,Ituporanga,22250,336.93 SC,Jaborá,4041,191.93 SC,Jacinto Machado,10609,431.38 SC,Jaguaruna,17290,328.35 SC,Jaraguá do Sul,143123,529.54 SC,Jardinópolis,1766,67.68 SC,Joaçaba,27020,232.23 SC,Joinville,515288,1126.11 SC,José Boiteux,4721,405.23 SC,Jupiá,2148,92.05 SC,Lacerdópolis,2199,68.89 SC,Lages,156727,2631.5 SC,Laguna,51562,441.57 SC,Lajeado Grande,1490,65.28 SC,Laurentino,6004,79.59 SC,Lauro Muller,14367,270.78 SC,Lebon Régis,11838,941.49 SC,Leoberto Leal,3365,291.21 SC,Lindóia do Sul,4642,188.64 SC,Lontras,10244,197.11 SC,Luiz Alves,10438,259.88 SC,Luzerna,5600,118.38 SC,Macieira,1826,259.64 SC,Mafra,52912,1404.03 SC,Major Gercino,3279,285.72 SC,Major Vieira,7479,525.5 SC,Maracajá,6404,62.46 SC,Maravilha,22101,171.28 SC,Marema,2203,104.07 SC,Massaranduba,14674,374.08 SC,Matos Costa,2839,433.07 SC,Meleiro,7000,187.06 SC,Mirim Doce,2513,335.73 SC,Modelo,4045,91.11 SC,Mondaí,10231,202.15 SC,Monte Carlo,9312,193.52 SC,Monte Castelo,8346,573.59 SC,Morro da Fumaça,16126,83.12 SC,Morro Grande,2890,258.18 SC,Navegantes,60556,112.02 SC,Nova Erechim,4275,64.89 SC,Nova Itaberaba,4267,137.55 SC,Nova Trento,12190,402.89 SC,Nova Veneza,13309,295.04 SC,Novo Horizonte,2750,151.85 SC,Orleans,21393,548.79 SC,Otacílio Costa,16337,845.01 SC,Ouro,7372,213.67 SC,Ouro Verde,2271,189.22 SC,Paial,1763,85.76 SC,Painel,2353,740.18 SC,Palhoça,137334,395.13 SC,Palma Sola,7765,330.1 SC,Palmeira,2373,289.3 SC,Palmitos,16020,352.51 SC,Papanduva,17928,747.86 SC,Paraíso,4080,181.24 SC,Passo de Torres,6627,95.11 SC,Passos 
Maia,4425,619.16 SC,Paulo Lopes,6692,449.68 SC,Pedras Grandes,4107,159.31 SC,Penha,25141,58.76 SC,Peritiba,2988,95.84 SC,Petrolândia,6131,305.87 SC,Pinhalzinho,16332,128.16 SC,Pinheiro Preto,3147,65.86 SC,Piratuba,4786,145.98 SC,Planalto Alegre,2654,62.46 SC,Pomerode,27759,214.73 SC,Ponte Alta,4894,568.96 SC,Ponte Alta do Norte,3303,399.24 SC,Ponte Serrada,11031,564.49 SC,Porto Belo,16083,93.63 SC,Porto União,33493,845.34 SC,Pouso Redondo,14810,359.39 SC,Praia Grande,7267,284.13 SC,Presidente Castello Branco,1725,65.61 SC,Presidente Getúlio,14887,294.27 SC,Presidente Nereu,2284,225.66 SC,Princesa,2758,86.15 SC,Quilombo,10248,280.26 SC,Rancho Queimado,2748,286.29 SC,Rio das Antas,6143,318.0 SC,Rio do Campo,6192,506.25 SC,Rio do Oeste,7090,247.81 SC,Rio do Sul,61198,260.36 SC,Rio dos Cedros,10284,554.08 SC,Rio Fortuna,4446,302.87 SC,Rio Negrinho,39846,907.31 SC,Rio Rufino,2436,282.5 SC,Riqueza,4838,191.97 SC,Rodeio,10922,129.93 SC,Romelândia,5551,225.85 SC,Salete,7370,179.35 SC,Saltinho,3961,156.53 SC,Salto Veloso,4301,105.07 SC,Sangão,10400,82.89 SC,Santa Cecília,15757,1145.81 SC,Santa Helena,2382,81.7 SC,Santa Rosa de Lima,2065,202.0 SC,Santa Rosa do Sul,8054,151.03 SC,Santa Terezinha,8767,715.26 SC,Santa Terezinha do Progresso,2896,118.81 SC,Santiago do Sul,1465,73.84 SC,Santo Amaro da Imperatriz,19823,344.05 SC,São Bento do Sul,74801,501.63 SC,São Bernardino,2677,144.86 SC,São Bonifácio,3008,460.36 SC,São Carlos,10291,161.29 SC,São Cristovão do Sul,5012,351.1 SC,São Domingos,9491,384.59 SC,São Francisco do Sul,42520,498.65 SC,São João Batista,26260,221.05 SC,São João do Itaperiú,3435,151.42 SC,São João do Oeste,6036,163.3 SC,São João do Sul,7002,183.36 SC,São Joaquim,24812,1892.26 SC,São José,209804,152.39 SC,São José do Cedro,13684,281.03 SC,São José do Cerrito,9273,944.92 SC,São Lourenço do Oeste,21792,360.48 SC,São Ludgero,10993,107.66 SC,São Martinho,3209,223.89 SC,São Miguel da Boa Vista,1904,71.41 SC,São Miguel do Oeste,36306,234.06 SC,São Pedro de Alcântara,4704,140.02 SC,Saudades,9016,206.6 SC,Schroeder,15316,164.38 SC,Seara,16936,311.39 SC,Serra Alta,3285,92.35 SC,Siderópolis,12998,261.66 SC,Sombrio,26613,143.33 SC,Sul Brasil,2766,112.87 SC,Taió,17260,692.88 SC,Tangará,8674,388.24 SC,Tigrinhos,1757,57.94 SC,Tijucas,30960,279.58 SC,Timbé do Sul,5308,330.09 SC,Timbó,36774,127.41 SC,Timbó Grande,7167,598.47 SC,Três Barras,18129,437.56 SC,Treviso,3527,157.08 SC,Treze de Maio,6876,161.67 SC,Treze Tílias,6341,186.64 SC,Trombudo Central,6553,108.62 SC,Tubarão,97235,301.76 SC,Tunápolis,4633,133.23 SC,Turvo,11854,235.52 SC,União do Oeste,2910,92.62 SC,Urubici,10699,1017.64 SC,Urupema,2482,350.04 SC,Urussanga,20223,254.87 SC,Vargeão,3532,166.65 SC,Vargem,2808,350.15 SC,Vargem Bonita,4793,298.5 SC,Vidal Ramos,6290,342.89 SC,Videira,47188,380.27 SC,Vitor Meireles,5207,370.52 SC,Witmarsum,3600,151.98 SC,Xanxerê,44128,377.76 SC,Xavantina,4142,216.69 SC,Xaxim,25713,293.28 SC,Zortéa,2991,189.72 SP,Adamantina,33797,411.39 SP,Adolfo,3557,211.08 SP,Aguaí,32148,474.74 SP,Águas da Prata,7584,142.96 SP,Águas de Lindóia,17266,60.13 SP,Águas de Santa Bárbara,5601,404.94 SP,Águas de São Pedro,2707,5.54 SP,Agudos,34524,966.16 SP,Alambari,4884,159.27 SP,Alfredo Marcondes,3891,118.4 SP,Altair,3815,313.86 SP,Altinópolis,15607,928.96 SP,Alto Alegre,4102,319.04 SP,Alumínio,16839,83.66 SP,Álvares Florence,3897,362.94 SP,Álvares Machado,23513,347.38 SP,Álvaro de Carvalho,4650,153.17 SP,Alvinlândia,3000,84.8 SP,Americana,210638,133.93 SP,Américo Brasiliense,34478,122.74 SP,Américo de Campos,5706,253.1 
SP,Amparo,65829,445.55 SP,Analândia,4293,325.67 SP,Andradina,55334,964.19 SP,Angatuba,22210,1027.98 SP,Anhembi,5653,736.56 SP,Anhumas,3738,320.45 SP,Aparecida,35007,121.08 SP,Aparecida d`Oeste,4450,179.02 SP,Apiaí,25191,974.32 SP,Araçariguama,17080,145.2 SP,Araçatuba,181579,1167.44 SP,Araçoiaba da Serra,27299,255.43 SP,Aramina,5152,202.89 SP,Arandu,6123,285.91 SP,Arapeí,2493,156.9 SP,Araraquara,208662,1003.67 SP,Araras,118843,644.83 SP,Arco-Íris,1925,264.73 SP,Arealva,7841,504.97 SP,Areias,3696,305.23 SP,Areiópolis,10579,85.77 SP,Ariranha,8547,133.15 SP,Artur Nogueira,44177,178.03 SP,Arujá,74905,96.11 SP,Aspásia,1809,69.34 SP,Assis,95144,460.31 SP,Atibaia,126603,478.52 SP,Auriflama,14202,433.99 SP,Avaí,4959,540.46 SP,Avanhandava,11310,338.64 SP,Avaré,82934,1213.06 SP,Bady Bassitt,14603,110.36 SP,Balbinos,3702,91.64 SP,Bálsamo,8160,150.6 SP,Bananal,10223,616.43 SP,Barão de Antonina,3116,153.14 SP,Barbosa,6593,205.15 SP,Bariri,31593,444.07 SP,Barra Bonita,35246,149.91 SP,Barra do Chapéu,5244,405.68 SP,Barra do Turvo,7729,1007.82 SP,Barretos,112101,1565.64 SP,Barrinha,28496,145.64 SP,Barueri,240749,65.69 SP,Bastos,20445,171.89 SP,Batatais,56476,849.53 SP,Bauru,343937,667.68 SP,Bebedouro,75035,683.3 SP,Bento de Abreu,2674,301.4 SP,Bernardino de Campos,10775,244.2 SP,Bertioga,47645,490.15 SP,Bilac,7048,157.9 SP,Birigui,108728,530.92 SP,Biritiba-Mirim,28575,317.41 SP,Boa Esperança do Sul,13645,690.76 SP,Bocaina,10859,363.93 SP,Bofete,9618,653.54 SP,Boituva,48314,248.95 SP,Bom Jesus dos Perdões,19708,108.37 SP,Bom Sucesso de Itararé,3571,133.58 SP,Borá,805,118.45 SP,Boracéia,4268,122.11 SP,Borborema,14529,552.26 SP,Borebi,2293,347.99 SP,Botucatu,127328,1482.64 SP,Bragança Paulista,146744,512.62 SP,Braúna,5021,195.33 SP,Brejo Alegre,2573,105.4 SP,Brodowski,21107,278.46 SP,Brotas,21580,1101.38 SP,Buri,18563,1195.91 SP,Buritama,15418,326.76 SP,Buritizal,4053,266.42 SP,Cabrália Paulista,4365,239.91 SP,Cabreúva,41604,260.23 SP,Caçapava,84752,369.03 SP,Cachoeira Paulista,30091,287.99 SP,Caconde,18538,469.98 SP,Cafelândia,16607,920.1 SP,Caiabu,4072,252.84 SP,Caieiras,86529,96.1 SP,Caiuá,5039,549.89 SP,Cajamar,64114,131.33 SP,Cajati,28372,454.44 SP,Cajobi,9768,176.9 SP,Cajuru,23371,660.09 SP,Campina do Monte Alegre,5567,185.03 SP,Campinas,1080113,794.43 SP,Campo Limpo Paulista,74074,79.4 SP,Campos do Jordão,47789,290.06 SP,Campos Novos Paulista,4539,483.98 SP,Cananéia,12226,1239.38 SP,Canas,4385,53.26 SP,Cândido Mota,29884,596.21 SP,Cândido Rodrigues,2668,70.31 SP,Canitar,4369,57.23 SP,Capão Bonito,46178,1640.23 SP,Capela do Alto,17532,169.89 SP,Capivari,48576,322.88 SP,Caraguatatuba,100840,485.1 SP,Carapicuíba,369584,34.55 SP,Cardoso,11805,639.73 SP,Casa Branca,28307,864.18 SP,Cássia dos Coqueiros,2634,191.68 SP,Castilho,18003,1065.8 SP,Catanduva,112820,290.6 SP,Catiguá,7127,148.39 SP,Cedral,7972,197.69 SP,Cerqueira César,17532,511.62 SP,Cerquilho,39617,127.8 SP,Cesário Lange,15540,190.77 SP,Charqueada,15085,175.85 SP,Chavantes,12114,188.1 SP,Clementina,7065,168.84 SP,Colina,17371,422.57 SP,Colômbia,5994,729.25 SP,Conchal,25229,182.79 SP,Conchas,16288,466.02 SP,Cordeirópolis,21080,137.58 SP,Coroados,5238,246.36 SP,Coronel Macedo,5001,303.93 SP,Corumbataí,3874,278.62 SP,Cosmópolis,58827,154.66 SP,Cosmorama,7214,441.71 SP,Cotia,201150,324.01 SP,Cravinhos,31691,311.4 SP,Cristais Paulista,7588,385.23 SP,Cruzália,2274,149.05 SP,Cruzeiro,77039,305.7 SP,Cubatão,118720,142.88 SP,Cunha,21866,1407.32 SP,Descalvado,31056,753.71 SP,Diadema,386089,30.8 SP,Dirce Reis,1689,88.35 SP,Divinolândia,11208,222.13 
SP,Dobrada,7939,149.73 SP,Dois Córregos,24761,632.97 SP,Dolcinópolis,2096,78.34 SP,Dourado,8609,205.87 SP,Dracena,43258,488.04 SP,Duartina,12251,264.56 SP,Dumont,8143,111.36 SP,Echaporã,6318,515.43 SP,Eldorado,14641,1654.26 SP,Elias Fausto,15775,202.69 SP,Elisiário,3120,93.98 SP,Embaúba,2423,83.13 SP,Embu das Artes,240230,70.39 SP,Embu-Guaçu,62769,155.63 SP,Emilianópolis,3020,224.49 SP,Engenheiro Coelho,15721,109.94 SP,Espírito Santo do Pinhal,41907,389.42 SP,Espírito Santo do Turvo,4244,193.66 SP,Estiva Gerbi,10044,74.21 SP,Estrela do Norte,2658,263.42 SP,Estrela d`Oeste,8208,296.41 SP,Euclides da Cunha Paulista,9585,575.21 SP,Fartura,15320,429.17 SP,Fernando Prestes,5534,170.67 SP,Fernandópolis,64696,550.03 SP,Fernão,1563,100.76 SP,Ferraz de Vasconcelos,168306,29.57 SP,Flora Rica,1752,225.3 SP,Floreal,3003,204.3 SP,Flórida Paulista,12848,525.08 SP,Florínia,2829,225.63 SP,Franca,318640,605.68 SP,Francisco Morato,154472,49.07 SP,Franco da Rocha,131604,134.16 SP,Gabriel Monteiro,2708,138.55 SP,Gália,7011,356.01 SP,Garça,43115,555.63 SP,Gastão Vidigal,4193,180.94 SP,Gavião Peixoto,4419,243.77 SP,General Salgado,10669,493.35 SP,Getulina,10765,678.7 SP,Glicério,4565,273.56 SP,Guaiçara,10670,271.14 SP,Guaimbê,5425,218.01 SP,Guaíra,37404,1258.48 SP,Guapiaçu,17869,324.92 SP,Guapiara,17998,408.29 SP,Guará,19858,362.48 SP,Guaraçaí,8435,569.87 SP,Guaraci,9976,641.5 SP,Guarani d`Oeste,1970,85.53 SP,Guarantã,6404,461.15 SP,Guararapes,30597,956.34 SP,Guararema,25844,270.82 SP,Guaratinguetá,112072,752.64 SP,Guareí,14565,566.35 SP,Guariba,35486,270.29 SP,Guarujá,290752,143.45 SP,Guarulhos,1221979,318.68 SP,Guatapará,6966,413.74 SP,Guzolândia,4754,252.01 SP,Herculândia,8696,364.64 SP,Holambra,11299,65.58 SP,Hortolândia,192692,62.28 SP,Iacanga,10013,547.39 SP,Iacri,6419,322.63 SP,Iaras,6376,401.31 SP,Ibaté,30734,290.66 SP,Ibirá,10896,271.91 SP,Ibirarema,6725,228.32 SP,Ibitinga,53158,689.25 SP,Ibiúna,71217,1058.08 SP,Icém,7462,362.59 SP,Iepê,7628,595.49 SP,Igaraçu do Tietê,23362,97.72 SP,Igarapava,27952,468.25 SP,Igaratá,8831,292.95 SP,Iguape,28841,1977.95 SP,Ilha Comprida,9025,191.97 SP,Ilha Solteira,25064,652.45 SP,Ilhabela,28196,347.54 SP,Indaiatuba,201619,312.05 SP,Indiana,4825,126.62 SP,Indiaporã,3903,279.6 SP,Inúbia Paulista,3630,87.41 SP,Ipaussu,13663,209.66 SP,Iperó,28300,170.28 SP,Ipeúna,6016,190.01 SP,Ipiguá,4463,135.69 SP,Iporanga,4299,1152.05 SP,Ipuã,14148,465.88 SP,Iracemápolis,20029,115.12 SP,Irapuã,7275,257.91 SP,Irapuru,7789,214.9 SP,Itaberá,17858,1110.5 SP,Itaí,24008,1082.78 SP,Itajobi,14556,502.07 SP,Itaju,3246,229.82 SP,Itanhaém,87057,601.67 SP,Itaóca,3228,183.02 SP,Itapecerica da Serra,152614,150.87 SP,Itapetininga,144377,1790.21 SP,Itapeva,87753,1826.26 SP,Itapevi,200769,82.66 SP,Itapira,68537,518.39 SP,Itapirapuã Paulista,3880,406.48 SP,Itápolis,40051,996.85 SP,Itaporanga,14549,507.71 SP,Itapuí,12173,140.8 SP,Itapura,4357,301.37 SP,Itaquaquecetuba,321770,82.61 SP,Itararé,47934,1003.58 SP,Itariri,15471,273.67 SP,Itatiba,101471,322.23 SP,Itatinga,18052,979.82 SP,Itirapina,15524,564.76 SP,Itirapuã,5914,161.12 SP,Itobi,7546,139.21 SP,Itu,154147,639.58 SP,Itupeva,44859,200.82 SP,Ituverava,38695,705.24 SP,Jaborandi,6592,273.44 SP,Jaboticabal,71662,706.6 SP,Jacareí,211214,464.27 SP,Jaci,5657,145.52 SP,Jacupiranga,17208,704.09 SP,Jaguariúna,44311,141.4 SP,Jales,47012,368.52 SP,Jambeiro,5349,184.41 SP,Jandira,108344,17.45 SP,Jardinópolis,37661,502.22 SP,Jarinu,23847,207.64 SP,Jaú,131040,685.76 SP,Jeriquara,3160,141.97 SP,Joanópolis,11768,374.28 SP,João Ramalho,4150,415.25 SP,José 
Bonifácio,32763,859.95 SP,Júlio Mesquita,4430,128.22 SP,Jumirim,2798,56.69 SP,Jundiaí,370126,431.17 SP,Junqueirópolis,18726,582.96 SP,Juquiá,19246,812.75 SP,Juquitiba,28737,522.18 SP,Lagoinha,4841,255.47 SP,Laranjal Paulista,25251,384.02 SP,Lavínia,8779,537.73 SP,Lavrinhas,6590,167.07 SP,Leme,91756,402.87 SP,Lençóis Paulista,61428,809.49 SP,Limeira,276022,580.71 SP,Lindóia,6712,48.76 SP,Lins,71432,571.54 SP,Lorena,82537,414.16 SP,Lourdes,2128,113.74 SP,Louveira,37125,55.13 SP,Lucélia,19882,314.76 SP,Lucianópolis,2249,189.82 SP,Luís Antônio,11286,598.77 SP,Luiziânia,5030,166.55 SP,Lupércio,4353,154.49 SP,Lutécia,2714,474.93 SP,Macatuba,16259,225.21 SP,Macaubal,7663,248.13 SP,Macedônia,3664,327.72 SP,Magda,3200,311.71 SP,Mairinque,43223,210.31 SP,Mairiporã,80956,320.7 SP,Manduri,8992,229.05 SP,Marabá Paulista,4812,918.77 SP,Maracaí,13332,533.94 SP,Marapoama,2633,111.27 SP,Mariápolis,3916,185.9 SP,Marília,216745,1170.25 SP,Marinópolis,2113,77.83 SP,Martinópolis,24219,1252.71 SP,Matão,76786,524.86 SP,Mauá,417064,61.87 SP,Mendonça,4640,195.04 SP,Meridiano,3855,229.25 SP,Mesópolis,1886,148.86 SP,Miguelópolis,20451,821.96 SP,Mineiros do Tietê,12038,213.24 SP,Mira Estrela,2820,216.83 SP,Miracatu,20592,1001.54 SP,Mirandópolis,27483,918.8 SP,Mirante do Paranapanema,17059,1239.08 SP,Mirassol,53792,243.29 SP,Mirassolândia,4295,166.17 SP,Mococa,66290,854.86 SP,Mogi das Cruzes,387779,712.67 SP,Mogi Guaçu,137245,812.16 SP,Moji Mirim,86505,497.8 SP,Mombuca,3266,133.7 SP,Monções,2132,104.24 SP,Mongaguá,46293,142.01 SP,Monte Alegre do Sul,7152,110.31 SP,Monte Alto,46642,346.5 SP,Monte Aprazível,21746,496.91 SP,Monte Azul Paulista,18931,263.44 SP,Monte Castelo,4063,232.57 SP,Monte Mor,48949,240.41 SP,Monteiro Lobato,4120,332.74 SP,Morro Agudo,29116,1388.2 SP,Morungaba,11769,146.75 SP,Motuca,4290,228.7 SP,Murutinga do Sul,4186,250.84 SP,Nantes,2707,286.16 SP,Narandiba,4288,358.03 SP,Natividade da Serra,6678,833.37 SP,Nazaré Paulista,16414,326.29 SP,Neves Paulista,8772,218.34 SP,Nhandeara,10725,435.77 SP,Nipoã,4274,137.82 SP,Nova Aliança,5891,217.31 SP,Nova Campina,8515,385.38 SP,Nova Canaã Paulista,2114,124.42 SP,Nova Castilho,1125,183.23 SP,Nova Europa,9300,160.35 SP,Nova Granada,19180,531.88 SP,Nova Guataporanga,2177,34.12 SP,Nova Independência,3068,265.78 SP,Nova Luzitânia,3441,74.06 SP,Nova Odessa,51242,74.32 SP,Novais,4592,117.77 SP,Novo Horizonte,36593,931.67 SP,Nuporanga,6817,348.27 SP,Ocauçu,4163,300.35 SP,Óleo,2673,198.14 SP,Olímpia,50024,802.65 SP,Onda Verde,3884,242.31 SP,Oriente,6097,218.61 SP,Orindiúva,5675,248.11 SP,Orlândia,39781,291.77 SP,Osasco,666740,64.95 SP,Oscar Bressane,2537,221.34 SP,Osvaldo Cruz,30917,248.39 SP,Ourinhos,103035,296.27 SP,Ouro Verde,7800,267.61 SP,Ouroeste,8405,288.84 SP,Pacaembu,13226,338.5 SP,Palestina,11051,695.46 SP,Palmares Paulista,10934,82.13 SP,Palmeira d`Oeste,9584,319.22 SP,Palmital,21186,547.81 SP,Panorama,14583,356.31 SP,Paraguaçu Paulista,42278,1001.3 SP,Paraibuna,17388,809.58 SP,Paraíso,5898,155.84 SP,Paranapanema,17808,1018.72 SP,Paranapuã,3815,140.48 SP,Parapuã,10844,365.69 SP,Pardinho,5582,209.89 SP,Pariquera-Açu,18446,359.3 SP,Parisi,2032,84.52 SP,Patrocínio Paulista,13000,602.85 SP,Paulicéia,6339,373.57 SP,Paulínia,82146,138.72 SP,Paulistânia,1779,256.65 SP,Paulo de Faria,8589,738.29 SP,Pederneiras,41497,729.0 SP,Pedra Bela,5780,158.59 SP,Pedranópolis,2558,260.19 SP,Pedregulho,15700,712.6 SP,Pedreira,41558,108.59 SP,Pedrinhas Paulista,2940,152.52 SP,Pedro de Toledo,10204,670.44 SP,Penápolis,58510,710.83 SP,Pereira Barreto,24962,978.88 
SP,Pereiras,7454,223.27 SP,Peruíbe,59773,324.14 SP,Piacatu,5287,232.36 SP,Piedade,52143,746.87 SP,Pilar do Sul,26406,681.12 SP,Pindamonhangaba,146995,729.89 SP,Pindorama,15039,184.83 SP,Pinhalzinho,13105,154.53 SP,Piquerobi,3537,482.57 SP,Piquete,14107,176.0 SP,Piracaia,25116,385.53 SP,Piracicaba,364571,1378.5 SP,Piraju,28475,504.5 SP,Pirajuí,22704,824.2 SP,Pirangi,10623,215.46 SP,Pirapora do Bom Jesus,15733,108.52 SP,Pirapozinho,24694,477.99 SP,Pirassununga,70081,727.12 SP,Piratininga,12072,402.41 SP,Pitangueiras,35307,430.64 SP,Planalto,4463,290.1 SP,Platina,3192,326.73 SP,Poá,106013,17.26 SP,Poloni,5395,133.54 SP,Pompéia,19964,784.06 SP,Pongaí,3481,183.33 SP,Pontal,40244,356.32 SP,Pontalinda,4074,210.19 SP,Pontes Gestal,2518,217.38 SP,Populina,4223,315.95 SP,Porangaba,8326,265.69 SP,Porto Feliz,48893,556.71 SP,Porto Ferreira,51400,244.91 SP,Potim,19397,44.47 SP,Potirendaba,15449,342.38 SP,Pracinha,2858,62.84 SP,Pradópolis,17377,167.38 SP,Praia Grande,262051,147.07 SP,Pratânia,4599,175.1 SP,Presidente Alves,4123,287.19 SP,Presidente Bernardes,13570,748.95 SP,Presidente Epitácio,41318,1260.24 SP,Presidente Prudente,207610,562.79 SP,Presidente Venceslau,37910,756.74 SP,Promissão,35674,779.28 SP,Quadra,3236,205.68 SP,Quatá,12799,650.37 SP,Queiroz,2808,233.79 SP,Queluz,11309,249.83 SP,Quintana,6004,319.57 SP,Rafard,8612,121.65 SP,Rancharia,28804,1587.47 SP,Redenção da Serra,3873,309.37 SP,Regente Feijó,18494,265.07 SP,Reginópolis,7323,410.82 SP,Registro,54261,722.41 SP,Restinga,6587,245.75 SP,Ribeira,3358,335.75 SP,Ribeirão Bonito,12135,471.55 SP,Ribeirão Branco,18269,697.5 SP,Ribeirão Corrente,4273,148.33 SP,Ribeirão do Sul,4446,203.69 SP,Ribeirão dos Índios,2187,196.34 SP,Ribeirão Grande,7422,333.36 SP,Ribeirão Pires,113068,99.12 SP,Ribeirão Preto,604682,650.96 SP,Rifaina,3436,162.51 SP,Rincão,10414,315.95 SP,Rinópolis,9935,358.33 SP,Rio Claro,186253,498.42 SP,Rio das Pedras,29501,226.66 SP,Rio Grande da Serra,43974,36.34 SP,Riolândia,10575,633.38 SP,Riversul,6163,386.2 SP,Rosana,19691,742.87 SP,Roseira,9599,130.65 SP,Rubiácea,2729,236.93 SP,Rubinéia,2862,242.9 SP,Sabino,5217,310.9 SP,Sagres,2395,147.8 SP,Sales,5451,308.46 SP,Sales Oliveira,10568,305.64 SP,Salesópolis,15635,425.0 SP,Salmourão,4818,172.29 SP,Saltinho,7059,99.74 SP,Salto,105516,133.21 SP,Salto de Pirapora,40132,280.61 SP,Salto Grande,8787,188.4 SP,Sandovalina,3699,455.12 SP,Santa Adélia,14333,330.9 SP,Santa Albertina,5723,272.77 SP,Santa Bárbara d`Oeste,180009,270.9 SP,Santa Branca,13763,272.24 SP,Santa Clara d`Oeste,2084,183.43 SP,Santa Cruz da Conceição,4002,150.13 SP,Santa Cruz da Esperança,1953,148.06 SP,Santa Cruz das Palmeiras,29932,295.34 SP,Santa Cruz do Rio Pardo,43921,1113.5 SP,Santa Ernestina,5568,134.42 SP,Santa Fé do Sul,29239,206.19 SP,Santa Gertrudes,21634,98.29 SP,Santa Isabel,50453,363.3 SP,Santa Lúcia,8248,154.03 SP,Santa Maria da Serra,5413,252.62 SP,Santa Mercedes,2831,166.87 SP,Santa Rita do Passa Quatro,26478,754.14 SP,Santa Rita d`Oeste,2543,210.08 SP,Santa Rosa de Viterbo,23862,288.58 SP,Santa Salete,1447,79.39 SP,Santana da Ponte Pensa,1641,130.26 SP,Santana de Parnaíba,108813,179.93 SP,Santo Anastácio,20475,552.54 SP,Santo André,676407,175.78 SP,Santo Antônio da Alegria,6304,310.29 SP,Santo Antônio de Posse,20650,154.0 SP,Santo Antônio do Aracanguá,7626,1308.24 SP,Santo Antônio do Jardim,5943,109.96 SP,Santo Antônio do Pinhal,6486,133.01 SP,Santo Expedito,2803,94.44 SP,Santópolis do Aguapeí,4277,127.91 SP,Santos,419400,280.67 SP,São Bento do Sapucaí,10468,253.05 SP,São Bernardo do 
Campo,765463,409.48 SP,São Caetano do Sul,149263,15.33 SP,São Carlos,221950,1137.33 SP,São Francisco,2793,75.62 SP,São João da Boa Vista,83639,516.42 SP,São João das Duas Pontes,2566,129.34 SP,São João de Iracema,1780,178.61 SP,São João do Pau d`Alho,2103,117.72 SP,São Joaquim da Barra,46512,410.6 SP,São José da Bela Vista,8406,276.95 SP,São José do Barreiro,4077,570.69 SP,São José do Rio Pardo,51900,419.19 SP,São José do Rio Preto,408258,431.96 SP,São José dos Campos,629921,1099.41 SP,São Lourenço da Serra,13973,186.33 SP,São Luís do Paraitinga,10397,617.32 SP,São Manuel,38342,650.77 SP,São Miguel Arcanjo,31450,930.34 SP,São Paulo,11253503,1521.1 SP,São Pedro,31662,609.09 SP,São Pedro do Turvo,7198,731.76 SP,São Roque,78821,306.91 SP,São Sebastião,73942,399.68 SP,São Sebastião da Grama,12099,252.38 SP,São Simão,14346,617.25 SP,São Vicente,332445,147.89 SP,Sarapuí,9027,352.69 SP,Sarutaiá,3622,141.61 SP,Sebastianópolis do Sul,3031,168.08 SP,Serra Azul,11256,283.14 SP,Serra Negra,26387,203.74 SP,Serrana,38878,126.05 SP,Sertãozinho,110074,402.87 SP,Sete Barras,13005,1062.7 SP,Severínia,15501,140.43 SP,Silveiras,5792,414.78 SP,Socorro,36686,449.03 SP,Sorocaba,586625,449.8 SP,Sud Mennucci,7435,591.3 SP,Sumaré,241311,153.5 SP,Suzanápolis,3383,330.21 SP,Suzano,262480,206.2 SP,Tabapuã,11363,345.58 SP,Tabatinga,14686,369.56 SP,Taboão da Serra,244528,20.39 SP,Taciba,5714,607.31 SP,Taguaí,10828,145.33 SP,Taiaçu,5894,106.64 SP,Taiúva,5447,132.46 SP,Tambaú,22406,561.79 SP,Tanabi,24055,745.8 SP,Tapiraí,8012,755.1 SP,Tapiratiba,12737,222.54 SP,Taquaral,2726,53.89 SP,Taquaritinga,53988,593.58 SP,Taquarituba,22291,448.43 SP,Taquarivaí,5151,231.79 SP,Tarabai,6607,201.54 SP,Tarumã,12885,303.18 SP,Tatuí,107326,523.48 SP,Taubaté,278686,624.89 SP,Tejupá,4809,296.28 SP,Teodoro Sampaio,21386,1555.99 SP,Terra Roxa,8505,221.54 SP,Tietê,36835,404.4 SP,Timburi,2646,196.79 SP,Torre de Pedra,2254,71.35 SP,Torrinha,9330,315.27 SP,Trabiju,1544,63.42 SP,Tremembé,40984,191.36 SP,Três Fronteiras,5427,151.19 SP,Tuiuti,5930,126.7 SP,Tupã,63476,628.51 SP,Tupi Paulista,14269,245.34 SP,Turiúba,1930,153.13 SP,Turmalina,1978,147.94 SP,Ubarana,5289,209.63 SP,Ubatuba,78801,723.83 SP,Ubirajara,4427,282.37 SP,Uchoa,9471,252.46 SP,União Paulista,1599,79.11 SP,Urânia,8836,208.94 SP,Uru,1251,146.97 SP,Urupês,12714,323.75 SP,Valentim Gentil,11036,149.69 SP,Valinhos,106793,148.59 SP,Valparaíso,22576,857.5 SP,Vargem,8801,142.61 SP,Vargem Grande do Sul,39266,267.23 SP,Vargem Grande Paulista,42997,42.48 SP,Várzea Paulista,107089,35.12 SP,Vera Cruz,10769,248.07 SP,Vinhedo,63611,81.6 SP,Viradouro,17297,217.73 SP,Vista Alegre do Alto,6886,94.98 SP,Vitória Brasil,1737,49.7 SP,Votorantim,108809,184.1 SP,Votuporanga,84692,421.03 SP,Zacarias,2335,319.14 SE,Amparo de São Francisco,2275,35.13 SE,Aquidabã,20056,359.29 SE,Aracaju,571149,181.86 SE,Arauá,10878,198.75 SE,Areia Branca,16857,146.68 SE,Barra dos Coqueiros,24976,90.32 SE,Boquim,25533,205.94 SE,Brejo Grande,7742,148.86 SE,Campo do Brito,16749,201.73 SE,Canhoba,3956,170.29 SE,Canindé de São Francisco,24686,902.25 SE,Capela,30761,442.74 SE,Carira,20007,636.4 SE,Carmópolis,13503,45.91 SE,Cedro de São João,5633,83.71 SE,Cristinápolis,16519,236.19 SE,Cumbe,3813,128.6 SE,Divina Pastora,4326,91.79 SE,Estância,64409,644.08 SE,Feira Nova,5324,184.93 SE,Frei Paulo,13874,400.36 SE,Gararu,11405,654.99 SE,General Maynard,2929,19.98 SE,Gracho Cardoso,5645,242.06 SE,Ilha das Flores,8348,54.64 SE,Indiaroba,15831,313.53 SE,Itabaiana,86967,336.69 SE,Itabaianinha,38910,493.31 SE,Itabi,4972,184.42 SE,Itaporanga 
d`Ajuda,30419,739.93 SE,Japaratuba,16864,364.9 SE,Japoatã,12938,407.42 SE,Lagarto,94861,969.58 SE,Laranjeiras,26902,162.28 SE,Macambira,6401,136.94 SE,Malhada dos Bois,3456,63.2 SE,Malhador,12042,100.94 SE,Maruim,16343,93.77 SE,Moita Bonita,11001,95.82 SE,Monte Alegre de Sergipe,13627,407.41 SE,Muribeca,7344,75.86 SE,Neópolis,18506,265.95 SE,Nossa Senhora Aparecida,8508,340.38 SE,Nossa Senhora da Glória,32497,756.49 SE,Nossa Senhora das Dores,24580,483.35 SE,Nossa Senhora de Lourdes,6238,81.06 SE,Nossa Senhora do Socorro,160827,156.77 SE,Pacatuba,13137,373.82 SE,Pedra Mole,2974,82.03 SE,Pedrinhas,8833,33.94 SE,Pinhão,5973,155.89 SE,Pirambu,8369,205.88 SE,Poço Redondo,30880,1232.12 SE,Poço Verde,21983,440.13 SE,Porto da Folha,27146,877.3 SE,Propriá,28451,89.12 SE,Riachão do Dantas,19386,531.47 SE,Riachuelo,9355,78.94 SE,Ribeirópolis,17173,258.53 SE,Rosário do Catete,9221,105.66 SE,Salgado,19365,247.83 SE,Santa Luzia do Itanhy,12969,325.73 SE,Santa Rosa de Lima,3749,67.61 SE,Santana do São Francisco,7038,45.62 SE,Santo Amaro das Brotas,11410,234.16 SE,São Cristóvão,78864,436.86 SE,São Domingos,10271,102.47 SE,São Francisco,3393,83.85 SE,São Miguel do Aleixo,3698,144.09 SE,Simão Dias,38702,564.69 SE,Siriri,8004,165.81 SE,Telha,2957,49.03 SE,Tobias Barreto,48040,1021.31 SE,Tomar do Geru,12855,304.9 SE,Umbaúba,22434,118.86 TO,Abreulândia,2391,1895.21 TO,Aguiarnópolis,5162,235.39 TO,Aliança do Tocantins,5671,1579.75 TO,Almas,7586,4013.24 TO,Alvorada,8374,1212.17 TO,Ananás,9865,1576.97 TO,Angico,3175,451.73 TO,Aparecida do Rio Negro,4213,1160.37 TO,Aragominas,5882,1173.06 TO,Araguacema,6317,2778.48 TO,Araguaçu,8786,5167.95 TO,Araguaína,150484,4000.42 TO,Araguanã,5030,836.03 TO,Araguatins,31329,2625.29 TO,Arapoema,6742,1552.22 TO,Arraias,10645,5786.87 TO,Augustinópolis,15950,394.98 TO,Aurora do Tocantins,3446,752.83 TO,Axixá do Tocantins,9275,150.21 TO,Babaçulândia,10424,1788.46 TO,Bandeirantes do Tocantins,3122,1541.84 TO,Barra do Ouro,4123,1106.35 TO,Barrolândia,5349,713.3 TO,Bernardo Sayão,4456,926.89 TO,Bom Jesus do Tocantins,3768,1332.67 TO,Brasilândia do Tocantins,2064,641.47 TO,Brejinho de Nazaré,5185,1724.45 TO,Buriti do Tocantins,9768,251.92 TO,Cachoeirinha,2148,352.35 TO,Campos Lindos,8139,3240.18 TO,Cariri do Tocantins,3756,1128.6 TO,Carmolândia,2316,339.41 TO,Carrasco Bonito,3688,192.94 TO,Caseara,4601,1691.61 TO,Centenário,2566,1954.7 TO,Chapada da Natividade,3277,1646.47 TO,Chapada de Areia,1335,659.25 TO,Colinas do Tocantins,30838,843.85 TO,Colméia,8611,990.72 TO,Combinado,4669,209.58 TO,Conceição do Tocantins,4182,2500.74 TO,Couto Magalhães,5009,1585.79 TO,Cristalândia,7234,1848.24 TO,Crixás do Tocantins,1564,986.69 TO,Darcinópolis,5273,1639.16 TO,Dianópolis,19112,3217.31 TO,Divinópolis do Tocantins,6363,2347.43 TO,Dois Irmãos do Tocantins,7161,3757.04 TO,Dueré,4592,3424.85 TO,Esperantina,9476,504.02 TO,Fátima,3805,382.91 TO,Figueirópolis,5340,1930.07 TO,Filadélfia,8505,1988.08 TO,Formoso do Araguaia,18427,13423.38 TO,Fortaleza do Tabocão,2419,621.56 TO,Goianorte,4956,1800.98 TO,Goiatins,12064,6408.6 TO,Guaraí,23200,2268.16 TO,Gurupi,76755,1836.09 TO,Ipueiras,1639,815.25 TO,Itacajá,7104,3051.36 TO,Itaguatins,6029,739.85 TO,Itapiratins,3532,1243.96 TO,Itaporã do Tocantins,2445,972.98 TO,Jaú do Tocantins,3507,2173.05 TO,Juarina,2231,481.05 TO,Lagoa da Confusão,10210,10564.66 TO,Lagoa do Tocantins,3525,911.34 TO,Lajeado,2773,322.49 TO,Lavandeira,1605,519.61 TO,Lizarda,3725,5723.23 TO,Luzinópolis,2622,279.56 TO,Marianópolis do Tocantins,4352,2091.37 TO,Mateiros,2223,9681.46 
TO,Maurilândia do Tocantins,3154,738.11 TO,Miracema do Tocantins,20684,2656.09 TO,Miranorte,12623,1031.62 TO,Monte do Carmo,6716,3616.67 TO,Monte Santo do Tocantins,2085,1091.55 TO,Muricilândia,3152,1186.65 TO,Natividade,9000,3240.72 TO,Nazaré,4386,395.91 TO,Nova Olinda,10686,1566.18 TO,Nova Rosalândia,3770,516.31 TO,Novo Acordo,3762,2674.68 TO,Novo Alegre,2286,200.1 TO,Novo Jardim,2457,1309.67 TO,Oliveira de Fátima,1037,205.85 TO,Palmas,228332,2218.94 TO,Palmeirante,4954,2640.82 TO,Palmeiras do Tocantins,5740,747.9 TO,Palmeirópolis,7339,1703.94 TO,Paraíso do Tocantins,44417,1268.06 TO,Paranã,10338,11260.21 TO,Pau d`Arco,4588,1377.41 TO,Pedro Afonso,11539,2010.9 TO,Peixe,10384,5291.21 TO,Pequizeiro,5054,1209.8 TO,Pindorama do Tocantins,4506,1559.09 TO,Piraquê,2920,1367.61 TO,Pium,6694,10013.79 TO,Ponte Alta do Bom Jesus,4544,1806.14 TO,Ponte Alta do Tocantins,7180,6491.13 TO,Porto Alegre do Tocantins,2796,501.86 TO,Porto Nacional,49146,4449.92 TO,Praia Norte,7659,289.05 TO,Presidente Kennedy,3681,770.42 TO,Pugmil,2369,401.83 TO,Recursolândia,3768,2216.66 TO,Riachinho,4191,517.48 TO,Rio da Conceição,1714,787.12 TO,Rio dos Bois,2570,845.07 TO,Rio Sono,6254,6354.37 TO,Sampaio,3864,222.29 TO,Sandolândia,3326,3528.62 TO,Santa Fé do Araguaia,6599,1678.09 TO,Santa Maria do Tocantins,2894,1410.46 TO,Santa Rita do Tocantins,2128,3274.95 TO,Santa Rosa do Tocantins,4568,1796.26 TO,Santa Tereza do Tocantins,2523,539.91 TO,Santa Terezinha do Tocantins,2474,269.68 TO,São Bento do Tocantins,4608,1105.9 TO,São Félix do Tocantins,1437,1908.68 TO,São Miguel do Tocantins,10481,398.82 TO,São Salvador do Tocantins,2910,1422.03 TO,São Sebastião do Tocantins,4283,287.28 TO,São Valério,4383,2519.59 TO,Silvanópolis,5068,1258.83 TO,Sítio Novo do Tocantins,9148,324.11 TO,Sucupira,1742,1025.52 TO,Taguatinga,15051,2437.4 TO,Taipas do Tocantins,1945,1116.2 TO,Talismã,2562,2156.9 TO,Tocantínia,6736,2601.6 TO,Tocantinópolis,22619,1077.07 TO,Tupirama,1574,712.21 TO,Tupiratins,2097,895.31 TO,Wanderlândia,10981,1373.06 TO,Xambioá,11484,1186.43 rows-0.3.1/examples/data/tesouro-direto.csv000066400000000000000000000035521310400316700207350ustar00rootroot00000000000000timestamp,titulo,vencimento,taxa_compra,taxa_venda,preco_compra,preco_venda 2015-11-06T17:43:00,Tesouro IPCA+ com Juros Semestrais 2017 (NTNB),2017-05-15,7.02%,6.3%,0.0,2792.97 2015-11-06T17:43:00,Tesouro IPCA+ 2019 (NTNB Princ),2019-05-15,7.02%,7.06%,2150.33,2147.53 2015-11-06T17:43:00,Tesouro IPCA+ com Juros Semestrais 2020 (NTNB),2020-08-15,7.14%,7.18%,2644.22,2640.14 2015-11-06T17:43:00,Tesouro IPCA+ com Juros Semestrais 2024 (NTNB),2024-08-15,7.02%,7.39%,0.0,2531.91 2015-11-06T17:43:00,Tesouro IPCA+ 2024 (NTNB Princ),2024-08-15,7.38%,7.44%,1463.24,1456.12 2015-11-06T17:43:00,Tesouro IPCA+ com Juros Semestrais 2035 (NTNB),2035-05-15,7.03%,7.11%,2520.76,2500.2 2015-11-06T17:43:00,Tesouro IPCA+ 2035 (NTNB Princ),2035-05-15,6.97%,7.05%,735.21,724.6 2015-11-06T17:43:00,Tesouro IPCA+ com Juros Semestrais 2045 (NTNB),2045-05-15,7.02%,7.1%,0.0,2449.17 2015-11-06T17:43:00,Tesouro IPCA+ com Juros Semestrais 2050 (NTNB),2050-08-15,6.94%,7.04%,2440.99,2409.92 2015-11-06T17:43:00,Tesouro Prefixado 2016 (LTN),2016-01-01,7.02%,14.27%,0.0,980.08 2015-11-06T17:43:00,Tesouro Prefixado com Juros Semestrais 2017 (NTNF),2017-01-01,7.02%,15.32%,0.0,982.94 2015-11-06T17:43:00,Tesouro Prefixado 2017 (LTN),2017-01-01,7.02%,15.33%,0.0,849.1 2015-11-06T17:43:00,Tesouro Prefixado 2018 (LTN),2018-01-01,15.59%,15.65%,733.95,733.14 2015-11-06T17:43:00,Tesouro Prefixado 2021 
(LTN),2021-01-01,15.58%,15.64%,475.99,474.72 2015-11-06T17:43:00,Tesouro Prefixado com Juros Semestrais 2021 (NTNF),2021-01-01,7.02%,15.61%,0.0,851.42 2015-11-06T17:43:00,Tesouro Prefixado com Juros Semestrais 2023 (NTNF),2023-01-01,7.02%,15.61%,0.0,809.9 2015-11-06T17:43:00,Tesouro Prefixado com Juros Semestrais 2025 (NTNF),2025-01-01,15.59%,15.65%,779.41,777.19 2015-11-06T17:43:00,Tesouro Selic 2017 (LFT),2017-03-07,7.02%,0.02%,0.0,7259.64 2015-11-06T17:43:00,Tesouro Selic 2021 (LFT),2021-03-01,0%,0.04%,7261.57,7246.25 rows-0.3.1/examples/library/000077500000000000000000000000001310400316700157625ustar00rootroot00000000000000rows-0.3.1/examples/library/airports.py000066400000000000000000000015531310400316700202030ustar00rootroot00000000000000# coding: utf-8 # This script downloads the list of airport codes and cities from # worldnetlogistics.com and creates a `dict` called `code_to_city` with the # corresponding mapping. # # Install dependencies: # pip install requests rows # or # aptitude install python-requests python-rows from __future__ import unicode_literals from io import BytesIO import requests import rows # Get data url = 'http://www.worldnetlogistics.com/information/iata-city-airport-codes/' response = requests.get(url) html = response.content # Parse/normalize data table = rows.import_from_html(BytesIO(html), index=4) code_to_city = {} for row in table: code_to_city[row.code] = row.city if row.city_2 is not None: code_to_city[row.code_2] = row.city_2 codes = sorted(code_to_city.keys()) for code in codes: print('{} = {}'.format(code, code_to_city[code])) rows-0.3.1/examples/library/brazilian_cities_wikipedia.py000066400000000000000000000024651310400316700237030ustar00rootroot00000000000000# coding: utf-8 from __future__ import unicode_literals import re from collections import OrderedDict from io import BytesIO try: from urlparse import urljoin # Python 2 except ImportError: from urllib.parse import urljoin # Python 3 import requests import rows # Get data from Portuguese Wikipedia city_list_url = 'https://pt.wikipedia.org/wiki/Lista_de_munic%C3%ADpios_do_Brasil' response = requests.get(city_list_url) html = response.content # Extract desired data using XPath cities = rows.import_from_xpath( BytesIO(html), rows_xpath='//table/tr/td/ul/li', fields_xpath=OrderedDict([('name', './/text()'), ('link', './/a/@href')])) regexp_city_state = re.compile(r'(.*) \(([A-Z]{2})\)') def transform(row, table): 'Transform row "link" into full URL and add "state" based on "name"' data = row._asdict() data['link'] = urljoin('https://pt.wikipedia.org', data['link']) data['name'], data['state'] = regexp_city_state.findall(data['name'])[0] return data new_fields = OrderedDict() new_fields['name'] = cities.fields['name'] new_fields['state'] = rows.fields.TextField # new field new_fields['link'] = cities.fields['link'] cities = rows.transform(new_fields, transform, cities) rows.export_to_csv(cities, 'brazilian-cities.csv') rows-0.3.1/examples/library/custom_field.py000066400000000000000000000015701310400316700210140ustar00rootroot00000000000000from __future__ import unicode_literals import sys import rows class MyIntegerField(rows.fields.IntegerField): '''Weird integer representation, having a `#` just before the number''' @classmethod def serialize(cls, value): return '#' + str(value) @classmethod def deserialize(cls, value): return int(value.replace('#', '')) class PtBrDateField(rows.fields.DateField): INPUT_FORMAT = '%d/%m/%Y' data = [['name', 'age', 'birthdate'], ['alvaro', '#30', '29/04/1987'],
['joao', '#17', '01/02/2000']] table = rows.plugins.utils.create_table( data, force_types={'age': MyIntegerField, 'birthdate': PtBrDateField,}) print(type(table[0].age)) # `<class 'int'>` print(type(table[0].birthdate)) # `<class 'datetime.date'>` print(rows.export_to_txt(table)) # "age" values will start with "#" rows-0.3.1/examples/library/ecuador_radiodifusoras.py000066400000000000000000000016621310400316700230610ustar00rootroot00000000000000# coding: utf-8 from __future__ import unicode_literals import os from collections import OrderedDict import rows # taken from: # http://www.supercom.gob.ec/es/informate-y-participa/directorio-de-medios/21-radiodifusoras filename = os.path.join(os.path.dirname(__file__), '../../tests/data/ecuador-medios-radiodifusoras.html') rows_xpath = '//*[@class="entry-container"]/*[@class="row-fluid"]/*[@class="span6"]' fields_xpath = OrderedDict([ ('url', './/h2/a/@href'), ('name', './/h2/a/text()'), ('address', './/div[@class="spField field_direccion"]/text()'), ('phone', './/div[@class="spField field_telefono"]/text()'), ('website', './/div[@class="spField field_sitio_web"]/text()'), ('email', './/div[@class="spField field_email"]/text()'), ]) table = rows.import_from_xpath(filename, rows_xpath, fields_xpath) rows.export_to_csv(table, 'ecuador-radiodifusoras.csv') rows-0.3.1/examples/library/extract_links.py000066400000000000000000000016551310400316700212150ustar00rootroot00000000000000# coding: utf-8 from __future__ import unicode_literals from io import BytesIO import requests import rows import six extract_links = rows.plugins.html.extract_links extract_text = rows.plugins.html.extract_text # Get the HTML url = 'http://wnpp.debian.net/' response = requests.get(url) html = response.content # Import data, preserving cell's HTML packages = rows.import_from_html(BytesIO(html), index=10, preserve_html=True) def transform(row, table): 'Extract links from "project" field and remove HTML from all' data = row._asdict() data['links'] = ' '.join(extract_links(row.project)) for key, value in data.items(): if isinstance(value, six.text_type): data[key] = extract_text(value) return data new_fields = packages.fields.copy() new_fields['links'] = rows.fields.TextField packages = rows.transform(new_fields, transform, packages) rows.export_to_csv(packages, 'debian-wnpp.csv') rows-0.3.1/examples/library/organizaciones.py000066400000000000000000000014201310400316700213460ustar00rootroot00000000000000# coding: utf-8 # This example downloads some Ecuadorian organizations in JSON, extracts the # desired `dict`s, then imports them into a `rows.Table` object to finally # export as XLS.
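# (The endpoint below is assumed to return a GeoJSON-like payload: a
# 'features' list whose items each carry a 'properties' dict -- that is
# what the code below extracts.)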
# Install dependencies by running: pip install requests rows[xls] import requests import rows URL = 'http://www.onumujeres-ecuador.org/geovisor/data/organizaciones.php' def download_organizations(): 'Download organizations JSON and extract its properties' response = requests.get(URL) data = response.json() organizations = [organization['properties'] for organization in data['features']] return rows.import_from_dicts(organizations) if __name__ == '__main__': table = download_organizations() rows.export_to_xls(table, 'organizaciones.xls') rows-0.3.1/examples/library/slip_opinions.py000066400000000000000000000013171310400316700212230ustar00rootroot00000000000000# coding: utf-8 from __future__ import print_function from __future__ import unicode_literals # This example was based on: # https://github.com/compjour/search-script-scrape/blob/master/scripts/42.py from io import BytesIO try: from urlparse import urljoin # Python 2 except ImportError: from urllib.parse import urljoin # Python 3 import requests import rows tag_to_dict = rows.plugins.html.tag_to_dict url = 'http://www.supremecourt.gov/opinions/slipopinions.aspx' html = requests.get(url).content table = rows.import_from_html(BytesIO(html), index=1, preserve_html=True) for element in table: attributes = tag_to_dict(element.name) print(attributes['text'], urljoin(url, attributes['href'])) rows-0.3.1/examples/library/tests_uwsgi_log.py000066400000000000000000000037221310400316700215610ustar00rootroot00000000000000# coding: utf-8 # Copyright 2014-2016 Álvaro Justen # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program. If not, see . 
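# Test suite for the custom uwsgi log plugin defined in uwsgi_log_plugin.py
# (in this same directory). A minimal way to run it, assuming this directory
# is the current working directory:
#
#   python -m unittest tests_uwsgi_log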
from __future__ import unicode_literals import datetime import unittest from rows.table import Table from uwsgi_log_plugin import import_from_uwsgi_log class UwsgiLogPluginTestCase(unittest.TestCase): def test_import_from_uwsgi_log(self): filename = 'uwsgi.log' table = import_from_uwsgi_log(filename, 'utf-8') self.assertEqual(len(table), 2) first = table.Row(pid=879, ip='127.0.0.1', datetime=datetime.datetime(2015, 6, 1, 11, 23, 16), generation_time=0.17378, http_path='/something', http_verb='GET', http_version=1.1, http_status=404) second = table.Row(pid=31460, ip='127.0.1.1', datetime=datetime.datetime(2015, 7, 15, 23, 49, 20), generation_time=0.000466, http_path='/about', http_verb='OPTIONS', http_version=1.1, http_status=200) self.assertEqual(table[0], first) self.assertEqual(table[1], second) rows-0.3.1/examples/library/usa_legislators.py000066400000000000000000000021431310400316700215340ustar00rootroot00000000000000# coding: utf-8 # This example was based on: # https://github.com/compjour/search-script-scrape/blob/master/scripts/101.py from io import BytesIO import requests import rows # Capture url = 'http://unitedstates.sunlightfoundation.com/legislators/legislators.csv' csv = BytesIO(requests.get(url).content) # Normalize table = rows.import_from_csv(csv) # Analyze total = len(table) total_in_office = sum(1 for row in table if row.in_office) men = sum(1 for row in table if row.gender == 'M') men_in_office = sum(1 for row in table if row.gender == 'M' and row.in_office) women = sum(1 for row in table if row.gender == 'F') women_in_office = sum(1 for row in table if row.gender == 'F' and row.in_office) # View print(' Men: {}/{} ({:02.2f}%), in office: {}/{} ({:02.2f}%)' .format(men, total, 100 * men / float(total), men_in_office, total_in_office, 100 * men_in_office / float(total))) print('Women: {}/{} ({:02.2f}%), in office: {}/{} ({:02.2f}%)' .format(women, total, 100 * women / float(total), women_in_office, total_in_office, 100 * women_in_office / float(total))) rows-0.3.1/examples/library/uwsgi.log000066400000000000000000000075351310400316700176350ustar00rootroot00000000000000*** Starting uWSGI 2.0.8 (64bit) on [Mon Jun 1 11:08:58 2015] *** compiled with version: 4.8.2 on 01 June 2015 11:01:28 os: Linux-3.13.0-48-generic #80-Ubuntu SMP Thu Mar 12 11:16:15 UTC 2015 nodename: localhost.localdomain machine: x86_64 clock source: unix detected number of CPU cores: 1 current working directory: /home/ubuntu writing pidfile to /home/ubuntu/uwsgi/uwsgi.pid detected binary path: /home/ubuntu/venv/bin/uwsgi !!! no internal routing support, rebuild with pcre support !!! chdir() to /home/ubuntu/repo your processes number limit is 7862 your memory page size is 4096 bytes detected max file descriptor number: 1024 lock engine: pthread robust mutexes thunder lock: disabled (you can enable it with --thunder-lock) uwsgi socket 0 bound to TCP address 127.0.0.1:8001 fd 3 Python version: 2.7.6 (default, Mar 22 2014, 23:03:41) [GCC 4.8.2] Set PythonHome to /home/ubuntu/venv *** Python threads support is disabled. 
You can enable it with --enable-threads *** Python main interpreter initialized at 0x14d9cf0 your server socket listen backlog is limited to 100 connections your mercy for graceful operations on workers is 60 seconds mapped 363840 bytes (355 KB) for 4 cores *** Operational MODE: preforking *** WSGI app 0 (mountpoint='') ready in 0 seconds on interpreter 0x14d9cf0 pid: 32310 (default app) *** uWSGI is running in multiple interpreter mode *** spawned uWSGI master process (pid: 32310) spawned uWSGI worker 1 (pid: 32317, cores: 1) spawned uWSGI worker 2 (pid: 32318, cores: 1) spawned uWSGI worker 3 (pid: 32319, cores: 1) spawned uWSGI worker 4 (pid: 32320, cores: 1) *** Stats server enabled on 127.0.0.1:8002 fd: 16 *** SIGINT/SIGQUIT received...killing workers... worker 1 buried after 1 seconds worker 2 buried after 1 seconds worker 3 buried after 1 seconds worker 4 buried after 1 seconds goodbye to uWSGI. VACUUM: pidfile removed. *** Starting uWSGI 2.0.8 (64bit) on [Mon Jun 1 11:20:46 2015] *** compiled with version: 4.8.2 on 01 June 2015 11:01:28 os: Linux-3.13.0-48-generic #80-Ubuntu SMP Thu Mar 12 11:16:15 UTC 2015 nodename: localhost.localdomain machine: x86_64 clock source: unix detected number of CPU cores: 1 current working directory: /home/ubuntu writing pidfile to /home/ubuntu/uwsgi/uwsgi.pid detected binary path: /home/ubuntu/venv/bin/uwsgi !!! no internal routing support, rebuild with pcre support !!! chdir() to /home/ubuntu/repo your processes number limit is 7862 your memory page size is 4096 bytes detected max file descriptor number: 1024 lock engine: pthread robust mutexes thunder lock: disabled (you can enable it with --thunder-lock) uwsgi socket 0 bound to TCP address 127.0.0.1:8001 fd 3 Python version: 2.7.6 (default, Mar 22 2014, 23:03:41) [GCC 4.8.2] Set PythonHome to /home/ubuntu/venv *** Python threads support is disabled. 
You can enable it with --enable-threads *** Python main interpreter initialized at 0xf6dcf0 your server socket listen backlog is limited to 100 connections your mercy for graceful operations on workers is 60 seconds mapped 363840 bytes (355 KB) for 4 cores *** Operational MODE: preforking *** WSGI app 0 (mountpoint='') ready in 1 seconds on interpreter 0xf6dcf0 pid: 872 (default app) *** uWSGI is running in multiple interpreter mode *** spawned uWSGI master process (pid: 872) spawned uWSGI worker 1 (pid: 879, cores: 1) spawned uWSGI worker 2 (pid: 880, cores: 1) spawned uWSGI worker 3 (pid: 881, cores: 1) spawned uWSGI worker 4 (pid: 882, cores: 1) *** Stats server enabled on 127.0.0.1:8002 fd: 16 *** [pid: 879|app: 0|req: 1/1] 127.0.0.1 () {40 vars in 743 bytes} [Mon Jun 1 11:23:16 2015] GET /something => generated 93 bytes in 173780 micros (HTTP/1.1 404) 2 headers in 80 bytes (1 switches on core 0) [pid: 31460|app: 0|req: 21/2786] 127.0.1.1 () {46 vars in 840 bytes} [Wed Jul 15 23:49:20 2015] OPTIONS /about => generated 0 bytes in 466 micros (HTTP/1.1 200) 6 headers in 327 bytes (1 switches on core 0) rows-0.3.1/examples/library/uwsgi_log_plugin.py000066400000000000000000000036621310400316700217200ustar00rootroot00000000000000# coding: utf-8 from __future__ import unicode_literals import codecs import datetime import re from collections import OrderedDict import rows.fields from rows.table import Table REGEXP_UWSGI_LOG = re.compile(r'\[pid: ([0-9]+)\|app: [0-9]+\|req: ' r'[0-9]+/[0-9]+\] ' r'([0-9]+\.[0-9]+\.[0-9]+\.[0-9]+) .+ \[(.+)\] ' r'([^ ]+) (.+) => generated .+ in ([0-9]+) ' r'micros \(HTTP/([^ ]+) ([^)]+)\)') UWSGI_FIELDS = OrderedDict([('pid', rows.fields.IntegerField), ('ip', rows.fields.TextField), ('datetime', rows.fields.DatetimeField), ('http_verb', rows.fields.TextField), ('http_path', rows.fields.TextField), ('generation_time', rows.fields.FloatField), ('http_version', rows.fields.FloatField), ('http_status', rows.fields.IntegerField)]) UWSGI_DATETIME_FORMAT = '%a %b %d %H:%M:%S %Y' strptime = datetime.datetime.strptime def import_from_uwsgi_log(filename, encoding): table = Table(fields=UWSGI_FIELDS) field_names = list(UWSGI_FIELDS.keys()) with codecs.open(filename, encoding=encoding) as fobj: for line in fobj: result = REGEXP_UWSGI_LOG.findall(line) if result: data = list(result[0]) # Convert datetime data[2] = strptime(data[2], UWSGI_DATETIME_FORMAT) # Convert generation time (micros -> seconds) data[5] = float(data[5]) / 1000000 table.append({field_name: value for field_name, value in zip(field_names, data)}) return table if __name__ == '__main__': table = import_from_uwsgi_log('uwsgi.log', 'utf-8') for row in table: print(row) rows-0.3.1/requirements-development.txt000066400000000000000000000003201310400316700202770ustar00rootroot00000000000000-r requirements.txt # Everything except install_requires click lxml openpyxl parquet>=1.1 requests xlrd xlwt file-magic # Test tools coverage ipdb mock nose pylint yanc tox # Doc tools sphinx recommonmark rows-0.3.1/requirements.txt000066400000000000000000000000051310400316700157570ustar00rootroot00000000000000-e . rows-0.3.1/rows.1.txt000066400000000000000000000055011310400316700143730ustar00rootroot00000000000000.TH ROWS "1" "Sep 2015" "ROWS 0.1.0" "common, beautiful interface to tabular data, no matter the format" NAME rows - common, beautiful interface to tabular data, no matter the format SYNOPSIS rows [OPTIONS] COMMAND [ARGS] ... 
DESCRIPTION No matter in which format your tabular data is: rows will import it, automatically detect types and give you high-level Python objects so you can start working with the data instead of trying to parse it. It is also locale- and unicode-aware. OPTIONS --help Show this message and exit COMMANDS . convert - Convert table on `source` URI to... join - Join tables from `source` URIs using `key(s)`... sort - Sort from `source` by `key(s)` and save into... sum - Sum tables from `source` URIs and save into... SYNOPSIS rows convert [OPTIONS] SOURCE DESTINATION DESCRIPTION Convert table on `source` URI to `destination` OPTIONS --input-encoding TEXT --output-encoding TEXT --input-locale TEXT --output-locale TEXT --help Show this message and exit. SYNOPSIS rows join [OPTIONS] KEYS SOURCES ... DESTINATION DESCRIPTION Join tables from `source` URIs using `key(s)` to group rows and save into `destination` OPTIONS --input-encoding TEXT --output-encoding TEXT --input-locale TEXT --output-locale TEXT --help Show this message and exit. SYNOPSIS rows sort [OPTIONS] KEY SOURCE DESTINATION DESCRIPTION Sort from `source` by `key(s)` and save into `destination` OPTIONS --input-encoding TEXT --output-encoding TEXT --input-locale TEXT --output-locale TEXT --help Show this message and exit. SYNOPSIS rows sum [OPTIONS] SOURCES ... DESTINATION DESCRIPTION Sum tables from `source` URIs and save into `destination` OPTIONS --input-encoding TEXT --output-encoding TEXT --input-locale TEXT --output-locale TEXT --help Show this message and exit. EXAMPLES - To export a CSV from a site, converting locales from pt_BR to en (both UTF-8): rows convert \-\-input-locale pt_BR.UTF-8 \-\-output-locale en.UTF-8 "" data.csv - To export an XLS from a site, without changing locales: rows convert "" data.xls - To convert a CSV file to XLS: rows convert file.csv file.xls REPORTING BUGS To report a bug, please visit rows' issue tracking system at . AUTHOR Written by Álvaro Justen . This manual page was written by Paulo Roberto Alves de Oliveira (aka kretcheu) for the Debian project (but may be used by others). COPYRIGHT Copyright © 2014-2015 Álvaro Justen. License GPLv3+: GNU GPL version 3 or later . This is free software: you are free to change and redistribute it. There is NO WARRANTY, to the extent permitted by law. rows-0.3.1/rows/000077500000000000000000000000001310400316700134725ustar00rootroot00000000000000rows-0.3.1/rows/__init__.py000066400000000000000000000042271310400316700156100ustar00rootroot00000000000000# coding: utf-8 # Copyright 2014-2016 Álvaro Justen # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program. If not, see .
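# Quick usage sketch of the top-level API re-exported below (a minimal,
# hypothetical example: it assumes a `data.csv` file exists and that the
# optional XLS dependencies are installed):
#
#   import rows
#   table = rows.import_from_csv('data.csv')  # field types auto-detected
#   for row in table:
#       print(row)  # each row is a namedtuple-like object
#   rows.export_to_xls(table, 'data.xls')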
from __future__ import unicode_literals # General imports import rows.plugins as plugins from rows.operations import join, transform, transpose from rows.table import Table, FlexibleTable from rows.localization import locale_context # Plugins that have no dependencies, or whose dependencies are installed # through `install_requires` import_from_json = plugins.json.import_from_json export_to_json = plugins.json.export_to_json import_from_dicts = plugins.dicts.import_from_dicts export_to_dicts = plugins.dicts.export_to_dicts import_from_csv = plugins.csv.import_from_csv export_to_csv = plugins.csv.export_to_csv import_from_txt = plugins.txt.import_from_txt export_to_txt = plugins.txt.export_to_txt # Plugins that have optional dependencies (each is only exposed if its # dependencies could be imported) if plugins.html: import_from_html = plugins.html.import_from_html export_to_html = plugins.html.export_to_html if plugins.xpath: import_from_xpath = plugins.xpath.import_from_xpath if plugins.ods: import_from_ods = plugins.ods.import_from_ods if plugins.sqlite: import_from_sqlite = plugins.sqlite.import_from_sqlite export_to_sqlite = plugins.sqlite.export_to_sqlite if plugins.xls: import_from_xls = plugins.xls.import_from_xls export_to_xls = plugins.xls.export_to_xls if plugins.xlsx: import_from_xlsx = plugins.xlsx.import_from_xlsx export_to_xlsx = plugins.xlsx.export_to_xlsx if plugins.parquet: import_from_parquet = plugins.parquet.import_from_parquet __version__ = '0.3.1' rows-0.3.1/rows/cli.py000077500000000000000000000333051310400316700146210ustar00rootroot00000000000000# coding: utf-8 # Copyright 2014-2016 Álvaro Justen # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program. If not, see . # TODO: define exit codes # TODO: move default options to base command # TODO: may move all 'destination' to '--output' # TODO: test this whole module # TODO: add option to pass 'create_table' options in command-line (like force # fields) import shlex import sqlite3 import sys from io import BytesIO import click import requests.exceptions import rows from rows.utils import (detect_source, export_to_uri, import_from_source, import_from_uri) from rows.plugins.utils import make_header DEFAULT_INPUT_ENCODING = 'utf-8' DEFAULT_OUTPUT_ENCODING = 'utf-8' DEFAULT_INPUT_LOCALE = 'C' DEFAULT_OUTPUT_LOCALE = 'C' def _import_table(source, encoding, verify_ssl=True, *args, **kwargs): try: table = import_from_uri(source, default_encoding=DEFAULT_INPUT_ENCODING, verify_ssl=verify_ssl, encoding=encoding, *args, **kwargs) except requests.exceptions.SSLError: click.echo('ERROR: SSL verification failed!
' 'Use `--verify-ssl=no` if you want to ignore.', err=True) sys.exit(2) else: return table def _get_field_names(field_names, table_field_names, permit_not=False): new_field_names = make_header(field_names.split(','), permit_not=permit_not) if not permit_not: diff = set(new_field_names) - set(table_field_names) else: diff = set(field_name.replace('^', '') for field_name in new_field_names) - set(table_field_names) if diff: missing = ', '.join(['"{}"'.format(field) for field in diff]) click.echo('Table does not have fields: {}'.format(missing), err=True) sys.exit(1) else: return new_field_names @click.group() @click.version_option(version=rows.__version__, prog_name='rows') def cli(): pass @cli.command(help='Convert table on `source` URI to `destination`') @click.option('--input-encoding') @click.option('--output-encoding') @click.option('--input-locale') @click.option('--output-locale') @click.option('--verify-ssl', default=True, type=bool) @click.option('--order-by') @click.argument('source') @click.argument('destination') def convert(input_encoding, output_encoding, input_locale, output_locale, verify_ssl, order_by, source, destination): # TODO: may use sys.stdout.encoding if output_file = '-' output_encoding = output_encoding or DEFAULT_OUTPUT_ENCODING if input_locale is not None: with rows.locale_context(input_locale): table = _import_table(source, encoding=input_encoding, verify_ssl=verify_ssl) else: table = _import_table(source, encoding=input_encoding, verify_ssl=verify_ssl) if order_by is not None: order_by = _get_field_names(order_by, table.field_names, permit_not=True) # TODO: use complete list of `order_by` fields table.order_by(order_by[0].replace('^', '-')) if output_locale is not None: with rows.locale_context(output_locale): export_to_uri(table, destination, encoding=output_encoding) else: export_to_uri(table, destination, encoding=output_encoding) @cli.command(help='Join tables from `source` URIs using `key(s)` to group ' 'rows and save into `destination`') @click.option('--input-encoding') @click.option('--output-encoding') @click.option('--input-locale') @click.option('--output-locale') @click.option('--verify-ssl', default=True, type=bool) @click.option('--order-by') @click.argument('keys') @click.argument('sources', nargs=-1, required=True) @click.argument('destination') def join(input_encoding, output_encoding, input_locale, output_locale, verify_ssl, order_by, keys, sources, destination): # TODO: may use sys.stdout.encoding if output_file = '-' output_encoding = output_encoding or DEFAULT_OUTPUT_ENCODING keys = [key.strip() for key in keys.split(',')] if input_locale is not None: with rows.locale_context(input_locale): tables = [_import_table(source, encoding=input_encoding, verify_ssl=verify_ssl) for source in sources] else: tables = [_import_table(source, encoding=input_encoding, verify_ssl=verify_ssl) for source in sources] result = rows.join(keys, tables) if order_by is not None: order_by = _get_field_names(order_by, result.field_names, permit_not=True) # TODO: use complete list of `order_by` fields result.order_by(order_by[0].replace('^', '-')) if output_locale is not None: with rows.locale_context(output_locale): export_to_uri(result, destination, encoding=output_encoding) else: export_to_uri(result, destination, encoding=output_encoding) @cli.command(name='sum', help='Sum tables from `source` URIs and save into `destination`') @click.option('--input-encoding') @click.option('--output-encoding') @click.option('--input-locale') @click.option('--output-locale') 
@click.option('--verify-ssl', default=True, type=bool) @click.option('--order-by') @click.argument('sources', nargs=-1, required=True) @click.argument('destination') def sum_(input_encoding, output_encoding, input_locale, output_locale, verify_ssl, order_by, sources, destination): # TODO: may use sys.stdout.encoding if output_file = '-' output_encoding = output_encoding or DEFAULT_OUTPUT_ENCODING if input_locale is not None: with rows.locale_context(input_locale): tables = [_import_table(source, encoding=input_encoding, verify_ssl=verify_ssl) for source in sources] else: tables = [_import_table(source, encoding=input_encoding, verify_ssl=verify_ssl) for source in sources] result = sum(tables) if order_by is not None: order_by = _get_field_names(order_by, result.field_names, permit_not=True) # TODO: use complete list of `order_by` fields result.order_by(order_by[0].replace('^', '-')) if output_locale is not None: with rows.locale_context(output_locale): export_to_uri(result, destination, encoding=output_encoding) else: export_to_uri(result, destination, encoding=output_encoding) @cli.command(name='print', help='Print a table') @click.option('--input-encoding') @click.option('--output-encoding') @click.option('--input-locale') @click.option('--output-locale') @click.option('--table-index', default=0) @click.option('--verify-ssl', default=True, type=bool) @click.option('--fields') @click.option('--fields-except') @click.option('--order-by') @click.argument('source', required=True) def print_(input_encoding, output_encoding, input_locale, output_locale, table_index, verify_ssl, fields, fields_except, order_by, source): if fields is not None and fields_except is not None: click.echo('ERROR: `--fields` cannot be used with `--fields-except`', err=True) sys.exit(20) output_encoding = output_encoding or sys.stdout.encoding or \ DEFAULT_OUTPUT_ENCODING # TODO: may use `import_fields` for better performance if input_locale is not None: with rows.locale_context(input_locale): table = _import_table(source, encoding=input_encoding, verify_ssl=verify_ssl, index=table_index) else: table = _import_table(source, encoding=input_encoding, verify_ssl=verify_ssl, index=table_index) table_field_names = table.field_names if fields is not None: fields = _get_field_names(fields, table_field_names) if fields_except is not None: fields_except = _get_field_names(fields_except, table_field_names) # TODO: should set `export_fields = None` if `--fields` and # `--fields-except` are `None` if fields is not None and fields_except is None: export_fields = fields elif fields is not None and fields_except is not None: export_fields = list(fields) for field_to_remove in fields_except: export_fields.remove(field_to_remove) elif fields is None and fields_except is not None: export_fields = list(table_field_names) for field_to_remove in fields_except: export_fields.remove(field_to_remove) else: export_fields = table_field_names if order_by is not None: order_by = _get_field_names(order_by, table_field_names, permit_not=True) # TODO: use complete list of `order_by` fields table.order_by(order_by[0].replace('^', '-')) fobj = BytesIO() if output_locale is not None: with rows.locale_context(output_locale): rows.export_to_txt(table, fobj, encoding=output_encoding, export_fields=export_fields) else: rows.export_to_txt(table, fobj, encoding=output_encoding, export_fields=export_fields) fobj.seek(0) # TODO: may pass unicode to click.echo if output_encoding is not provided click.echo(fobj.read()) @cli.command(name='query', help='Query a 
table using SQL') @click.option('--input-encoding') @click.option('--output-encoding') @click.option('--input-locale') @click.option('--output-locale') @click.option('--verify-ssl', default=True, type=bool) @click.option('--fields') @click.option('--output') @click.argument('query', required=True) @click.argument('sources', nargs=-1, required=True) def query(input_encoding, output_encoding, input_locale, output_locale, verify_ssl, fields, output, query, sources): # TODO: may use sys.stdout.encoding if output_file = '-' output_encoding = output_encoding or sys.stdout.encoding or \ DEFAULT_OUTPUT_ENCODING if not query.lower().startswith('select'): field_names = '*' if fields is None else fields table_names = ', '.join(['table{}'.format(index) for index in range(1, len(sources) + 1)]) query = 'SELECT {} FROM {} WHERE {}'.format(field_names, table_names, query) if len(sources) == 1: source = detect_source(sources[0], verify_ssl=verify_ssl) if source.plugin_name != 'sqlite': if input_locale is not None: with rows.locale_context(input_locale): table = import_from_source(source, DEFAULT_INPUT_ENCODING) else: table = import_from_source(source, DEFAULT_INPUT_ENCODING) sqlite_connection = sqlite3.Connection(':memory:') rows.export_to_sqlite(table, sqlite_connection, table_name='table1') result = rows.import_from_sqlite(sqlite_connection, query=query) else: # Optimization: query the SQLite database directly result = import_from_source(source, DEFAULT_INPUT_ENCODING, query=query) else: if input_locale is not None: with rows.locale_context(input_locale): tables = [_import_table(source, encoding=input_encoding, verify_ssl=verify_ssl) for source in sources] else: tables = [_import_table(source, encoding=input_encoding, verify_ssl=verify_ssl) for source in sources] sqlite_connection = sqlite3.Connection(':memory:') for index, table in enumerate(tables, start=1): rows.export_to_sqlite(table, sqlite_connection, table_name='table{}'.format(index)) result = rows.import_from_sqlite(sqlite_connection, query=query) if output is None: fobj = BytesIO() if output_locale is not None: with rows.locale_context(output_locale): rows.export_to_txt(result, fobj, encoding=output_encoding) else: rows.export_to_txt(result, fobj, encoding=output_encoding) fobj.seek(0) click.echo(fobj.read()) else: if output_locale is not None: with rows.locale_context(output_locale): export_to_uri(result, output, encoding=output_encoding) else: export_to_uri(result, output, encoding=output_encoding) if __name__ == '__main__': cli() rows-0.3.1/rows/fields.py000066400000000000000000000405011310400316700153120ustar00rootroot00000000000000# coding: utf-8 # Copyright 2014-2016 Álvaro Justen # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program. If not, see . 
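# A short sketch of how the field classes below behave (values assume the
# default locale-independent mode, i.e. SHOULD_NOT_USE_LOCALE is True):
#
#   >>> IntegerField.deserialize('42')
#   42
#   >>> BoolField.deserialize('yes')
#   True
#   >>> DateField.serialize(datetime.date(2015, 9, 1))
#   '2015-09-01'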
from __future__ import unicode_literals import binascii import collections import datetime import json import locale import re import types from base64 import b64decode, b64encode from decimal import Decimal, InvalidOperation import six # Order matters here __all__ = ['BoolField', 'IntegerField', 'FloatField', 'DatetimeField', 'DateField', 'DecimalField', 'PercentField', 'JSONField', 'EmailField', 'TextField', 'BinaryField', 'Field'] REGEXP_ONLY_NUMBERS = re.compile('[^0-9\-]') SHOULD_NOT_USE_LOCALE = True # This variable is changed by rows.locale_manager NULL = ('-', 'null', 'none', 'nil', 'n/a', 'na') NULL_BYTES = (b'-', b'null', b'none', b'nil', b'n/a', b'na') class Field(object): """Base Field class - all fields should inherit from this As the fallback for all other field types is the BinaryField, this Field actually implements what is expected in the BinaryField """ TYPE = (type(None), ) @classmethod def serialize(cls, value, *args, **kwargs): """Serialize a value to be exported `cls.serialize` should always return a unicode value, except for BinaryField """ if value is None: value = '' return value @classmethod def deserialize(cls, value, *args, **kwargs): """Deserialize a value just after importing it `cls.deserialize` should always return a value of type `cls.TYPE` or `None`. """ if isinstance(value, cls.TYPE): return value elif is_null(value): return None else: return value class BinaryField(Field): """Field class to represent byte arrays Is not locale-aware (does not need to be) """ TYPE = (six.binary_type, ) @classmethod def serialize(cls, value, *args, **kwargs): if value is not None: if not isinstance(value, six.binary_type): raise ValueError("Can't be {}".format(cls.__name__)) else: try: return b64encode(value).decode('ascii') except (TypeError, binascii.Error): return value else: return '' @classmethod def deserialize(cls, value, *args, **kwargs): if value is not None: if isinstance(value, six.binary_type): return value elif isinstance(value, six.text_type): try: return b64decode(value) except (TypeError, ValueError, binascii.Error): raise ValueError("Can't decode base64") else: raise ValueError("Can't be {}".format(cls.__name__)) else: return b'' class BoolField(Field): """Field class to represent boolean values Is not locale-aware (if you need to, please customize by changing its attributes like `TRUE_VALUES` and `FALSE_VALUES`) """ TYPE = (bool, ) SERIALIZED_VALUES = {True: 'true', False: 'false', None: ''} TRUE_VALUES = ('true', 'yes') FALSE_VALUES = ('false', 'no') @classmethod def serialize(cls, value, *args, **kwargs): # TODO: should we serialize `None` as well or give it to the plugin?
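# e.g. serialize(True) -> 'true', serialize(False) -> 'false' and
# serialize(None) -> '' (per SERIALIZED_VALUES above)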
return cls.SERIALIZED_VALUES[value] @classmethod def deserialize(cls, value, *args, **kwargs): value = super(BoolField, cls).deserialize(value) if value is None or isinstance(value, cls.TYPE): return value value = as_string(value).lower() if value in cls.TRUE_VALUES: return True elif value in cls.FALSE_VALUES: return False else: raise ValueError('Value is not boolean') class IntegerField(Field): """Field class to represent integer Is locale-aware """ TYPE = (int, ) @classmethod def serialize(cls, value, *args, **kwargs): if value is None: return '' if SHOULD_NOT_USE_LOCALE: return six.text_type(value) else: grouping = kwargs.get('grouping', None) return locale.format('%d', value, grouping=grouping) @classmethod def deserialize(cls, value, *args, **kwargs): value = super(IntegerField, cls).deserialize(value) if value is None or isinstance(value, cls.TYPE): return value elif isinstance(value, float): new_value = int(value) if new_value != value: raise ValueError("It's float, not integer") else: value = new_value value = as_string(value) return int(value) if SHOULD_NOT_USE_LOCALE \ else locale.atoi(value) class FloatField(Field): """Field class to represent float Is locale-aware """ TYPE = (float, ) @classmethod def serialize(cls, value, *args, **kwargs): if value is None: return '' if SHOULD_NOT_USE_LOCALE: return six.text_type(value) else: grouping = kwargs.get('grouping', None) return locale.format('%f', value, grouping=grouping) @classmethod def deserialize(cls, value, *args, **kwargs): value = super(FloatField, cls).deserialize(value) if value is None or isinstance(value, cls.TYPE): return value value = as_string(value) if SHOULD_NOT_USE_LOCALE: return float(value) else: return locale.atof(value) class DecimalField(Field): """Field class to represent decimal data (as Python's decimal.Decimal) Is locale-aware """ TYPE = (Decimal, ) @classmethod def serialize(cls, value, *args, **kwargs): if value is None: return '' value_as_string = six.text_type(value) if SHOULD_NOT_USE_LOCALE: return value_as_string else: grouping = kwargs.get('grouping', None) has_decimal_places = value_as_string.find('.') != -1 if not has_decimal_places: string_format = '%d' else: decimal_places = len(value_as_string.split('.')[1]) string_format = '%.{}f'.format(decimal_places) return locale.format(string_format, value, grouping=grouping) @classmethod def deserialize(cls, value, *args, **kwargs): value = super(DecimalField, cls).deserialize(value) if value is None or isinstance(value, cls.TYPE): return value elif type(value) in (int, float): return Decimal(six.text_type(value)) if SHOULD_NOT_USE_LOCALE: try: return Decimal(value) except InvalidOperation: raise ValueError("Can't be {}".format(cls.__name__)) else: locale_vars = locale.localeconv() decimal_separator = locale_vars['decimal_point'] interesting_vars = ('decimal_point', 'mon_decimal_point', 'mon_thousands_sep', 'negative_sign', 'positive_sign', 'thousands_sep') chars = (locale_vars[x].replace('.', r'\.').replace('-', r'\-') for x in interesting_vars) interesting_chars = ''.join(set(chars)) regexp = re.compile(r'[^0-9{} ]'.format(interesting_chars)) value = as_string(value) if regexp.findall(value): raise ValueError("Can't be {}".format(cls.__name__)) parts = [REGEXP_ONLY_NUMBERS.subn('', number)[0] for number in value.split(decimal_separator)] if len(parts) > 2: raise ValueError("Can't deserialize with this locale.") try: value = Decimal(parts[0]) if len(parts) == 2: decimal_places = len(parts[1]) value = value + (Decimal(parts[1]) / (10 ** decimal_places)) 
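# Worked example (hypothetical pt_BR-style locale, where decimal_point
# is ',' and thousands_sep is '.'): '1.234,56'.split(',') gives
# ['1.234', '56']; stripping non-digits gives parts == ['1234', '56'],
# so value == Decimal('1234') + Decimal('56') / 10**2 ==
# Decimal('1234.56')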
        except InvalidOperation:
            raise ValueError("Can't be {}".format(cls.__name__))

        return value


class PercentField(DecimalField):
    """Field class to represent percent values

    Is locale-aware (inherits this behaviour from `rows.DecimalField`)
    """

    @classmethod
    def serialize(cls, value, *args, **kwargs):
        if value is None:
            return ''
        elif value == Decimal('0'):
            return '0.00%'

        value = Decimal(six.text_type(value * 100)[:-2])
        value = super(PercentField, cls).serialize(value, *args, **kwargs)
        return '{}%'.format(value)

    @classmethod
    def deserialize(cls, value, *args, **kwargs):
        if isinstance(value, cls.TYPE):
            return value
        elif is_null(value):
            return None

        value = as_string(value)
        if '%' not in value:
            raise ValueError("Can't be {}".format(cls.__name__))

        value = value.replace('%', '')
        return super(PercentField, cls).deserialize(value) / 100


class DateField(Field):
    """Field class to represent date

    Is not locale-aware (does not need to be)
    """

    TYPE = (datetime.date, )
    INPUT_FORMAT = '%Y-%m-%d'
    OUTPUT_FORMAT = '%Y-%m-%d'

    @classmethod
    def serialize(cls, value, *args, **kwargs):
        if value is None:
            return ''

        return six.text_type(value.strftime(cls.OUTPUT_FORMAT))

    @classmethod
    def deserialize(cls, value, *args, **kwargs):
        value = super(DateField, cls).deserialize(value)
        if value is None or isinstance(value, cls.TYPE):
            return value

        value = as_string(value)

        dt_object = datetime.datetime.strptime(value, cls.INPUT_FORMAT)
        return datetime.date(dt_object.year, dt_object.month, dt_object.day)


class DatetimeField(Field):
    """Field class to represent date-time

    Is not locale-aware (does not need to be)
    """

    TYPE = (datetime.datetime, )
    DATETIME_REGEXP = re.compile('^([0-9]{4})-([0-9]{2})-([0-9]{2})[ T]'
                                 '([0-9]{2}):([0-9]{2}):([0-9]{2})$')

    @classmethod
    def serialize(cls, value, *args, **kwargs):
        if value is None:
            return ''

        return six.text_type(value.isoformat())

    @classmethod
    def deserialize(cls, value, *args, **kwargs):
        value = super(DatetimeField, cls).deserialize(value)
        if value is None or isinstance(value, cls.TYPE):
            return value

        value = as_string(value)
        # TODO: may use iso8601
        groups = cls.DATETIME_REGEXP.findall(value)
        if not groups:
            raise ValueError("Can't be {}".format(cls.__name__))
        else:
            return datetime.datetime(*[int(x) for x in groups[0]])


class TextField(Field):
    """Field class to represent unicode strings

    Is not locale-aware (does not need to be)
    """

    TYPE = (six.text_type, )

    @classmethod
    def deserialize(cls, value, *args, **kwargs):
        if value is None or isinstance(value, cls.TYPE):
            return value
        else:
            return as_string(value)


class EmailField(TextField):
    """Field class to represent e-mail addresses

    Is not locale-aware (does not need to be)
    """

    EMAIL_REGEXP = re.compile(r'^[A-Z0-9._%+-]+@[A-Z0-9.-]+\.[A-Z]+$',
                              flags=re.IGNORECASE)

    @classmethod
    def serialize(cls, value, *args, **kwargs):
        if value is None:
            return ''

        return six.text_type(value)

    @classmethod
    def deserialize(cls, value, *args, **kwargs):
        value = super(EmailField, cls).deserialize(value)
        if value is None or not value.strip():
            return None

        result = cls.EMAIL_REGEXP.findall(value)
        if not result:
            raise ValueError("Can't be {}".format(cls.__name__))
        else:
            return result[0]


class JSONField(Field):
    """Field class to represent JSON-encoded strings

    Is not locale-aware (does not need to be)
    """

    TYPE = (list, dict)

    @classmethod
    def serialize(cls, value, *args, **kwargs):
        return json.dumps(value)

    @classmethod
    def deserialize(cls, value, *args, **kwargs):
        if value is None or isinstance(value, cls.TYPE):
            return value
        else:
            return json.loads(value)


local_vars = locals()
TYPES = [(key, local_vars.get(key)) for key in __all__ if key != 'Field']
AVAILABLE_FIELD_TYPES = [item[1] for item in TYPES]


def as_string(value):
    if isinstance(value, six.binary_type):
        raise ValueError('Binary is not supported')
    elif isinstance(value, six.text_type):
        return value
    else:
        return six.text_type(value)


def is_null(value):
    if value is None:
        return True
    elif type(value) is six.binary_type:
        value = value.strip().lower()
        return not value or value in NULL_BYTES
    else:
        value_str = as_string(value).strip().lower()
        return not value_str or value_str in NULL


def unique_values(values):
    result = []
    for value in values:
        if not is_null(value) and value not in result:
            result.append(value)
    return result


def detect_types(field_names, field_values, field_types=AVAILABLE_FIELD_TYPES,
                 *args, **kwargs):
    """Where the magic happens"""

    # TODO: look strategy of csv.Sniffer.has_header
    # TODO: may receive 'type hints'
    # TODO: should support receiving unicode objects directly
    # TODO: should expect data in unicode or will be able to use binary data?
    field_values = list(field_values)
    if not field_values:
        return collections.OrderedDict([(field_name, BinaryField)
                                        for field_name in field_names])

    number_of_fields = len(field_names)
    columns = list(zip(*[row for row in field_values
                         if len(row) == number_of_fields]))

    if len(columns) != number_of_fields:
        raise ValueError('Number of fields differ')

    detected_types = collections.OrderedDict([(field_name, None)
                                              for field_name in field_names])
    for index, field_name in enumerate(field_names):
        data = unique_values(columns[index])
        native_types = set(type(value) for value in data)

        if not data:
            # all values with an empty field (can't identify) -> BinaryField
            identified_type = BinaryField
        elif native_types == set([six.binary_type]):
            identified_type = BinaryField
        else:
            # ok, let's try to identify the type of this column by
            # trying to convert every non-null value in the sample
            possible_types = list(field_types)
            for value in data:
                cant_be = set()
                for type_ in possible_types:
                    try:
                        type_.deserialize(value, *args, **kwargs)
                    except (ValueError, TypeError):
                        cant_be.add(type_)
                for type_to_remove in cant_be:
                    possible_types.remove(type_to_remove)
            identified_type = possible_types[0]  # priorities matter
        detected_types[field_name] = identified_type
    return detected_types


def identify_type(value):
    value_type = type(value)
    if value_type not in (six.text_type, six.binary_type):
        possible_types = [type_class for type_name, type_class in TYPES
                          if value_type in type_class.TYPE]
        if not possible_types:
            detected = detect_types(['some_field'], [[value]])['some_field']
        else:
            detected = possible_types[0]
    else:
        detected = detect_types(['some_field'], [[value]])['some_field']
    return detected

rows-0.3.1/rows/localization.py

# coding: utf-8

from __future__ import unicode_literals

import contextlib
import locale

import six

import rows.fields


@contextlib.contextmanager
def locale_context(name, category=locale.LC_ALL):

    old_name = locale.getlocale()
    if None not in old_name:
        old_name = '.'.join(old_name)
    if isinstance(name, six.text_type):
        name = str(name)
    if old_name != name:
        locale.setlocale(category, name)

    rows.fields.SHOULD_NOT_USE_LOCALE = False
    try:
        yield
    finally:
        if old_name != name:
            locale.setlocale(category, old_name)
        rows.fields.SHOULD_NOT_USE_LOCALE = True
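# --- Example (added for illustration; not part of the original source): a
# minimal sketch of `locale_context`. It assumes the 'pt_BR.UTF-8' locale is
# generated on the system; inside the block, locale-aware fields such as
# `DecimalField` should parse numbers in the Brazilian format.
import rows.fields
from rows.localization import locale_context

with locale_context('pt_BR.UTF-8'):
    # '.' is the thousands separator and ',' the decimal mark here
    print(rows.fields.DecimalField.deserialize('3.141,59'))  # Decimal('3141.59')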
rows-0.3.1/rows/operations.py

# coding: utf-8

# Copyright 2014-2015 Álvaro Justen <https://github.com/turicas/rows/>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.

from __future__ import unicode_literals

from collections import OrderedDict

from rows.table import Table
from rows.plugins.utils import create_table


def join(keys, tables):
    """Merge a list of `Table` objects using `keys` to group rows"""

    # Make new (merged) Table fields
    fields = OrderedDict()
    for table in tables:
        fields.update(table.fields)
    # TODO: may raise an error if a same field is different in some tables

    # Check if all keys are inside merged Table's fields
    fields_keys = set(fields.keys())
    for key in keys:
        if key not in fields_keys:
            raise ValueError('Invalid key: "{}"'.format(key))

    # Group rows by key, without missing ordering
    none_fields = lambda: OrderedDict({field: None for field in fields.keys()})
    data = OrderedDict()
    for table in tables:
        for row in table:
            row_key = tuple([getattr(row, key) for key in keys])
            if row_key not in data:
                data[row_key] = none_fields()
            data[row_key].update(row._asdict())

    merged = Table(fields=fields)
    merged.extend(data.values())
    return merged


def transform(fields, function, *tables):
    "Return a new table based on other tables and a transformation function"

    new_table = Table(fields=fields)

    for table in tables:
        for row in filter(bool, map(lambda row: function(row, table), table)):
            new_table.append(row)

    return new_table


def transpose(table, fields_column, *args, **kwargs):
    field_names = []
    new_rows = [{} for _ in range(len(table.fields) - 1)]

    for row in table:
        row = row._asdict()
        field_name = row[fields_column]
        field_names.append(field_name)
        del row[fields_column]
        for index, value in enumerate(row.values()):
            new_rows[index][field_name] = value

    table_rows = [[row[field_name] for field_name in field_names]
                  for row in new_rows]
    return create_table([field_names] + table_rows, *args, **kwargs)
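# --- Example (added for illustration; not part of the original source): a
# sketch of `join`, which groups rows from several tables by the `keys`
# fields; the field names and data below are made up.
import rows
from rows.operations import join

people = rows.import_from_dicts([{'id': 1, 'name': 'Alice'}])
ages = rows.import_from_dicts([{'id': 1, 'age': 30}])
merged = join(keys=['id'], tables=[people, ages])
print(merged[0])  # Row(id=1, name='Alice', age=30)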
rows-0.3.1/rows/plugins/__init__.py

# coding: utf-8

# Copyright 2014-2016 Álvaro Justen <https://github.com/turicas/rows/>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.

from . import plugin_json as json
from . import dicts as dicts
from . import plugin_csv as csv
from . import txt as txt

try:
    from . import plugin_html as html
except ImportError:
    html = None

try:
    from . import xpath as xpath
except ImportError:
    xpath = None

try:
    from . import ods as ods
except ImportError:
    ods = None

try:
    from . import sqlite as sqlite
except ImportError:
    sqlite = None

try:
    from . import xls as xls
except ImportError:
    xls = None

try:
    from . import xlsx as xlsx
except ImportError:
    xlsx = None

try:
    from . import plugin_parquet as parquet
except ImportError:
    parquet = None

rows-0.3.1/rows/plugins/dicts.py

# coding: utf-8

# Copyright 2014-2016 Álvaro Justen <https://github.com/turicas/rows/>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.

from __future__ import unicode_literals

from rows.plugins.utils import create_table


def import_from_dicts(data, *args, **kwargs):
    'Import data from a list of dicts'

    headers = set()
    for row in data:
        headers.update(row.keys())
    headers = sorted(list(headers))

    data = [[row.get(header, None) for header in headers] for row in data]

    meta = {'imported_from': 'dicts', }
    return create_table([headers] + data, meta=meta, *args, **kwargs)


def export_to_dicts(table, *args, **kwargs):
    return [{key: getattr(row, key) for key in table.field_names}
            for row in table]
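# --- Example (added for illustration; not part of the original source):
# round-tripping through the dicts plugin. The header is the sorted union of
# all keys, and keys missing from a row come back as None.
from rows.plugins.dicts import export_to_dicts, import_from_dicts

table = import_from_dicts([{'name': 'Alice', 'age': 30}, {'name': 'Bob'}])
print(table.field_names)          # ['age', 'name']
print(export_to_dicts(table)[1])  # {'age': None, 'name': 'Bob'}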
rows-0.3.1/rows/plugins/ods.py

# coding: utf-8

# Copyright 2014-2015 Álvaro Justen <https://github.com/turicas/rows/>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.

from __future__ import unicode_literals

import zipfile

from decimal import Decimal

from lxml.etree import tostring as xml_to_string, fromstring as xml_from_string

from rows.plugins.utils import create_table, get_filename_and_fobj


def xpath(element, xpath, namespaces):
    return xml_from_string(xml_to_string(element)).xpath(xpath,
                                                         namespaces=namespaces)


def attrib(cell, namespace, name):
    return cell.attrib['{{{}}}{}'.format(namespace, name)]


def complete_with_None(lists, size):
    for element in lists:
        element.extend([None] * (size - len(element)))
        yield element


def import_from_ods(filename_or_fobj, index=0, *args, **kwargs):
    # TODO: import spreadsheet by name
    # TODO: unescape values

    filename, _ = get_filename_and_fobj(filename_or_fobj)

    ods_file = zipfile.ZipFile(filename)
    content_fobj = ods_file.open('content.xml')
    xml = content_fobj.read()  # will return bytes
    content_fobj.close()

    document = xml_from_string(xml)
    namespaces = document.nsmap
    spreadsheet = document.xpath('//office:spreadsheet',
                                 namespaces=namespaces)[0]
    tables = xpath(spreadsheet, '//table:table', namespaces)
    table = tables[index]

    table_rows_obj = xpath(table, '//table:table-row', namespaces)
    table_rows = []
    for row_obj in table_rows_obj:
        row = []
        for cell in xpath(row_obj, '//table:table-cell', namespaces):
            children = cell.getchildren()
            if not children:
                continue

            # TODO: evaluate 'boolean' and 'time' types
            value_type = attrib(cell, namespaces['office'], 'value-type')
            if value_type == 'date':
                cell_value = attrib(cell, namespaces['office'], 'date-value')
            elif value_type == 'float':
                cell_value = attrib(cell, namespaces['office'], 'value')
            elif value_type == 'percentage':
                cell_value = attrib(cell, namespaces['office'], 'value')
                cell_value = Decimal(str(Decimal(cell_value) * 100)[:-2])
                cell_value = '{}%'.format(cell_value)
            elif value_type == 'string':
                try:
                    # get computed string (from formula, for example)
                    cell_value = attrib(cell, namespaces['office'],
                                        'string-value')
                except KeyError:
                    # computed string not present => get from <p>...</p>
                    cell_value = children[0].text
            else:  # value_type == some type we don't know
                cell_value = children[0].text

            try:
                repeat = attrib(cell, namespaces['table'],
                                'number-columns-repeated')
            except KeyError:
                row.append(cell_value)
            else:
                for _ in range(int(repeat)):
                    row.append(cell_value)
        if row:
            table_rows.append(row)

    max_length = max(len(row) for row in table_rows)
    full_rows = complete_with_None(table_rows, max_length)
    meta = {'imported_from': 'ods', 'filename': filename,}
    return create_table(full_rows, meta=meta, *args, **kwargs)

rows-0.3.1/rows/plugins/plugin_csv.py

# coding: utf-8

# Copyright 2014-2016 Álvaro Justen <https://github.com/turicas/rows/>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.

from __future__ import unicode_literals

from io import BytesIO

import six
import unicodecsv

from rows.plugins.utils import create_table, get_filename_and_fobj, serialize

sniffer = unicodecsv.Sniffer()
DELIMITERS_PY2 = (b',', b';', b'\t')
DELIMITERS_PY3 = (',', ';', '\t')


if six.PY2:
    def discover_dialect(sample, encoding):
        try:
            return sniffer.sniff(sample, delimiters=DELIMITERS_PY2)
        except unicodecsv.Error:
            # Could not detect dialect, fall back to 'excel'
            return unicodecsv.excel

elif six.PY3:
    def discover_dialect(sample, encoding):
        try:
            return sniffer.sniff(sample.decode(encoding),
                                 delimiters=DELIMITERS_PY3)
        except unicodecsv.Error:
            # Could not detect dialect, fall back to 'excel'
            return unicodecsv.excel


def import_from_csv(filename_or_fobj, encoding='utf-8', dialect=None,
                    sample_size=8192, *args, **kwargs):
    '''Import data from a CSV file

    If a file-like object is provided it MUST be in binary mode, like in
    `open(filename, mode='rb')`.
    '''
    filename, fobj = get_filename_and_fobj(filename_or_fobj, mode='rb')

    if dialect is None:
        cursor = fobj.tell()
        dialect = discover_dialect(fobj.read(sample_size), encoding)
        fobj.seek(cursor)

    reader = unicodecsv.reader(fobj, encoding=encoding, dialect=dialect)
    meta = {'imported_from': 'csv', 'filename': filename,
            'encoding': encoding,}
    return create_table(reader, meta=meta, *args, **kwargs)


def export_to_csv(table, filename_or_fobj=None, encoding='utf-8',
                  dialect=unicodecsv.excel, *args, **kwargs):
    '''Export a `rows.Table` to a CSV file

    If a file-like object is provided it MUST be in binary mode, like in
    `open(filename, mode='wb')`.
    If no filename/fobj is provided, the function returns a string with CSV
    contents.
    '''
    # TODO: will work only if table.fields is OrderedDict
    # TODO: should use fobj? What about creating a method like json.dumps?

    if filename_or_fobj is not None:
        _, fobj = get_filename_and_fobj(filename_or_fobj, mode='wb')
    else:
        fobj = BytesIO()

    writer = unicodecsv.writer(fobj, encoding=encoding, dialect=dialect)
    for row in serialize(table, *args, **kwargs):
        writer.writerow(row)

    if filename_or_fobj is not None:
        fobj.flush()
        return fobj
    else:
        fobj.seek(0)
        result = fobj.read()
        fobj.close()
        return result
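# --- Example (added for illustration; not part of the original source):
# importing CSV from a binary file-like object (the ';' delimiter is sniffed
# from the sample) and exporting back to bytes since no filename is given.
from io import BytesIO

from rows.plugins.plugin_csv import export_to_csv, import_from_csv

table = import_from_csv(BytesIO(b'name;age\nAlice;30\nBob;25\n'))
print(export_to_csv(table))  # bytes in the default 'excel' dialect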
rows-0.3.1/rows/plugins/plugin_html.py

# coding: utf-8

# Copyright 2014-2016 Álvaro Justen <https://github.com/turicas/rows/>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.

from __future__ import unicode_literals

try:
    from HTMLParser import HTMLParser  # Python 2
except ImportError:
    from html.parser import HTMLParser  # Python 3

try:
    from html import escape  # Python 3
except ImportError:
    from cgi import escape  # Python 2

import six

from lxml.html import document_fromstring
from lxml.etree import tostring as to_string, strip_tags

from rows.plugins.utils import (create_table, export_data,
                                get_filename_and_fobj, serialize)

unescape = HTMLParser().unescape


def _get_content(element):
    return (element.text if element.text is not None else '') + \
            ''.join(to_string(child, encoding=six.text_type)
                    for child in element.getchildren())


def _get_row(row, column_tag, preserve_html, properties):
    if not preserve_html:
        data = list(map(_extract_node_text, row.xpath(column_tag)))
    else:
        data = list(map(_get_content, row.xpath(column_tag)))

    if properties:
        data.append(dict(row.attrib))

    return data


def import_from_html(filename_or_fobj, encoding='utf-8', index=0,
                     ignore_colspan=True, preserve_html=False,
                     properties=False, table_tag='table', row_tag='tr',
                     column_tag='td|th', *args, **kwargs):

    filename, fobj = get_filename_and_fobj(filename_or_fobj, mode='rb')
    html = fobj.read().decode(encoding)
    html_tree = document_fromstring(html)
    tables = html_tree.xpath('//{}'.format(table_tag))
    table = tables[index]

    strip_tags(table, 'thead')
    strip_tags(table, 'tbody')
    row_elements = table.xpath(row_tag)

    table_rows = [_get_row(row,
                           column_tag=column_tag,
                           preserve_html=preserve_html,
                           properties=properties)
                  for row in row_elements]

    if properties:
        table_rows[0][-1] = 'properties'

    if preserve_html and kwargs.get('fields', None) is None:
        # The field names will be the first table row, so we need to strip
        # HTML from it even if `preserve_html` is `True` (it's `True` only
        # for rows, not for the header).
        table_rows[0] = list(map(_extract_node_text, row_elements[0]))

    max_columns = max(map(len, table_rows))
    if ignore_colspan:
        table_rows = [row for row in table_rows if len(row) == max_columns]

    meta = {'imported_from': 'html', 'filename': filename,
            'encoding': encoding,}
    return create_table(table_rows, meta=meta, *args, **kwargs)


def export_to_html(table, filename_or_fobj=None, encoding='utf-8', *args,
                   **kwargs):
    serialized_table = serialize(table, *args, **kwargs)
    fields = next(serialized_table)
    result = ['<table>\n\n', '  <thead>\n', '    <tr>\n']
    header = ['      <th> {} </th>\n'.format(field) for field in fields]
    result.extend(header)
    result.extend(['    </tr>\n', '  </thead>\n', '\n', '  <tbody>\n', '\n'])
    for index, row in enumerate(serialized_table, start=1):
        css_class = 'odd' if index % 2 == 1 else 'even'
        result.append('    <tr class="{}">\n'.format(css_class))
        for value in row:
            result.extend(['      <td> ', escape(value), ' </td>\n'])
        result.append('    </tr>\n\n')
    result.append('  </tbody>\n\n</table>\n')
    html = ''.join(result).encode(encoding)

    return export_data(filename_or_fobj, html, mode='wb')


def _extract_node_text(node):
    'Extract text from a given lxml node'

    texts = map(six.text_type.strip,
                map(six.text_type, map(unescape, node.xpath('.//text()'))))
    return ' '.join(text for text in texts if text)


def count_tables(filename_or_fobj, encoding='utf-8', table_tag='table'):
    filename, fobj = get_filename_and_fobj(filename_or_fobj)
    html = fobj.read().decode(encoding)
    html_tree = document_fromstring(html)
    tables = html_tree.xpath('//{}'.format(table_tag))
    return len(tables)


def tag_to_dict(html):
    "Extract tag's attributes into a `dict`"

    element = document_fromstring(html).xpath('//html/body/child::*')[0]
    attributes = dict(element.attrib)
    attributes['text'] = element.text_content()
    return attributes


def extract_text(html):
    'Extract text from a given HTML'

    return _extract_node_text(document_fromstring(html))


def extract_links(html):
    'Extract the href values from a given HTML (returns a list of strings)'

    return document_fromstring(html).xpath('.//@href')

rows-0.3.1/rows/plugins/plugin_json.py

# coding: utf-8

# Copyright 2014-2016 Álvaro Justen <https://github.com/turicas/rows/>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.

from __future__ import unicode_literals

import datetime
import decimal
import json

import six

from rows import fields
from rows.plugins.utils import (create_table, export_data,
                                get_filename_and_fobj, prepare_to_export)


def import_from_json(filename_or_fobj, encoding='utf-8', *args, **kwargs):
    '''Import a JSON file or file-like object into a `rows.Table`

    If a file-like object is provided it MUST be open in text (non-binary)
    mode on Python 3 and could be open in both binary or text mode on
    Python 2.
    '''
    filename, fobj = get_filename_and_fobj(filename_or_fobj)

    json_obj = json.load(fobj, encoding=encoding)
    field_names = list(json_obj[0].keys())
    table_rows = [[item[key] for key in field_names] for item in json_obj]

    meta = {'imported_from': 'json', 'filename': filename,
            'encoding': encoding,}
    return create_table([field_names] + table_rows, meta=meta, *args,
                        **kwargs)


def _convert(value, field_type, *args, **kwargs):
    if value is None or field_type in (
            fields.BinaryField,
            fields.BoolField,
            fields.FloatField,
            fields.IntegerField,
            fields.JSONField,
            fields.TextField,
    ):
        # If the field_type is one of those, the value can be passed directly
        # to the JSON encoder
        return value
    else:
        # The field type is not represented natively in JSON, then it needs
        # to be serialized (converted to a string)
        return field_type.serialize(value, *args, **kwargs)


def export_to_json(table, filename_or_fobj=None, encoding='utf-8',
                   indent=None, *args, **kwargs):
    '''Export a `rows.Table` to a JSON file or file-like object

    If a file-like object is provided it MUST be open in binary mode (like in
    `open('myfile.json', mode='wb')`).
    '''
    # TODO: will work only if table.fields is OrderedDict

    fields = table.fields
    prepared_table = prepare_to_export(table, *args, **kwargs)
    field_names = next(prepared_table)
    data = [{field_name: _convert(value, fields[field_name], *args, **kwargs)
             for field_name, value in zip(field_names, row)}
            for row in prepared_table]

    result = json.dumps(data, indent=indent)
    if type(result) is six.text_type:  # Python 3
        result = result.encode(encoding)
    if indent is not None:
        # clean up empty spaces at the end of lines
        result = b'\n'.join(line.rstrip() for line in result.splitlines())

    return export_data(filename_or_fobj, result, mode='wb')
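# --- Example (added for illustration; not part of the original source):
# exporting to JSON; values whose types JSON cannot represent natively (the
# date below) go through `field_type.serialize` in `_convert`. Written for
# the Python versions this release supported (`json.load`'s `encoding`
# argument was removed in Python 3.9).
from io import BytesIO

from rows.plugins.plugin_json import export_to_json, import_from_json

table = import_from_json(BytesIO(b'[{"name": "Alice", "born": "1990-01-01"}]'))
print(export_to_json(table))  # b'[{"name": "Alice", "born": "1990-01-01"}]'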
rows-0.3.1/rows/plugins/plugin_parquet.py

# coding: utf-8

# Copyright 2016 Álvaro Justen <https://github.com/turicas/rows/>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.

from __future__ import unicode_literals

import logging

from collections import OrderedDict

from rows import fields
from rows.plugins.utils import create_table, get_filename_and_fobj


class NullHandler(logging.Handler):
    def emit(self, record):
        pass

logging.getLogger("parquet").addHandler(NullHandler())

import parquet


PARQUET_TO_ROWS = {
    parquet.parquet_thrift.Type.BOOLEAN: fields.BoolField,
    parquet.parquet_thrift.Type.BYTE_ARRAY: fields.BinaryField,
    parquet.parquet_thrift.Type.DOUBLE: fields.FloatField,
    parquet.parquet_thrift.Type.FIXED_LEN_BYTE_ARRAY: fields.BinaryField,
    parquet.parquet_thrift.Type.FLOAT: fields.FloatField,
    parquet.parquet_thrift.Type.INT32: fields.IntegerField,
    parquet.parquet_thrift.Type.INT64: fields.IntegerField,
    parquet.parquet_thrift.Type.INT96: fields.IntegerField,
}


def import_from_parquet(filename_or_fobj, *args, **kwargs):
    'Import data from a Parquet file'

    filename, fobj = get_filename_and_fobj(filename_or_fobj, mode='rb')

    # TODO: should look into `schema.converted_type` also
    types = OrderedDict([(schema.name, PARQUET_TO_ROWS[schema.type])
                         for schema in parquet._read_footer(fobj).schema
                         if schema.type is not None])
    header = list(types.keys())
    table_rows = list(parquet.reader(fobj))  # TODO: be lazy

    meta = {'imported_from': 'parquet', 'filename': filename,}
    return create_table([header] + table_rows, meta=meta, force_types=types,
                        *args, **kwargs)
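# --- Example (added for illustration; not part of the original source): the
# Parquet schema already carries types, so they are forced via `force_types`
# instead of being re-detected. 'data.parquet' is an assumed, pre-existing
# file.
from rows.plugins.plugin_parquet import import_from_parquet

table = import_from_parquet('data.parquet')
print(table.fields)  # OrderedDict mapping column names to rows field classes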
rows-0.3.1/rows/plugins/sqlite.py

# coding: utf-8

# Copyright 2014-2016 Álvaro Justen <https://github.com/turicas/rows/>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.

from __future__ import unicode_literals

import datetime
import sqlite3
import string

import six

import rows.fields as fields

from rows.plugins.utils import (create_table, get_filename_and_fobj,
                                ipartition, make_unique_name,
                                prepare_to_export)


SQL_TABLE_NAMES = 'SELECT name FROM sqlite_master WHERE type="table"'
SQL_CREATE_TABLE = 'CREATE TABLE IF NOT EXISTS "{table_name}" ({field_types})'
SQL_SELECT_ALL = 'SELECT * FROM "{table_name}"'
SQL_INSERT = 'INSERT INTO "{table_name}" ({field_names}) VALUES ({placeholders})'
SQLITE_TYPES = {
    fields.BinaryField: 'BLOB',
    fields.BoolField: 'INTEGER',
    fields.DateField: 'TEXT',
    fields.DatetimeField: 'TEXT',
    fields.DecimalField: 'REAL',
    fields.FloatField: 'REAL',
    fields.IntegerField: 'INTEGER',
    fields.PercentField: 'REAL',
    fields.TextField: 'TEXT',
}
DEFAULT_TYPE = 'BLOB'


def _python_to_sqlite(field_types):

    def convert_value(field_type, value):
        if field_type in (
                fields.BinaryField,
                fields.BoolField,
                fields.DateField,
                fields.DatetimeField,
                fields.FloatField,
                fields.IntegerField,
                fields.TextField,
        ):
            return value
        elif field_type in (fields.DecimalField, fields.PercentField):
            return float(value) if value is not None else None
        else:  # don't know this field
            return field_type.serialize(value)

    def convert_row(row):
        return [convert_value(field_type, value)
                for field_type, value in zip(field_types, row)]

    return convert_row


def _get_connection(filename_or_connection):
    if isinstance(filename_or_connection, (six.binary_type, six.text_type)):
        return sqlite3.connect(filename_or_connection)  # filename
    else:  # already a connection
        return filename_or_connection


def _valid_table_name(name):
    '''Verify if a given table name is valid for `rows`

    Rules:
    - Should start with a letter or '_'
    - Letters can be capitalized or not
    - Accepts letters, numbers and _
    '''
    if name[0] not in '_' + string.ascii_letters or \
       not set(name).issubset('_' + string.ascii_letters + string.digits):
        return False
    else:
        return True


def import_from_sqlite(filename_or_connection, table_name='table1',
                       query=None, query_args=None, *args, **kwargs):
    connection = _get_connection(filename_or_connection)
    cursor = connection.cursor()

    if query is None:
        if not _valid_table_name(table_name):
            raise ValueError('Invalid table name: {}'.format(table_name))
        query = SQL_SELECT_ALL.format(table_name=table_name)

    if query_args is None:
        query_args = tuple()

    table_rows = list(cursor.execute(query, query_args))  # TODO: may be lazy
    header = [six.text_type(info[0]) for info in cursor.description]
    cursor.close()
    # TODO: should close connection also?

    meta = {'imported_from': 'sqlite', 'filename': filename_or_connection, }
    return create_table([header] + table_rows, meta=meta, *args, **kwargs)


def export_to_sqlite(table, filename_or_connection, table_name=None,
                     table_name_format='table{index}', batch_size=100,
                     *args, **kwargs):
    # TODO: should add transaction support?
    prepared_table = prepare_to_export(table, *args, **kwargs)
    connection = _get_connection(filename_or_connection)
    cursor = connection.cursor()

    if table_name is None:
        table_names = [item[0] for item in cursor.execute(SQL_TABLE_NAMES)]
        table_name = make_unique_name(table_name_format.format(index=1),
                                      existing_names=table_names,
                                      name_format=table_name_format,
                                      start=1)
    elif not _valid_table_name(table_name):
        raise ValueError('Invalid table name: {}'.format(table_name))

    field_names = next(prepared_table)
    field_types = list(map(table.fields.get, field_names))
    columns = ['{} {}'.format(field_name,
                              SQLITE_TYPES.get(field_type, DEFAULT_TYPE))
               for field_name, field_type in zip(field_names, field_types)]
    cursor.execute(SQL_CREATE_TABLE.format(table_name=table_name,
                                           field_types=', '.join(columns)))

    insert_sql = SQL_INSERT.format(
            table_name=table_name,
            field_names=', '.join(field_names),
            placeholders=', '.join('?' for _ in field_names))
    _convert_row = _python_to_sqlite(field_types)
    for batch in ipartition(prepared_table, batch_size):
        cursor.executemany(insert_sql, map(_convert_row, batch))

    connection.commit()
    return connection

rows-0.3.1/rows/plugins/txt.py

# coding: utf-8

# Copyright 2014-2015 Álvaro Justen <https://github.com/turicas/rows/>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.

from __future__ import unicode_literals

from rows.plugins.utils import (create_table, export_data,
                                get_filename_and_fobj, serialize)

DASH, PLUS, PIPE = '-', '+', '|'


def _max_column_sizes(field_names, table_rows):
    columns = zip(*([field_names] + table_rows))
    return {field_name: max(len(value) for value in column)
            for field_name, column in zip(field_names, columns)}


def import_from_txt(filename_or_fobj, encoding='utf-8', *args, **kwargs):
    # TODO: should be able to change DASH, PLUS and PIPE
    filename, fobj = get_filename_and_fobj(filename_or_fobj, mode='rb')
    contents = fobj.read().decode(encoding).strip().splitlines()

    # remove '+----+----+' lines
    contents = contents[1:-1]
    del contents[1]

    table_rows = [[value.strip() for value in row.split(PIPE)[1:-1]]
                  for row in contents]
    meta = {'imported_from': 'txt', 'filename': filename,
            'encoding': encoding,}
    return create_table(table_rows, meta=meta, *args, **kwargs)


def export_to_txt(table, filename_or_fobj=None, encoding=None, *args,
                  **kwargs):
    '''Export a `rows.Table` to text

    This function can return the result as a string or save into a file (via
    filename or file-like object).

    `encoding` could be `None` if no filename/file-like object is specified,
    then the return type will be `six.text_type`.
    '''
    # TODO: should be able to change DASH, PLUS and PIPE
    # TODO: will work only if table.fields is OrderedDict

    serialized_table = serialize(table, *args, **kwargs)
    field_names = next(serialized_table)
    table_rows = list(serialized_table)
    max_sizes = _max_column_sizes(field_names, table_rows)

    dashes = [DASH * (max_sizes[field] + 2) for field in field_names]
    header = [field.center(max_sizes[field]) for field in field_names]
    header = '{} {} {}'.format(PIPE, ' {} '.format(PIPE).join(header), PIPE)
    split_line = PLUS + PLUS.join(dashes) + PLUS

    result = [split_line, header, split_line]
    for row in table_rows:
        values = [value.rjust(max_sizes[field_name])
                  for field_name, value in zip(field_names, row)]
        row_data = ' {} '.format(PIPE).join(values)
        result.append('{} {} {}'.format(PIPE, row_data, PIPE))

    result.extend([split_line, ''])
    data = '\n'.join(result)

    if encoding is not None:
        data = data.encode(encoding)

    return export_data(filename_or_fobj, data, mode='wb')
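# --- Example (added for illustration; not part of the original source):
# `export_to_txt` with no filename and no encoding returns the rendered
# table as a unicode string.
import rows
from rows.plugins.txt import export_to_txt

table = rows.import_from_dicts([{'name': 'Alice', 'age': 30}])
print(export_to_txt(table))
# +-----+-------+
# | age | name  |
# +-----+-------+
# |  30 | Alice |
# +-----+-------+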
rows-0.3.1/rows/plugins/utils.py

# coding: utf-8

# Copyright 2014-2015 Álvaro Justen <https://github.com/turicas/rows/>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.

from __future__ import unicode_literals

from collections import Iterator, OrderedDict
from itertools import chain, islice
from unicodedata import normalize

from rows.fields import detect_types
from rows.table import FlexibleTable, Table


SLUG_CHARS = 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789_'


def slug(text, separator='_', permitted_chars=SLUG_CHARS,
         replace_with_separator=' -_'):
    '''Slugify text

    Example: ' ÁLVARO justen% ' -> 'alvaro_justen'
    '''

    # Strip non-ASCII characters
    # Example: u' ÁLVARO justen% ' -> ' ALVARO justen% '
    text = normalize('NFKD', text.strip()).encode('ascii', 'ignore')\
                                          .decode('ascii')

    # Replace spaces and other chars with separator
    # Example: u' ALVARO justen% ' -> u'_ALVARO__justen%_'
    for char in replace_with_separator:
        text = text.replace(char, separator)

    # Remove non-permitted characters and put everything to lowercase
    # Example: u'_ALVARO__justen%_' -> u'_alvaro__justen_'
    text = ''.join(char for char in text if char in permitted_chars).lower()

    # Remove double occurrences of separator
    # Example: u'_alvaro__justen_' -> u'_alvaro_justen_'
    double_separator = separator + separator
    while double_separator in text:
        text = text.replace(double_separator, separator)

    # Strip separators
    # Example: u'_alvaro_justen_' -> u'alvaro_justen'
    return text.strip(separator)


def ipartition(iterable, partition_size):
    if not isinstance(iterable, Iterator):
        iterator = iter(iterable)
    else:
        iterator = iterable

    finished = False
    while not finished:
        data = []
        for _ in range(partition_size):
            try:
                data.append(next(iterator))
            except StopIteration:
                finished = True
                break
        if data:
            yield data


def get_filename_and_fobj(filename_or_fobj, mode='r', dont_open=False):
    if getattr(filename_or_fobj, 'read', None) is not None:
        fobj = filename_or_fobj
        filename = getattr(fobj, 'name', None)
    else:
        fobj = open(filename_or_fobj, mode=mode) if not dont_open else None
        filename = filename_or_fobj

    return filename, fobj


def make_unique_name(name, existing_names, name_format='{name}_{index}',
                     start=2):
    '''Return a unique name based on `name_format` and `name`.'''
    index = start
    new_name = name
    while new_name in existing_names:
        new_name = name_format.format(name=name, index=index)
        index += 1

    return new_name


def make_header(field_names, permit_not=False):
    'Return unique and slugged field names'
    slug_chars = SLUG_CHARS if not permit_not else SLUG_CHARS + '^'

    header = [slug(field_name, permitted_chars=slug_chars)
              for field_name in field_names]
    result = []
    for index, field_name in enumerate(header):
        if not field_name:
            field_name = 'field_{}'.format(index)
        elif field_name[0].isdigit():
            field_name = 'field_{}'.format(field_name)

        if field_name in result:
            field_name = make_unique_name(name=field_name,
                                          existing_names=result,
                                          start=2)
        result.append(field_name)

    return result


def create_table(data, meta=None, fields=None, skip_header=True,
                 import_fields=None, samples=None, force_types=None,
                 *args, **kwargs):
    # TODO: add auto_detect_types=True parameter
    table_rows = iter(data)
    sample_rows = []

    if fields is None:
        header = make_header(next(table_rows))

        if samples is not None:
            sample_rows = list(islice(table_rows, 0, samples))
        else:
            sample_rows = list(table_rows)

        fields = detect_types(header, sample_rows, *args, **kwargs)

        if force_types is not None:
            # TODO: optimize field detection (ignore fields on `force_types`)
            for field_name, field_type in force_types.items():
                fields[field_name] = field_type
    else:
        if not isinstance(fields, OrderedDict):
            raise ValueError('`fields` must be an `OrderedDict`')

        if skip_header:
            _ = next(table_rows)

        header = make_header(list(fields.keys()))
        fields = OrderedDict([(field_name, fields[key])
                              for field_name, key in zip(header, fields)])

    if import_fields is not None:
        # TODO: can optimize if import_fields is not None.
        #       Example: do not detect all columns
        import_fields = make_header(import_fields)

        diff = set(import_fields) - set(header)
        if diff:
            field_names = ', '.join('"{}"'.format(field) for field in diff)
            raise ValueError("Invalid field names: {}".format(field_names))

        new_fields = OrderedDict()
        for field_name in import_fields:
            new_fields[field_name] = fields[field_name]
        fields = new_fields

    table = Table(fields=fields, meta=meta)
    # TODO: put this inside Table.__init__
    for row in chain(sample_rows, table_rows):
        table.append({field_name: value
                      for field_name, value in zip(header, row)})

    return table


def prepare_to_export(table, export_fields=None, *args, **kwargs):
    # TODO: optimize for more used cases (export_fields=None)
    table_type = type(table)
    if table_type not in (FlexibleTable, Table):
        raise ValueError('Table type not recognized')

    if export_fields is None:
        # we use already slugged-fieldnames
        export_fields = table.field_names
    else:
        # we need to slug all the field names
        export_fields = make_header(export_fields)

    table_field_names = table.field_names
    diff = set(export_fields) - set(table_field_names)
    if diff:
        field_names = ', '.join('"{}"'.format(field) for field in diff)
        raise ValueError("Invalid field names: {}".format(field_names))

    yield export_fields

    if table_type is Table:
        field_indexes = list(map(table_field_names.index, export_fields))
        for row in table._rows:
            yield [row[field_index] for field_index in field_indexes]
    elif table_type is FlexibleTable:
        for row in table._rows:
            yield [row[field_name] for field_name in export_fields]


def serialize(table, *args, **kwargs):
    prepared_table = prepare_to_export(table, *args, **kwargs)

    field_names = next(prepared_table)
    yield field_names

    field_types = [table.fields[field_name] for field_name in field_names]
    for row in prepared_table:
        yield [field_type.serialize(value, *args, **kwargs)
               for value, field_type in zip(row, field_types)]


def export_data(filename_or_fobj, data, mode='w'):
    if filename_or_fobj is not None:
        _, fobj = get_filename_and_fobj(filename_or_fobj, mode=mode)
        fobj.write(data)
        fobj.flush()
        return fobj
    else:
        return data
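# --- Example (added for illustration; not part of the original source): how
# `slug` and `make_header` normalize arbitrary column titles into valid,
# unique identifiers (the first call reproduces the docstring example).
from rows.plugins.utils import make_header, slug

print(slug(' ÁLVARO justen% '))  # 'alvaro_justen'
print(make_header(['Name', 'name', '2nd', '']))
# ['name', 'name_2', 'field_2nd', 'field_3']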
rows-0.3.1/rows/plugins/xls.py

# coding: utf-8

# Copyright 2014-2016 Álvaro Justen <https://github.com/turicas/rows/>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.

from __future__ import unicode_literals

import datetime

from decimal import Decimal
from io import BytesIO

import xlrd
import xlwt

import rows.fields as fields

from rows.plugins.utils import (create_table, get_filename_and_fobj,
                                prepare_to_export)


CELL_TYPES = {
    xlrd.XL_CELL_BLANK: fields.TextField,
    xlrd.XL_CELL_DATE: fields.DatetimeField,
    xlrd.XL_CELL_ERROR: None,
    xlrd.XL_CELL_TEXT: fields.TextField,
    xlrd.XL_CELL_BOOLEAN: fields.BoolField,
    xlrd.XL_CELL_EMPTY: None,
    xlrd.XL_CELL_NUMBER: fields.FloatField,
}


# TODO: add more formatting styles for other types such as currency
# TODO: styles may be influenced by locale
FORMATTING_STYLES = {
    fields.DateField: xlwt.easyxf(num_format_str='yyyy-mm-dd'),
    fields.DatetimeField: xlwt.easyxf(num_format_str='yyyy-mm-dd hh:mm:ss'),
    fields.PercentField: xlwt.easyxf(num_format_str='0.00%'),
}


def _python_to_xls(field_types):

    def convert_value(field_type, value):
        data = {}
        if field_type in FORMATTING_STYLES:
            data['style'] = FORMATTING_STYLES[field_type]

        if field_type in (
                fields.BinaryField,
                fields.BoolField,
                fields.DateField,
                fields.DatetimeField,
                fields.DecimalField,
                fields.FloatField,
                fields.IntegerField,
                fields.PercentField,
                fields.TextField,
        ):
            return value, data
        else:  # don't know this field
            return field_type.serialize(value), data

    def convert_row(row):
        return [convert_value(field_type, value)
                for field_type, value in zip(field_types, row)]

    return convert_row


def cell_value(sheet, row, col):
    cell = sheet.cell(row, col)
    field_type = CELL_TYPES[cell.ctype]

    # TODO: this approach will not work if using locale
    value = cell.value

    if field_type is None:
        return None
    elif field_type is fields.TextField:
        if cell.ctype != xlrd.XL_CELL_BLANK:
            return value
        else:
            return ''
    elif field_type is fields.DatetimeField:
        time_tuple = xlrd.xldate_as_tuple(value, sheet.book.datemode)
        value = field_type.serialize(datetime.datetime(*time_tuple))
        return value.split('T00:00:00')[0]
    elif field_type is fields.BoolField:
        if value == 0:
            return False
        elif value == 1:
            return True
    else:
        book = sheet.book
        xf = book.xf_list[cell.xf_index]
        fmt = book.format_map[xf.format_key]

        if fmt.format_str.endswith('%'):
            # TODO: we may optimize this approach: we're converting to string
            # and the library is detecting the type when we could just say to
            # the library this value is PercentField
            if value is not None:
                try:
                    decimal_places = len(fmt.format_str[:-1].split('.')[-1])
                except IndexError:
                    decimal_places = 2
                return '{}%'.format(str(round(value * 100, decimal_places)))
            else:
                return None
        elif type(value) == float and int(value) == value:
            return int(value)
        else:
            return value


def import_from_xls(filename_or_fobj, sheet_name=None, sheet_index=0,
                    start_row=0, start_column=0, *args, **kwargs):

    filename, _ = get_filename_and_fobj(filename_or_fobj, mode='rb')
    book = xlrd.open_workbook(filename, formatting_info=True)
    if sheet_name is not None:
        sheet = book.sheet_by_name(sheet_name)
    else:
        sheet = book.sheet_by_index(sheet_index)
    # TODO: may re-use Excel data types

    # Get header and rows
    table_rows = [[cell_value(sheet, row_index, column_index)
                   for column_index in range(start_column, sheet.ncols)]
                  for row_index in range(start_row, sheet.nrows)]

    meta = {'imported_from': 'xls',
            'filename': filename,
            'sheet_name': sheet.name, }
    return create_table(table_rows, meta=meta, *args, **kwargs)


def export_to_xls(table, filename_or_fobj=None, sheet_name='Sheet1', *args,
                  **kwargs):

    work_book = xlwt.Workbook()
    sheet = work_book.add_sheet(sheet_name)

    prepared_table = prepare_to_export(table, *args, **kwargs)

    field_names = next(prepared_table)
    for column_index, field_name in enumerate(field_names):
        sheet.write(0, column_index, field_name)

    _convert_row = _python_to_xls([table.fields.get(field)
                                   for field in field_names])
    for row_index, row in enumerate(prepared_table, start=1):
        for column_index, (value, data) in enumerate(_convert_row(row)):
            sheet.write(row_index, column_index, value, **data)

    if filename_or_fobj is not None:
        _, fobj = get_filename_and_fobj(filename_or_fobj, mode='wb')
        work_book.save(fobj)
        fobj.flush()
        return fobj
    else:
        fobj = BytesIO()
        work_book.save(fobj)
        fobj.seek(0)
        result = fobj.read()
        fobj.close()
        return result

rows-0.3.1/rows/plugins/xlsx.py

# coding: utf-8

# Copyright 2014-2016 Álvaro Justen <https://github.com/turicas/rows/>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.

from __future__ import unicode_literals

import datetime
import decimal

from decimal import Decimal
from io import BytesIO

from openpyxl import load_workbook, Workbook

from rows import fields
from rows.plugins.utils import (create_table, get_filename_and_fobj,
                                prepare_to_export)


def _cell_to_python(cell):
    '''Convert a PyOpenXL's `Cell` object to the corresponding Python object'''

    value = cell.value

    if value == '=TRUE()':
        return True
    elif value == '=FALSE()':
        return False
    elif cell.number_format.lower() == 'yyyy-mm-dd':
        return str(value).split(' 00:00:00')[0]
    elif cell.number_format.lower() == 'yyyy-mm-dd hh:mm:ss':
        return str(value).split('.')[0]
    elif cell.number_format.endswith('%'):
        if value is not None:
            value = str(Decimal(str(value)) * 100)[:-2]
            if value.endswith('.'):
                value = value[:-1]
            return '{}%'.format(value)
        else:
            return None
    elif value is None:
        return ''
    else:
        return value


def import_from_xlsx(filename_or_fobj, sheet_name=None, sheet_index=0,
                     start_row=0, start_column=0, *args, **kwargs):
    workbook = load_workbook(filename_or_fobj)
    if sheet_name is None:
        sheet_name = workbook.sheetnames[sheet_index]
    sheet = workbook.get_sheet_by_name(sheet_name)

    start_row, end_row = max(start_row, sheet.min_row), sheet.max_row
    start_col, end_col = max(start_column, sheet.min_column), sheet.max_column
    table_rows = [[_cell_to_python(sheet.cell(row=row_index, column=col_index))
                   for col_index in range(start_col, end_col + 1)]
                  for row_index in range(start_row, end_row + 1)]

    filename, _ = get_filename_and_fobj(filename_or_fobj, dont_open=True)
    metadata = {'imported_from': 'xlsx',
                'filename': filename,
                'sheet_name': sheet_name, }
    return create_table(table_rows, meta=metadata, *args, **kwargs)


FORMATTING_STYLES = {
    fields.DateField: 'YYYY-MM-DD',
    fields.DatetimeField: 'YYYY-MM-DD HH:MM:SS',
    fields.PercentField: '0.00%',
}


def _python_to_cell(field_types):

    def convert_value(field_type, value):
        number_format = FORMATTING_STYLES.get(field_type, None)

        if field_type not in (
                fields.BoolField,
                fields.DateField,
                fields.DatetimeField,
                fields.DecimalField,
                fields.FloatField,
                fields.IntegerField,
                fields.PercentField,
                fields.TextField,
        ):
            # BinaryField, DatetimeField, JSONField or unknown
            value = field_type.serialize(value)

        return value, number_format

    def convert_row(row):
        return [convert_value(field_type, value)
                for field_type, value in zip(field_types, row)]

    return convert_row


def export_to_xlsx(table, filename_or_fobj=None, sheet_name='Sheet1', *args,
                   **kwargs):
    workbook = Workbook()
    sheet = workbook.active
    sheet.title = sheet_name

    prepared_table = prepare_to_export(table, *args, **kwargs)

    # Write header
    field_names = next(prepared_table)
    for col_index, field_name in enumerate(field_names):
        cell = sheet.cell(row=1, column=col_index + 1)
        cell.value = field_name

    # Write sheet rows
    _convert_row = _python_to_cell(list(map(table.fields.get, field_names)))
    for row_index, row in enumerate(prepared_table, start=1):
        for col_index, (value, number_format) in enumerate(_convert_row(row)):
            cell = sheet.cell(row=row_index + 1, column=col_index + 1)
            cell.value = value
            if number_format is not None:
                cell.number_format = number_format

    if filename_or_fobj is not None:
        _, fobj = get_filename_and_fobj(filename_or_fobj, mode='wb')
        workbook.save(fobj)
        fobj.flush()
        return fobj
    else:
        fobj = BytesIO()
        workbook.save(fobj)
        fobj.seek(0)
        result = fobj.read()
        fobj.close()
        return result
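# --- Example (added for illustration; not part of the original source):
# exporting to XLSX with no filename returns the spreadsheet as bytes;
# 'report.xlsx' is an assumed output name.
import rows
from rows.plugins.xlsx import export_to_xlsx

table = rows.import_from_dicts([{'name': 'Alice', 'age': 30}])
contents = export_to_xlsx(table)  # no filename/fobj -> bytes
with open('report.xlsx', 'wb') as fobj:
    fobj.write(contents)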
rows-0.3.1/rows/plugins/xpath.py

# coding: utf-8

# Copyright 2014-2016 Álvaro Justen <https://github.com/turicas/rows/>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.

from __future__ import unicode_literals

try:
    from HTMLParser import HTMLParser  # Python 2
except ImportError:
    from html.parser import HTMLParser  # Python 3

import string

import six

from lxml.html import fromstring as tree_from_string
from lxml.etree import strip_tags
from lxml.etree import tostring as tree_to_string

from rows.plugins.utils import create_table, get_filename_and_fobj

unescape = HTMLParser().unescape


def _get_row_data(fields_xpath):
    fields = list(fields_xpath.items())

    def get_data(row):
        data = []
        for field_name, field_xpath in fields:
            result = row.xpath(field_xpath)
            if result:
                result = ' '.join(text
                                  for text in
                                  map(six.text_type.strip,
                                      map(six.text_type,
                                          map(unescape, result)))
                                  if text)
            else:
                result = None
            data.append(result)
        return data

    return get_data


def import_from_xpath(filename_or_fobj, rows_xpath, fields_xpath,
                      encoding='utf-8', *args, **kwargs):

    types = set([type(rows_xpath)] +
                [type(xpath) for xpath in fields_xpath.values()])
    if types != set([six.text_type]):
        raise TypeError('XPath must be {}'.format(six.text_type.__name__))

    filename, fobj = get_filename_and_fobj(filename_or_fobj, mode='rb')
    xml = fobj.read().decode(encoding)
    tree = tree_from_string(xml)
    row_elements = tree.xpath(rows_xpath)

    header = list(fields_xpath.keys())
    row_data = _get_row_data(fields_xpath)
    result_rows = list(map(row_data, row_elements))

    meta = {'imported_from': 'xpath', 'filename': filename,
            'encoding': encoding,}
    return create_table([header] + result_rows, meta=meta, *args, **kwargs)
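# --- Example (added for illustration; not part of the original source):
# `import_from_xpath` needs one XPath for the rows and one per field, all as
# unicode strings; the HTML below is made up.
from collections import OrderedDict
from io import BytesIO

from rows.plugins.xpath import import_from_xpath

html = BytesIO(b'<ul><li><b>Alice</b><i>30</i></li></ul>')
fields_xpath = OrderedDict([('name', './/b/text()'), ('age', './/i/text()')])
table = import_from_xpath(html, rows_xpath='//li', fields_xpath=fields_xpath)
print(table[0])  # Row(name='Alice', age=30)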
rows-0.3.1/rows/table.py

# coding: utf-8

# Copyright 2014-2015 Álvaro Justen <https://github.com/turicas/rows/>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.

from __future__ import unicode_literals

from collections import MutableSequence, namedtuple, OrderedDict, Sized
from operator import itemgetter

import six

from rows.fields import identify_type


class Table(MutableSequence):

    def __init__(self, fields, meta=None):
        # TODO: should we really use OrderedDict here?
        # TODO: should use slug on each field name automatically or inside
        #       each plugin?
        self.fields = OrderedDict(fields)

        # TODO: should be able to customize row return type (namedtuple,
        #       dict etc.)
        self.Row = namedtuple('Row', self.field_names)
        self._rows = []
        self.meta = dict(meta) if meta is not None else {}

    @property
    def field_names(self):
        return list(self.fields.keys())

    @property
    def field_types(self):
        return list(self.fields.values())

    def __repr__(self):
        length = len(self._rows) if isinstance(self._rows, Sized) else '?'

        imported = ''
        if 'imported_from' in self.meta:
            imported = ' (from {})'.format(self.meta['imported_from'])

        return '<rows.Table{} {} fields, {} rows>'.format(
                imported, len(self.fields), length)

    def _make_row(self, row):
        # TODO: should be able to customize row type (namedtuple, dict etc.)
        return [field_type.deserialize(row.get(field_name, None))
                for field_name, field_type in self.fields.items()]

    def append(self, row):
        """Add a row to the table. Should be a dict"""

        self._rows.append(self._make_row(row))

    def __len__(self):
        return len(self._rows)

    def __getitem__(self, key):
        key_type = type(key)
        if key_type == int:
            return self.Row(*self._rows[key])
        elif key_type == slice:
            return [self.Row(*row) for row in self._rows[key]]
        elif key_type is six.text_type:
            try:
                field_index = self.field_names.index(key)
            except ValueError:
                raise KeyError(key)

            # TODO: should change the line below to return a generator exp?
            return [row[field_index] for row in self._rows]
        else:
            raise ValueError('Unsupported key type: {}'
                             .format(type(key).__name__))

    def __setitem__(self, key, value):
        key_type = type(key)
        if key_type == int:
            self._rows[key] = self._make_row(value)
        elif key_type is six.text_type:
            values = list(value)  # I'm not lazy, sorry
            if len(values) != len(self):
                raise ValueError('Values length ({}) should be the same as '
                                 'Table length ({})'
                                 .format(len(values), len(self)))

            from rows.fields import detect_types
            from rows.plugins.utils import slug

            field_name = slug(key)
            is_new_field = field_name not in self.field_names
            field_type = detect_types(
                    [field_name],
                    [[value] for value in values])[field_name]
            self.fields[field_name] = field_type
            self.Row = namedtuple('Row', self.field_names)

            if is_new_field:
                for row, value in zip(self._rows, values):
                    row.append(field_type.deserialize(value))
            else:
                field_index = self.field_names.index(field_name)
                for row, value in zip(self._rows, values):
                    row[field_index] = field_type.deserialize(value)
        else:
            raise ValueError('Unsupported key type: {}'
                             .format(type(key).__name__))

    def __delitem__(self, key):
        key_type = type(key)
        if key_type == int:
            del self._rows[key]
        elif key_type is six.text_type:
            try:
                field_index = self.field_names.index(key)
            except ValueError:
                raise KeyError(key)

            del self.fields[key]
            self.Row = namedtuple('Row', self.field_names)
            for row in self._rows:
                row.pop(field_index)
        else:
            raise ValueError('Unsupported key type: {}'
                             .format(type(key).__name__))

    def insert(self, index, row):
        self._rows.insert(index, self._make_row(row))

    def __radd__(self, other):
        if other == 0:
            return self
        raise ValueError()

    # TODO: fix "table += other"
    def __add__(self, other):
        if other == 0:
            return self

        if not isinstance(self, type(other)) or self.fields != other.fields:
            raise ValueError('Tables have incompatible fields')

        table = Table(fields=self.fields)
        table._rows = self._rows + other._rows
        return table

    def order_by(self, key):
        # TODO: implement locale
        # TODO: implement for more than one key
        reverse = False
        if key.startswith('-'):
            key = key[1:]
            reverse = True

        field_names = self.field_names
        if key not in field_names:
            raise ValueError('Field "{}" does not exist'.format(key))

        key_index = field_names.index(key)
        self._rows.sort(key=itemgetter(key_index), reverse=reverse)


class FlexibleTable(Table):

    def __init__(self, fields=None, meta=None):
        if fields is None:
            fields = {}
        super(FlexibleTable, self).__init__(fields, meta)

    def __getitem__(self, key):
        if isinstance(key, int):
            return self.Row(**self._rows[key])
        elif isinstance(key, slice):
            return [self.Row(**row) for row in self._rows[key]]
        else:
            raise ValueError('Unsupported key type: {}'
                             .format(type(key).__name__))

    def _add_field(self, field_name, field_type):
        self.fields[field_name] = field_type
        self.Row = namedtuple('Row', self.field_names)

    def _make_row(self, row):
        for field_name in row.keys():
            if field_name not in self.field_names:
                self._add_field(field_name, identify_type(row[field_name]))

        return {field_name: field_type.deserialize(row.get(field_name, None))
                for field_name, field_type in self.fields.items()}

    def insert(self, index, row):
        self._rows.insert(index, self._make_row(row))

    def __setitem__(self, key, value):
        self._rows[key] = self._make_row(value)

    def append(self, row):
        """Add a row to the table. Should be a dict"""

        self._rows.append(self._make_row(row))
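# --- Example (added for illustration; not part of the original source):
# `Table` is a MutableSequence of namedtuples; integer keys return rows and
# string keys read (or create) whole columns.
from collections import OrderedDict

from rows import fields
from rows.table import Table

table = Table(fields=OrderedDict([('name', fields.TextField),
                                  ('age', fields.IntegerField)]))
table.append({'name': 'Alice', 'age': '30'})  # values are deserialized
print(table[0])      # Row(name='Alice', age=30)
print(table['age'])  # [30]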
MIME_TYPE_TO_PLUGIN_NAME[FILE_EXTENSIONS[plugin_name]] return plugin_name def extension_by_plugin_name(plugin_name): 'Return the file extension used by this plugin' # TODO: should get this information from the plugin return plugin_name def normalize_mime_type(mime_type, mime_name, file_extension): file_extension = file_extension.lower() if file_extension else '' mime_name = mime_name.lower() if mime_name else '' mime_type = mime_type.lower() if mime_type else '' if mime_type == 'text/plain' and file_extension in TEXT_PLAIN: return TEXT_PLAIN[file_extension] elif mime_type == 'application/octet-stream' and mime_name in OCTET_STREAM: return OCTET_STREAM[mime_name] elif file_extension in FILE_EXTENSIONS: return FILE_EXTENSIONS[file_extension] else: return mime_type def plugin_name_by_mime_type(mime_type, mime_name, file_extension): 'Return the plugin name based on the MIME type' return MIME_TYPE_TO_PLUGIN_NAME.get( normalize_mime_type(mime_type, mime_name, file_extension), None) def detect_local_source(path, content, mime_type=None, encoding=None): # TODO: may add sample_size filename = os.path.basename(path) parts = filename.split('.') extension = parts[-1] if len(parts) > 1 else None if magic is not None: detected = magic.detect_from_content(content) encoding = detected.encoding or encoding mime_name = detected.name mime_type = detected.mime_type or mime_type else: encoding = chardet.detect(content)['encoding'] or encoding mime_name = None mime_type = mime_type or mimetypes.guess_type(filename)[0] plugin_name = plugin_name_by_mime_type(mime_type, mime_name, extension) if encoding == 'binary': encoding = None return Source(uri=path, plugin_name=plugin_name, encoding=encoding) def local_file(path, sample_size=1048576): # TODO: may change sample_size with open(path, 'rb') as fobj: content = fobj.read(sample_size) source = detect_local_source(path, content, mime_type=None, encoding=None) return Source(uri=path, plugin_name=source.plugin_name, encoding=source.encoding, delete=False) def download_file(uri, verify_ssl=True, timeout=5): response = requests.get(uri, verify=verify_ssl, timeout=timeout) content = response.content filename = uri encoding = None mime_type = None # Extract data from headers to help plugin + encoding detection, if # available headers = response.headers if 'content-type' in headers: mime_type, options = cgi.parse_header(headers['content-type']) encoding = options.get('charset', encoding) if 'content-disposition' in headers: _, options = cgi.parse_header(headers['content-disposition']) filename = options.get('filename', filename) # TODO: may send only a sample (chardet can be very slow if content is big) source = detect_local_source(filename, content, mime_type, encoding) # Save file locally extension = extension_by_plugin_name(source.plugin_name) tmp = tempfile.NamedTemporaryFile() filename = '{}.{}'.format(tmp.name, extension) tmp.close() with open(filename, 'wb') as fobj: fobj.write(content) return Source(uri=filename, plugin_name=source.plugin_name, encoding=source.encoding, delete=True) def detect_source(uri, verify_ssl, timeout=5): '''Return a `rows.Source` with information for a given URI If URI starts with "http" or "https" the file will be downloaded. This function should only be used if the URI already exists because it's going to download/open the file to detect its encoding and MIME type. ''' # TODO: should also supporte other schemes, like file://, sqlite:// etc. 
if uri.startswith('http://') or uri.startswith('https://'): return download_file(uri, verify_ssl=verify_ssl, timeout=timeout) else: return local_file(uri) def import_from_source(source, default_encoding, *args, **kwargs): 'Import data described in a `rows.Source` into a `rows.Table`' plugin_name = source.plugin_name kwargs['encoding'] = (kwargs.get('encoding', None) or source.encoding or default_encoding) try: import_function = getattr(rows, 'import_from_{}'.format(plugin_name)) except AttributeError: raise ValueError('Plugin (import) "{}" not found'.format(plugin_name)) table = import_function(source.uri, *args, **kwargs) if source.delete: os.unlink(source.uri) return table def import_from_uri(uri, default_encoding, verify_ssl=True, *args, **kwargs): 'Given an URI, detects plugin and encoding and imports into a `rows.Table`' # TODO: support '-' also # TODO: (optimization) if `kwargs.get('encoding', None) is not None` we can # skip encoding detection. source = detect_source(uri, verify_ssl=verify_ssl) return import_from_source(source, default_encoding, *args, **kwargs) def export_to_uri(table, uri, *args, **kwargs): 'Given a `rows.Table` and an URI, detects plugin (from URI) and exports' # TODO: support '-' also plugin_name = plugin_name_by_uri(uri) try: export_function = getattr(rows, 'export_to_{}'.format(plugin_name)) except AttributeError: raise ValueError('Plugin (export) "{}" not found'.format(plugin_name)) return export_function(table, uri, *args, **kwargs) rows-0.3.1/setup.py000066400000000000000000000057671310400316700142310ustar00rootroot00000000000000# coding: utf-8 # Copyright 2014-2016 Álvaro Justen # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program. If not, see . from __future__ import unicode_literals from setuptools import setup EXTRA_REQUIREMENTS = { 'csv': ['unicodecsv'], 'cli': ['click', 'requests'], 'html': ['lxml'], # apt: libxslt-dev libxml2-dev 'ods': ['lxml'], 'parquet': ['parquet>=1.1'], 'xls': ['xlrd', 'xlwt'], 'xlsx': ['openpyxl'], 'xpath': ['lxml'], 'detect': ['file-magic'], } EXTRA_REQUIREMENTS['all'] = sum(EXTRA_REQUIREMENTS.values(), []) INSTALL_REQUIREMENTS = ['six'] + EXTRA_REQUIREMENTS['csv'] LONG_DESCRIPTION = ''' No matter in which format your tabular data is: rows will import it, automatically detect types and give you high-level Python objects so you can start working with the data instead of trying to parse it. It is also locale-and-unicode aware. 
:) See a quick start tutorial at: https://github.com/turicas/rows/blob/develop/README.md '''.strip() setup(name='rows', description=('A common, beautiful interface to tabular data, ' 'no matter the format'), long_description=LONG_DESCRIPTION, version='0.3.1', author='Álvaro Justen', author_email='alvarojusten@gmail.com', url='https://github.com/turicas/rows/', packages=['rows', 'rows.plugins'], install_requires=INSTALL_REQUIREMENTS, extras_require=EXTRA_REQUIREMENTS, keywords=['tabular', 'table', 'csv', 'xls', 'xlsx', 'xpath', 'sqlite', 'html', 'rows', 'data', 'opendata'], entry_points={ 'console_scripts': [ 'rows = rows.cli:cli', ], }, classifiers=[ 'Development Status :: 5 - Production/Stable', 'Environment :: Console', 'Intended Audience :: Developers', 'Intended Audience :: Science/Research', 'Intended Audience :: System Administrators', 'License :: OSI Approved :: GNU General Public License v3 or later (GPLv3+)', 'Natural Language :: English', 'Operating System :: OS Independent', 'Programming Language :: Python :: 2.7', 'Programming Language :: Python :: 3.5', 'Topic :: Database', 'Topic :: Software Development :: Libraries :: Python Modules', 'Topic :: Text Processing :: Markup :: HTML', 'Topic :: Utilities', ]) rows-0.3.1/tests/000077500000000000000000000000001310400316700136425ustar00rootroot00000000000000rows-0.3.1/tests/__init__.py000066400000000000000000000000001310400316700157410ustar00rootroot00000000000000rows-0.3.1/tests/data/000077500000000000000000000000001310400316700145535ustar00rootroot00000000000000rows-0.3.1/tests/data/all-field-types.csv000066400000000000000000000010171310400316700202620ustar00rootroot00000000000000bool_column,integer_column,float_column,decimal_column,percent_column,date_column,datetime_column,unicode_column True,1,3.141592,3.141592,1%,2015-01-01,2015-08-18T15:10:00,Álvaro False,2,1.234,1.234,11.69%,1999-02-03,1999-02-03T00:01:02,àáãâä¹²³ true,3,4.56,4.56,12%,2050-01-02,2050-01-02T23:45:31,éèẽêë false,4,7.89,7.89,13.64%,2015-08-18,2015-08-18T22:21:33,~~~~ yes,5,9.87,9.87,13.14%,2015-03-04,2015-03-04T16:00:01,álvaro no,6,1.2345,1.2345,2%,2015-05-06,2015-05-06T12:01:02,test ,-,null,nil,none,n/a,null, rows-0.3.1/tests/data/all-field-types.html000066400000000000000000000035611310400316700204410ustar00rootroot00000000000000
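A quick sketch of how the `rows.utils` helpers above fit together. This is a hedged example, not part of the package: 'data/cities.csv' is a hypothetical local path, and for http(s):// URIs `import_from_uri` downloads the file first.

    from rows.utils import import_from_uri, plugin_name_by_uri

    # extension-based detection: '.csv' maps to the csv plugin
    assert plugin_name_by_uri('data/cities.csv') == 'csv'
    # detects the source (plugin + encoding), then delegates to
    # rows.import_from_csv
    table = import_from_uri('data/cities.csv', default_encoding='utf-8')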
bool_column integer_column float_column decimal_column percent_column date_column datetime_column unicode_column
True 1 3.141592 3.141592 1% 2015-01-01 2015-08-18T15:10:00 Álvaro
False 2 1.234 1.234 11.69% 1999-02-03 1999-02-03T00:01:02 àáãâä¹²³
true 3 4.56 4.56 12% 2050-01-02 2050-01-02T23:45:31 éèẽêë
false 4 7.89 7.89 13.64% 2015-08-18 2015-08-18T22:21:33 ~~~~
yes 5 9.87 9.87 13.14% 2015-03-04 2015-03-04T16:00:01 álvaro
no 6 1.2345 1.2345 2% 2015-05-06 2015-05-06T12:01:02 test
- null nil none n/a null
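The all-field-types fixtures above drive the type-detection tests. A minimal sketch of importing one of them, assuming it runs from the repository root with the csv extra installed:

    import rows
    import rows.fields

    table = rows.import_from_csv('tests/data/all-field-types.csv')
    # detected from the sample values, as asserted in tests_fields.py below
    assert table.fields['integer_column'] is rows.fields.IntegerField
    assert table[0].unicode_column == 'Álvaro'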
rows-0.3.1/tests/data/all-field-types.json000066400000000000000000000040741310400316700204460ustar00rootroot00000000000000
[
    {
        "float_column": 3.141592,
        "decimal_column": 3.141592,
        "bool_column": "True",
        "integer_column": 1,
        "date_column": "2015-01-01",
        "datetime_column": "2015-08-18T15:10:00",
        "percent_column": "1%",
        "unicode_column": "Álvaro"
    },
    {
        "float_column": 1.234,
        "decimal_column": 1.234,
        "bool_column": "False",
        "integer_column": 2,
        "date_column": "1999-02-03",
        "datetime_column": "1999-02-03T00:01:02",
        "percent_column": "11.69%",
        "unicode_column": "àáãâä¹²³"
    },
    {
        "float_column": 4.56,
        "decimal_column": 4.56,
        "bool_column": true,
        "integer_column": 3,
        "date_column": "2050-01-02",
        "datetime_column": "2050-01-02T23:45:31",
        "percent_column": "12%",
        "unicode_column": "éèẽêë"
    },
    {
        "float_column": 7.89,
        "decimal_column": 7.89,
        "bool_column": false,
        "integer_column": 4,
        "date_column": "2015-08-18",
        "datetime_column": "2015-08-18T22:21:33",
        "percent_column": "13.64%",
        "unicode_column": "~~~~"
    },
    {
        "float_column": 9.87,
        "decimal_column": 9.87,
        "bool_column": "yes",
        "integer_column": 5,
        "date_column": "2015-03-04",
        "datetime_column": "2015-03-04T16:00:01",
        "percent_column": "13.14%",
        "unicode_column": "álvaro"
    },
    {
        "float_column": 1.2345,
        "decimal_column": 1.2345,
        "bool_column": "no",
        "integer_column": 6,
        "date_column": "2015-05-06",
        "datetime_column": "2015-05-06T12:01:02",
        "percent_column": "2%",
        "unicode_column": "test"
    },
    {
        "float_column": "",
        "decimal_column": "-",
        "bool_column": "null",
        "integer_column": "nil",
        "date_column": "none",
        "datetime_column": "n/a",
        "percent_column": "null",
        "unicode_column": ""
    }
]
rows-0.3.1/tests/data/all-field-types.ods000066400000000000000000000251221310400316700202570ustar00rootroot00000000000000
[binary OpenDocument spreadsheet omitted: zip container, mimetype application/vnd.oasis.opendocument.spreadsheet]
rows-0.3.1/tests/data/all-field-types.sqlite000066400000000000000000000200001310400316700207610ustar00rootroot00000000000000
[binary SQLite 3 database omitted; the readable schema is:
CREATE TABLE table1 (bool_column INTEGER, integer_column INTEGER, float_column FLOAT, decimal_column FLOAT, percent_column TEXT, date_column TEXT, datetime_column TEXT, unicode_column TEXT)]
rows-0.3.1/tests/data/all-field-types.txt000066400000000000000000000027351310400316700203160ustar00rootroot00000000000000
+-------------+----------------+--------------+----------------+----------------+-------------+---------------------+----------------+
| bool_column | integer_column | float_column | decimal_column | percent_column | date_column |   datetime_column   | unicode_column |
+-------------+----------------+--------------+----------------+----------------+-------------+---------------------+----------------+
|    True     |       1        |   3.141592   |    3.141592    |       1%       | 2015-01-01  | 2015-08-18T15:10:00 |     Álvaro     |
|    False    |       2        |    1.234     |     1.234      |     11.69%     | 1999-02-03  | 1999-02-03T00:01:02 |    àáãâä¹²³    |
|    true     |       3        |     4.56     |      4.56      |      12%       | 2050-01-02  | 2050-01-02T23:45:31 |     éèẽêë      |
|    false    |       4        |     7.89     |      7.89      |     13.64%     | 2015-08-18  | 2015-08-18T22:21:33 |      ~~~~      |
|     yes     |       5        |     9.87     |      9.87      |     13.14%     | 2015-03-04  | 2015-03-04T16:00:01 |     álvaro     |
|     no      |       6        |    1.2345    |     1.2345     |       2%       | 2015-05-06  | 2015-05-06T12:01:02 |      test      |
|             |       -        |     null     |      nil       |      none      |     n/a     |        null         |                |
+-------------+----------------+--------------+----------------+----------------+-------------+---------------------+----------------+
rows-0.3.1/tests/data/all-field-types.xls000066400000000000000000000160001310400316700202730ustar00rootroot00000000000000
[binary Excel 97 (XLS) workbook omitted: single sheet "Sheet1" with the same all-field-types rows]
rows-0.3.1/tests/data/all-field-types.xlsx000066400000000000000000000125251310400316700204730ustar00rootroot00000000000000
[binary OOXML (XLSX) workbook omitted: single sheet with the same all-field-types rows]
rows-0.3.1/tests/data/colspan-table.html000066400000000000000000000004071310400316700201660ustar00rootroot00000000000000
huge title
field1 field2
row1field1 row1field2
row2field1 row2field2
rows-0.3.1/tests/data/ecuador-medios-radiodifusoras.csv000066400000000000000000000061411310400316700232060ustar00rootroot00000000000000url,name,address,phone,website,email /es/informate-y-participa/directorio-de-medios/28-ecos-de-cayambe-1470-am,Ecos de Cayambe (1470 AM),Terán 52-91 y 10 de Agosto,02 236 0047 / 236 3055,www.ecosdecayambe.es.tl,ecos@pi.pro.ec /es/informate-y-participa/directorio-de-medios/29-jm-radio-88-9-fm,JM Radio (88.9 FM),Cristóbal Colón y Panzaleo,02 231 6642,,www.jmradio.net /es/informate-y-participa/directorio-de-medios/191-activa-88-5fm,Activa (88.5FM),"Av. Miguel Cordero y Av. Paucarbamba Ed. Work Center Piso 5, Of. 508",07 281 4688 / 281 9992 / 288 1088,www.fm88radioactiva.com,radio@cadencactiva.com /es/informate-y-participa/directorio-de-medios/192-alfa-musical-1140am,Alfa Musical (1140AM),Hermano Miguel 1068 y Gran Colombia,07 283 8451,,radioalfa1140am@hotmail.com /es/informate-y-participa/directorio-de-medios/193-catolica-nacional-98-1fm-cuenca-paute-gualaceo,"Católica Nacional (98.1FM) (Cuenca, Paute, Gualaceo)",Bolívar 9-49 y Luis Cordero,07 282 5845 / 283 8292 / 283 2280,www.radiocatolicacuenca.com.ec,radiocatolicacuenca@gmail.com /es/informate-y-participa/directorio-de-medios/194-contacot-xg-1260am,Contacot XG (1260AM),Jesús Dávila y Cornelio Merchán,07 288 1240 / 281 4164,www.contactoxg.webpin.com,radionexo.fem@hotmail.com /es/informate-y-participa/directorio-de-medios/195-cosmos-fm-97-3fm-cuenca-paute,"Cosmos FM (97.3FM) (Cuenca, Paute)",Av. Abelardo J. Andrade 2-07 y Francisco Tamariz,07 283 1423 / 284 2837,www.cosmos.ec,cosmos@rcgrupo.com /es/informate-y-participa/directorio-de-medios/196-cuenca-la-voz-de-los-cuatro-rios-1180am,Cuenca La Voz de los Cuatro Ríos (1180AM),Bomboíza 1-83 entre Loja y Pastaza,07 288 4128,www.radiocuenca.com,kleberpinosabad@yahoo.es / radiocuenca1180@hotmail.com /es/informate-y-participa/directorio-de-medios/197-el-mercurio-1200am,El Mercurio (1200AM),Av. de las Américas y Francisco Azcásubi Ed. Mercurio,07 409 5684 / 409 5645,www.radioelmercurio.com.ec,radioinfo@radioelmercurio.com.ec /es/informate-y-participa/directorio-de-medios/198-la-voz-de-tomebamba-am-fm-1-070am-102-1fm-cuenca-giron-paute,"La Voz de Tomebamba AM / FM (1.070AM; 102.1FM) (Cuenca, Girón, Paute)","Benigno Malo 15-91 y Muñoz Vernaza, esquina",07 284 2000 / 282 5301 / 284 2222,www.lavozdeltomebamba.com,info@lavozdeltomebamba.com /es/informate-y-participa/directorio-de-medios/199-ondas-azuayas-1110-am,Ondas Azuayas (1110 AM),Héroes de Verdeloma 915 y Francisco Tamariz,07 282 3911 / 283 1975 / 283 1792 / 284 4485,www.ondasazuayas.ec,max_azuayas@hotmail.com /es/informate-y-participa/directorio-de-medios/200-infinito-fm-97-5-fm,Infinito FM (97.5 FM),Av. Sixto Durán Ballen y Vega Dávila,07 294 4899,www.radioinfinito.ser.ec,radioinfinito@hotmail.com /es/informate-y-participa/directorio-de-medios/201-radio-super-94-9fm-cuenca-giron-paute,"Radio Súper (94.9FM) (Cuenca, Girón, Paute)","Benigno Malo 1591 y Muñoz Vernaza, esquina",07 284 2949 / 284 2949 / 282 5301,www.super949.com,Súper949@gmail.com /es/informate-y-participa/directorio-de-medios/202-tropicana-1380am,Tropicana (1390AM),Av. Pumapungo 5-50 y Juan Benigno Vela,07 280 7970 / 280 9644,www.radiotropicana1390.com,radiotropicana1390@hotmail.com rows-0.3.1/tests/data/ecuador-medios-radiodifusoras.html000066400000000000000000002127631310400316700233700ustar00rootroot00000000000000 Directorio de medios - Radiodifusoras - Página 1 de 7

Radiodifusoras

D: Terán 52-91 y 10 de Agosto
T: 02 236 0047 / 236 3055
W: www.ecosdecayambe.es.tl
E: ecos@pi.pro.ec
D: Cristóbal Colón y Panzaleo
T: 02 231 6642
F: 02 231 0836
E: www.jmradio.net
D: Av. Miguel Cordero y Av. Paucarbamba Ed. Work Center Piso 5, Of. 508
T: 07 281 4688 / 281 9992 / 288 1088
F: 07 281 9992
W: www.fm88radioactiva.com
E: radio@cadencactiva.com
T: @fm88radioactiva
D: Hermano Miguel 1068 y Gran Colombia
T: 07 283 8451
F: 07 283 8714
E: radioalfa1140am@hotmail.com
T: @radioalfaec
D: Bolívar 9-49 y Luis Cordero
T: 07 282 5845 / 283 8292 / 283 2280
F: 07 282 5845 / 254 1557 / 284 4436
W: www.radiocatolicacuenca.com.ec
E: radiocatolicacuenca@gmail.com
T: @RadioCatolicaC
D: Jesús Dávila y Cornelio Merchán
T: 07 288 1240 / 281 4164
F: 07 288 0691
W: www.contactoxg.webpin.com
E: radionexo.fem@hotmail.com
D: Av. Abelardo J. Andrade 2-07 y Francisco Tamariz
T: 07 283 1423 / 284 2837
F: 07 284 3727
W: www.cosmos.ec
E: cosmos@rcgrupo.com
D: Bomboíza 1-83 entre Loja y Pastaza
T: 07 288 4128
F: 07 288 4128
W: www.radiocuenca.com
E: kleberpinosabad@yahoo.es / radiocuenca1180@hotmail.com
D: Av. de las Américas y Francisco Azcásubi Ed. Mercurio
T: 07 409 5684 / 409 5645
F: 07 409 5684
W: www.radioelmercurio.com.ec
E: radioinfo@radioelmercurio.com.ec
T: @radioelmercurio
D: Benigno Malo 15-91 y Muñoz Vernaza, esquina
T: 07 284 2000 / 282 5301 / 284 2222
F: 07 284 2222
W: www.lavozdeltomebamba.com
E: info@lavozdeltomebamba.com
T: @tomebamba
D: Héroes de Verdeloma 915 y Francisco Tamariz
T: 07 282 3911 / 283 1975 / 283 1792 / 284 4485
F: 07 283 9067
W: www.ondasazuayas.ec
E: max_azuayas@hotmail.com
T: @ondasazuayas
D: Av. Sixto Durán Ballen y Vega Dávila
T: 07 294 4899
W: www.radioinfinito.ser.ec
E: radioinfinito@hotmail.com
T: @RADIOINFINITOFM
D: Benigno Malo 1591 y Muñoz Vernaza, esquina
T: 07 284 2949 / 284 2949 / 282 5301
F: 07 284 2222
W: www.super949.com
E: Súper949@gmail.com
T: @super949
D: Av. Pumapungo 5-50 y Juan Benigno Vela
T: 07 280 7970 / 280 9644
F: 07 280 9644
W: www.radiotropicana1390.com
E: radiotropicana1390@hotmail.com
¡Haga clic para escuchar el texto resaltado!
rows-0.3.1/tests/data/ibge-censo.html000066400000000000000000000672161310400316700174700ustar00rootroot00000000000000
RJ Angra dos Reis 169.511
RJ Aperibé 10.213
RJ Araruama 112.008
RJ Areal 11.423
RJ Armação dos Búzios 27.560
RJ Arraial do Cabo 27.715
RJ Barra do Piraí 94.778
RJ Barra Mansa 177.813
RJ Belford Roxo 469.332
RJ Bom Jardim 25.333
RJ Bom Jesus do Itabapoana 35.411
RJ Cabo Frio 186.227
RJ Cachoeiras de Macacu 54.273
RJ Cambuci 14.827
RJ Campos dos Goytacazes 463.731
RJ Cantagalo 19.830
RJ Carapebus 13.359
RJ Cardoso Moreira 12.600
RJ Carmo 17.434
RJ Casimiro de Abreu 35.347
RJ Comendador Levy Gasparian 8.180
RJ Conceição de Macabu 21.211
RJ Cordeiro 20.430
RJ Duas Barras 10.930
RJ Duque de Caxias 855.048
RJ Engenheiro Paulo de Frontin 13.237
RJ Guapimirim 51.483
RJ Iguaba Grande 22.851
RJ Itaboraí 218.008
RJ Itaguaí 109.091
RJ Italva 14.063
RJ Itaocara 22.899
RJ Itaperuna 95.841
RJ Itatiaia 28.783
RJ Japeri 95.492
RJ Laje do Muriaé 7.487
RJ Macaé 206.728
RJ Macuco 5.269
RJ Magé 227.322
RJ Mangaratiba 36.456
RJ Maricá 127.461
RJ Mendes 17.935
RJ Mesquita 168.376
RJ Miguel Pereira 24.642
RJ Miracema 26.843
RJ Natividade 15.082
RJ Nilópolis 157.425
RJ Niterói 487.562
RJ Nova Friburgo 182.082
RJ Nova Iguaçu 796.257
RJ Paracambi 47.124
RJ Paraíba do Sul 41.084
RJ Paraty 37.533
RJ Paty do Alferes 26.359
RJ Petrópolis 295.917
RJ Pinheiral 22.719
RJ Piraí 26.314
RJ Porciúncula 17.760
RJ Porto Real 16.592
RJ Quatis 12.793
RJ Queimados 137.962
RJ Quissamã 20.242
RJ Resende 119.769
RJ Rio Bonito 55.551
RJ Rio Claro 17.425
RJ Rio das Flores 8.561
RJ Rio das Ostras 105.676
RJ Rio de Janeiro 6.320.446
RJ Santa Maria Madalena 10.321
RJ Santo Antônio de Pádua 40.589
RJ São Fidélis 37.543
RJ São Francisco de Itabapoana 41.354
RJ São Gonçalo 999.728
RJ São João da Barra 32.747
RJ São João de Meriti 458.673
RJ São José de Ubá 7.003
RJ São José do Vale do Rio Preto 20.251
RJ São Pedro da Aldeia 87.875
RJ São Sebastião do Alto 8.895
RJ Sapucaia 17.525
RJ Saquarema 74.234
RJ Seropédica 78.186
RJ Silva Jardim 21.349
RJ Sumidouro 14.900
RJ Tanguá 30.732
RJ Teresópolis 163.746
RJ Trajano de Moraes 10.289
RJ Três Rios 77.432
RJ Valença 71.843
RJ Varre-Sai 9.475
RJ Vassouras 34.410
RJ Volta Redonda 257.803
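The census rows above are a natural fit for `Table.order_by()`, defined in rows/table.py earlier in this archive. A minimal sketch with hand-built data taken from the table (field names here are illustrative, not from the fixture):

    from collections import OrderedDict

    import rows
    import rows.fields

    fields = OrderedDict([('uf', rows.fields.TextField),
                          ('municipality', rows.fields.TextField),
                          ('population', rows.fields.IntegerField)])
    table = rows.Table(fields=fields)
    table.append({'uf': 'RJ', 'municipality': 'Rio de Janeiro',
                  'population': 6320446})
    table.append({'uf': 'RJ', 'municipality': 'Niterói',
                  'population': 487562})
    table.order_by('-population')  # a leading '-' sorts in descending order
    assert table[0].municipality == 'Rio de Janeiro'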
rows-0.3.1/tests/data/merged.csv000066400000000000000000000001421310400316700165300ustar00rootroot00000000000000
id,username,birthday,gender
1,turicas,1987-04-29,M
2,abc,,F
3,def,2000-01-01,F
4,qwe,1999-12-31,F
rows-0.3.1/tests/data/nation.dict.parquet000077500000000000000000000054421310400316700204000ustar00rootroot00000000000000
[binary Parquet file omitted: dictionary-encoded `nation` sample; readable column names: nation_key, name, region_key, comment_col]
rows-0.3.1/tests/data/nested-table.html000066400000000000000000000022271310400316700200130ustar00rootroot00000000000000
t0,0r0c0 t0,0r0c1 t0,0r0c2
t0,0r1c0 t0,0r1c1 t0,0r1c2
t0,0r2c0
t0,1r0c0 t0,1r0c1
t0,1r1c0 t0,1r1c1
t0,1r2c0 t0,1r2c1
t0,2r0c0 t0,2r0c1
t0,2r1c0 t0,2r1c1
t0,1r3c1
t0,1r4c0 t0,1r4c1
t0,1r5c0 t0,1r5c1
t0,0r2c2
t0,0r3c0 t0,0r3c1 t0,0r3c2
rows-0.3.1/tests/data/properties-table.html000066400000000000000000000004521310400316700207230ustar00rootroot00000000000000
field1 field2
row1field1 row1field2
row2field1 row2field2
rows-0.3.1/tests/data/scripts/000077500000000000000000000000001310400316700162425ustar00rootroot00000000000000
rows-0.3.1/tests/data/scripts/create_sqlite.py000066400000000000000000000022621310400316700214420ustar00rootroot00000000000000
# coding: utf-8

import os
import sqlite3

from collections import OrderedDict


input_filename = '../all-field-types.csv'
output_filename = '../all-field-types.sqlite'
field_types = OrderedDict([
    ('bool_column', 'INTEGER'),
    ('integer_column', 'INTEGER'),
    ('float_column', 'FLOAT'),
    ('decimal_column', 'FLOAT'),
    ('percent_column', 'TEXT'),
    ('date_column', 'TEXT'),
    ('datetime_column', 'TEXT'),
    ('unicode_column', 'TEXT'),
])

column_types = ', '.join(['{} {}'.format(key, value)
                          for key, value in field_types.items()])
create_sql = 'CREATE TABLE table1 ({})'.format(column_types)

field_names = ', '.join(field_types.keys())
placeholders = ', '.join(['?' for _ in field_types])
insert_sql = 'INSERT INTO table1 ({}) VALUES ({})'.format(field_names,
                                                          placeholders)

if os.path.exists(output_filename):
    os.unlink(output_filename)

connection = sqlite3.connect(output_filename)
connection.execute(create_sql)
with open(input_filename) as fobj:
    data = fobj.read().decode('utf-8').splitlines()
    for row in data[1:]:
        connection.execute(insert_sql, row.split(','))
connection.commit()
rows-0.3.1/tests/data/simple-table.html000066400000000000000000000005121310400316700200150ustar00rootroot00000000000000
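To read the database generated by create_sqlite.py back into a table, the bundled sqlite plugin can be used. A minimal sketch, assuming the script was run inside tests/data/ and we are at the repository root:

    import rows

    table = rows.import_from_sqlite('tests/data/all-field-types.sqlite')
    assert len(table) == 7  # the same seven data rows as the CSV fixture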
t0r0c0 t0r0c1
t0r1c0 t0r1c1
t1r0c0 t1r0c1
t1r1c0 t1r1c1
t1r2c0 t1r2c1
rows-0.3.1/tests/data/table-thead-tbody.html000066400000000000000000000003141310400316700207300ustar00rootroot00000000000000
t1 t2
456 123
qqq aaa
rows-0.3.1/tests/data/table-with-sections.html000066400000000000000000000031131310400316700213240ustar00rootroot00000000000000
t2r0c0 t2r0c1
t2r0c0 t2r0c1
t2r1c0 t2r1c1
t2r2c0 t2r2c1
id username name signup_date
1 turicas Álvaro Justen 2014-04-01
2 test Test User 2014-04-02
3 example Example User 2014-04-03
4 python Python Heavy User 2014-04-04
5 erlang Erlang User 2014-04-05
rows-0.3.1/tests/data/to-merge-1.csv000066400000000000000000000000421310400316700171410ustar00rootroot00000000000000id,username 1,turicas 2,abc 3,def rows-0.3.1/tests/data/to-merge-2.csv000066400000000000000000000001141310400316700171420ustar00rootroot00000000000000id,username,birthday 1,turicas,1987-04-29 3,def,2000-01-01 4,qwe,1999-12-31 rows-0.3.1/tests/data/to-merge-3.csv000066400000000000000000000000671310400316700171520ustar00rootroot00000000000000id,username,gender 1,turicas,M 2,abc,F 3,def,F 4,qwe,F rows-0.3.1/tests/tests_cli.py000066400000000000000000000016221310400316700162060ustar00rootroot00000000000000# coding: utf-8 # Copyright 2014-2015 Álvaro Justen # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program. If not, see . from __future__ import unicode_literals import unittest import rows.cli as cli class CliTestCase(unittest.TestCase): # TODO: test everything pass rows-0.3.1/tests/tests_fields.py000066400000000000000000000542361310400316700167160ustar00rootroot00000000000000# coding: utf-8 # Copyright 2014-2015 Álvaro Justen # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program. If not, see . 
from __future__ import unicode_literals import collections import datetime import json import platform import unittest from base64 import b64encode from decimal import Decimal import rows import six from rows import fields if platform.system() == 'Windows': locale_name = 'ptb_bra' else: locale_name = 'pt_BR.UTF-8' class FieldsTestCase(unittest.TestCase): def test_Field(self): self.assertEqual(fields.Field.TYPE, (type(None), )) self.assertIs(fields.Field.deserialize(None), None) self.assertEqual(fields.Field.deserialize('Álvaro'), 'Álvaro') self.assertEqual(fields.Field.serialize(None), '') self.assertIs(type(fields.Field.serialize(None)), six.text_type) self.assertEqual(fields.Field.serialize('Álvaro'), 'Álvaro') self.assertIs(type(fields.Field.serialize('Álvaro')), six.text_type) def test_BinaryField(self): deserialized = 'Álvaro'.encode('utf-8') serialized = b64encode(deserialized).decode('ascii') self.assertEqual(type(deserialized), six.binary_type) self.assertEqual(type(serialized), six.text_type) self.assertEqual(fields.BinaryField.TYPE, (bytes, )) self.assertEqual(fields.BinaryField.serialize(None), '') self.assertIs(type(fields.BinaryField.serialize(None)), six.text_type) self.assertEqual(fields.BinaryField.serialize(deserialized), serialized) self.assertIs(type(fields.BinaryField.serialize(deserialized)), six.text_type) with self.assertRaises(ValueError): fields.BinaryField.serialize(42) with self.assertRaises(ValueError): fields.BinaryField.serialize(3.14) with self.assertRaises(ValueError): fields.BinaryField.serialize('Álvaro') with self.assertRaises(ValueError): fields.BinaryField.serialize('123') self.assertIs(fields.BinaryField.deserialize(None), b'') self.assertEqual(fields.BinaryField.deserialize(serialized), deserialized) self.assertIs(type(fields.BinaryField.deserialize(serialized)), six.binary_type) with self.assertRaises(ValueError): fields.BinaryField.deserialize(42) with self.assertRaises(ValueError): fields.BinaryField.deserialize(3.14) with self.assertRaises(ValueError): fields.BinaryField.deserialize('Álvaro') self.assertEqual(fields.BinaryField.deserialize(deserialized), deserialized) self.assertEqual(fields.BinaryField.deserialize(serialized), deserialized) self.assertEqual( fields.BinaryField.deserialize(serialized.encode('ascii')), serialized.encode('ascii') ) def test_BoolField(self): self.assertEqual(fields.BoolField.TYPE, (bool, )) self.assertEqual(fields.BoolField.serialize(None), '') false_values = ('False', 'false', 'no', False) for value in false_values: self.assertIs(fields.BoolField.deserialize(value), False) self.assertIs(fields.BoolField.deserialize(None), None) self.assertEqual(fields.BoolField.deserialize(''), None) true_values = ('True', 'true', 'yes', True) for value in true_values: self.assertIs(fields.BoolField.deserialize(value), True) self.assertEqual(fields.BoolField.serialize(False), 'false') self.assertIs(type(fields.BoolField.serialize(False)), six.text_type) self.assertEqual(fields.BoolField.serialize(True), 'true') self.assertIs(type(fields.BoolField.serialize(True)), six.text_type) # '0' and '1' should be not accepted as boolean values because the # sample could not contain other integers but the actual type could be # integer with self.assertRaises(ValueError): fields.BoolField.deserialize('0') with self.assertRaises(ValueError): fields.BoolField.deserialize(b'0') with self.assertRaises(ValueError): fields.BoolField.deserialize('1') with self.assertRaises(ValueError): fields.BoolField.deserialize(b'1') def test_IntegerField(self): 
self.assertEqual(fields.IntegerField.TYPE, (int, )) self.assertEqual(fields.IntegerField.serialize(None), '') self.assertIs(type(fields.IntegerField.serialize(None)), six.text_type) self.assertIn(type(fields.IntegerField.deserialize('42')), fields.IntegerField.TYPE) self.assertEqual(fields.IntegerField.deserialize('42'), 42) self.assertEqual(fields.IntegerField.deserialize(42), 42) self.assertEqual(fields.IntegerField.serialize(42), '42') self.assertIs(type(fields.IntegerField.serialize(42)), six.text_type) self.assertEqual(fields.IntegerField.deserialize(None), None) self.assertEqual(fields.IntegerField.deserialize('10152709355006317'), 10152709355006317) with rows.locale_context(locale_name): self.assertEqual(fields.IntegerField.serialize(42000), '42000') self.assertIs(type(fields.IntegerField.serialize(42000)), six.text_type) self.assertEqual(fields.IntegerField.serialize(42000, grouping=True), '42.000') self.assertEqual(fields.IntegerField.deserialize('42.000'), 42000) self.assertEqual(fields.IntegerField.deserialize(42), 42) self.assertEqual(fields.IntegerField.deserialize(42.0), 42) with self.assertRaises(ValueError): fields.IntegerField.deserialize(1.23) def test_FloatField(self): self.assertEqual(fields.FloatField.TYPE, (float, )) self.assertEqual(fields.FloatField.serialize(None), '') self.assertIs(type(fields.FloatField.serialize(None)), six.text_type) self.assertIn(type(fields.FloatField.deserialize('42.0')), fields.FloatField.TYPE) self.assertEqual(fields.FloatField.deserialize('42.0'), 42.0) self.assertEqual(fields.FloatField.deserialize(42.0), 42.0) self.assertEqual(fields.FloatField.deserialize(42), 42.0) self.assertEqual(fields.FloatField.deserialize(None), None) self.assertEqual(fields.FloatField.serialize(42.0), '42.0') self.assertIs(type(fields.FloatField.serialize(42.0)), six.text_type) with rows.locale_context(locale_name): self.assertEqual(fields.FloatField.serialize(42000.0), '42000,000000') self.assertIs(type(fields.FloatField.serialize(42000.0)), six.text_type) self.assertEqual(fields.FloatField.serialize(42000, grouping=True), '42.000,000000') self.assertEqual(fields.FloatField.deserialize('42.000,00'), 42000.0) self.assertEqual(fields.FloatField.deserialize(42), 42.0) self.assertEqual(fields.FloatField.deserialize(42.0), 42.0) def test_DecimalField(self): deserialized = Decimal('42.010') self.assertEqual(fields.DecimalField.TYPE, (Decimal, )) self.assertEqual(fields.DecimalField.serialize(None), '') self.assertIs(type(fields.DecimalField.serialize(None)), six.text_type) self.assertEqual(fields.DecimalField.deserialize(''), None) self.assertIn(type(fields.DecimalField.deserialize('42.0')), fields.DecimalField.TYPE) self.assertEqual(fields.DecimalField.deserialize('42.0'), Decimal('42.0')) self.assertEqual(fields.DecimalField.deserialize(deserialized), deserialized) self.assertEqual(fields.DecimalField.serialize(deserialized), '42.010') self.assertEqual(type(fields.DecimalField.serialize(deserialized)), six.text_type) self.assertEqual(fields.DecimalField.deserialize('21.21657469231'), Decimal('21.21657469231')) self.assertEqual(fields.DecimalField.deserialize('-21.34'), Decimal('-21.34')) self.assertEqual(fields.DecimalField.serialize(Decimal('-21.34')), '-21.34') self.assertEqual(fields.DecimalField.deserialize(None), None) with rows.locale_context(locale_name): self.assertEqual( six.text_type, type(fields.DecimalField.serialize(deserialized)) ) self.assertEqual(fields.DecimalField.serialize(Decimal('4200')), '4200') 
self.assertEqual(fields.DecimalField.serialize(Decimal('42.0')), '42,0') self.assertEqual(fields.DecimalField.serialize(Decimal('42000.0')), '42000,0') self.assertEqual(fields.DecimalField.serialize(Decimal('-42.0')), '-42,0') self.assertEqual(fields.DecimalField.deserialize('42.000,00'), Decimal('42000.00')) self.assertEqual(fields.DecimalField.deserialize('-42.000,00'), Decimal('-42000.00')) self.assertEqual( fields.DecimalField.serialize( Decimal('42000.0'), grouping=True ), '42.000,0' ) self.assertEqual(fields.DecimalField.deserialize(42000), Decimal('42000')) self.assertEqual(fields.DecimalField.deserialize(42000.0), Decimal('42000')) def test_PercentField(self): deserialized = Decimal('0.42010') self.assertEqual(fields.PercentField.TYPE, (Decimal, )) self.assertIn(type(fields.PercentField.deserialize('42.0%')), fields.PercentField.TYPE) self.assertEqual(fields.PercentField.deserialize('42.0%'), Decimal('0.420')) self.assertEqual(fields.PercentField.deserialize(Decimal('0.420')), Decimal('0.420')) self.assertEqual(fields.PercentField.deserialize(deserialized), deserialized) self.assertEqual(fields.PercentField.deserialize(None), None) self.assertEqual(fields.PercentField.serialize(deserialized), '42.010%') self.assertEqual(type(fields.PercentField.serialize(deserialized)), six.text_type) self.assertEqual(fields.PercentField.serialize(Decimal('42.010')), '4201.0%') self.assertEqual(fields.PercentField.serialize(Decimal('0')), '0.00%') self.assertEqual(fields.PercentField.serialize(None), '') self.assertEqual(fields.PercentField.serialize(Decimal('0.01')), '1%') with rows.locale_context(locale_name): self.assertEqual( type(fields.PercentField.serialize(deserialized)), six.text_type ) self.assertEqual(fields.PercentField.serialize(Decimal('42.0')), '4200%') self.assertEqual(fields.PercentField.serialize(Decimal('42000.0')), '4200000%') self.assertEqual(fields.PercentField.deserialize('42.000,00%'), Decimal('420.0000')) self.assertEqual(fields.PercentField.serialize(Decimal('42000.00'), grouping=True), '4.200.000%') with self.assertRaises(ValueError): fields.PercentField.deserialize(42) def test_DateField(self): # TODO: test timezone-aware datetime.date serialized = '2015-05-27' deserialized = datetime.date(2015, 5, 27) self.assertEqual(fields.DateField.TYPE, (datetime.date, )) self.assertEqual(fields.DateField.serialize(None), '') self.assertIs(type(fields.DateField.serialize(None)), six.text_type) self.assertIn(type(fields.DateField.deserialize(serialized)), fields.DateField.TYPE) self.assertEqual(fields.DateField.deserialize(serialized), deserialized) self.assertEqual(fields.DateField.deserialize(deserialized), deserialized) self.assertEqual(fields.DateField.deserialize(None), None) self.assertEqual(fields.DateField.deserialize(''), None) self.assertEqual(fields.DateField.serialize(deserialized), serialized) self.assertIs(type(fields.DateField.serialize(deserialized)), six.text_type) with self.assertRaises(ValueError): fields.DateField.deserialize(42) with self.assertRaises(ValueError): fields.DateField.deserialize(serialized + 'T00:00:00') with self.assertRaises(ValueError): fields.DateField.deserialize('Álvaro') with self.assertRaises(ValueError): fields.DateField.deserialize(serialized.encode('utf-8')) def test_DatetimeField(self): # TODO: test timezone-aware datetime.date serialized = '2015-05-27T01:02:03' self.assertEqual(fields.DatetimeField.TYPE, (datetime.datetime, )) deserialized = fields.DatetimeField.deserialize(serialized) self.assertIn(type(deserialized), 
fields.DatetimeField.TYPE) self.assertEqual(fields.DatetimeField.serialize(None), '') self.assertIs(type(fields.DatetimeField.serialize(None)), six.text_type) value = datetime.datetime(2015, 5, 27, 1, 2, 3) self.assertEqual(fields.DatetimeField.deserialize(serialized), value) self.assertEqual(fields.DatetimeField.deserialize(deserialized), deserialized) self.assertEqual(fields.DatetimeField.deserialize(None), None) self.assertEqual(fields.DatetimeField.serialize(value), serialized) self.assertIs(type(fields.DatetimeField.serialize(value)), six.text_type) with self.assertRaises(ValueError): fields.DatetimeField.deserialize(42) with self.assertRaises(ValueError): fields.DatetimeField.deserialize('2015-01-01') with self.assertRaises(ValueError): fields.DatetimeField.deserialize('Álvaro') with self.assertRaises(ValueError): fields.DatetimeField.deserialize(serialized.encode('utf-8')) def test_EmailField(self): # TODO: accept spaces also serialized = 'test@domain.com' self.assertEqual(fields.EmailField.TYPE, (six.text_type, )) deserialized = fields.EmailField.deserialize(serialized) self.assertIn(type(deserialized), fields.EmailField.TYPE) self.assertEqual(fields.EmailField.serialize(None), '') self.assertIs(type(fields.EmailField.serialize(None)), six.text_type) self.assertEqual(fields.EmailField.serialize(serialized), serialized) self.assertEqual(fields.EmailField.deserialize(serialized), serialized) self.assertEqual(fields.EmailField.deserialize(None), None) self.assertEqual(fields.EmailField.deserialize(''), None) self.assertIs(type(fields.EmailField.serialize(serialized)), six.text_type) with self.assertRaises(ValueError): fields.EmailField.deserialize(42) with self.assertRaises(ValueError): fields.EmailField.deserialize('2015-01-01') with self.assertRaises(ValueError): fields.EmailField.deserialize('Álvaro') with self.assertRaises(ValueError): fields.EmailField.deserialize('test@example.com'.encode('utf-8')) def test_TextField(self): self.assertEqual(fields.TextField.TYPE, (six.text_type, )) self.assertEqual(fields.TextField.serialize(None), '') self.assertIs(type(fields.TextField.serialize(None)), six.text_type) self.assertIn(type(fields.TextField.deserialize('test')), fields.TextField.TYPE) self.assertEqual(fields.TextField.deserialize('Álvaro'), 'Álvaro') self.assertIs(fields.TextField.deserialize(None), None) self.assertIs(fields.TextField.deserialize(''), '') self.assertEqual(fields.TextField.serialize('Álvaro'), 'Álvaro') self.assertIs(type(fields.TextField.serialize('Álvaro')), six.text_type) with self.assertRaises(ValueError) as exception_context: fields.TextField.deserialize('Álvaro'.encode('utf-8')) self.assertEqual(exception_context.exception.args[0], 'Binary is not supported') def test_JSONField(self): self.assertEqual(fields.JSONField.TYPE, (list, dict)) self.assertEqual(type(fields.JSONField.deserialize('[]')), list) self.assertEqual(type(fields.JSONField.deserialize('{}')), dict) deserialized = {'a': 123, 'b': 3.14, 'c': [42, 24], } serialized = json.dumps(deserialized) self.assertEqual(fields.JSONField.deserialize(serialized), deserialized) class FieldUtilsTestCase(unittest.TestCase): maxDiff = None def setUp(self): with open('tests/data/all-field-types.csv', 'rb') as fobj: data = fobj.read().decode('utf-8') lines = [line.split(',') for line in data.splitlines()] self.fields = lines[0] self.data = lines[1:] self.expected = { 'bool_column': fields.BoolField, 'integer_column': fields.IntegerField, 'float_column': fields.FloatField, 'decimal_column': fields.FloatField, 
'percent_column': fields.PercentField, 'date_column': fields.DateField, 'datetime_column': fields.DatetimeField, 'unicode_column': fields.TextField, } def test_detect_types_no_sample(self): expected = {key: fields.BinaryField for key in self.expected.keys()} result = fields.detect_types(self.fields, []) self.assertDictEqual(dict(result), expected) def test_detect_types_binary(self): # first, try values as (`bytes`/`str`) expected = {key: fields.BinaryField for key in self.expected.keys()} values = [[value.encode('utf-8') for value in row] for row in self.data] result = fields.detect_types(self.fields, values) self.assertDictEqual(dict(result), expected) # second, try base64-encoded values (as `str`/`unicode`) expected = {key: fields.TextField for key in self.expected.keys()} values = [[b64encode(value.encode('utf-8')).decode('ascii') for value in row] for row in self.data] result = fields.detect_types(self.fields, values) self.assertDictEqual(dict(result), expected) def test_detect_types(self): result = fields.detect_types(self.fields, self.data) self.assertDictEqual(dict(result), self.expected) def test_precedence(self): field_types = [ ('bool', fields.BoolField), ('integer', fields.IntegerField), ('float', fields.FloatField), ('datetime', fields.DatetimeField), ('date', fields.DateField), ('float', fields.FloatField), ('percent', fields.PercentField), ('json', fields.JSONField), ('email', fields.EmailField), ('binary1', fields.BinaryField), ('binary2', fields.BinaryField), ('text', fields.TextField), ] data = [ [ 'false', '42', '3.14', '2016-08-15T05:21:10', '2016-08-15', '2.71', '76.38%', '{"key": "value"}', 'test@example.com', b'cHl0aG9uIHJ1bGVz', b'python rules', 'Álvaro Justen' ] ] result = fields.detect_types([item[0] for item in field_types], data) self.assertDictEqual(dict(result), dict(field_types)) class FieldsFunctionsTestCase(unittest.TestCase): def test_is_null(self): self.assertTrue(fields.is_null(None)) self.assertTrue(fields.is_null('')) self.assertTrue(fields.is_null(' \t ')) self.assertTrue(fields.is_null('null')) self.assertTrue(fields.is_null('nil')) self.assertTrue(fields.is_null('none')) self.assertTrue(fields.is_null('-')) self.assertFalse(fields.is_null('Álvaro')) self.assertFalse(fields.is_null('Álvaro'.encode('utf-8'))) def test_as_string(self): self.assertEqual(fields.as_string(None), 'None') self.assertEqual(fields.as_string(42), '42') self.assertEqual(fields.as_string(3.141592), '3.141592') self.assertEqual(fields.as_string('Álvaro'), 'Álvaro') with self.assertRaises(ValueError) as exception_context: fields.as_string('Álvaro'.encode('utf-8')) self.assertEqual(exception_context.exception.args[0], 'Binary is not supported') rows-0.3.1/tests/tests_localization.py000066400000000000000000000027411310400316700201320ustar00rootroot00000000000000# coding: utf-8 # Copyright 2014-2015 Álvaro Justen # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program. If not, see . 
from __future__ import unicode_literals import unittest import platform import rows import rows.fields from rows.localization import locale_context class LocalizationTestCase(unittest.TestCase): def test_locale_context_present_in_main_namespace(self): self.assertIn('locale_context', dir(rows)) self.assertIs(locale_context, rows.locale_context) def test_locale_context(self): self.assertTrue(rows.fields.SHOULD_NOT_USE_LOCALE) if platform.system() == 'Windows': name = str('ptb_bra') else: name = 'pt_BR.UTF-8' with locale_context(name): self.assertFalse(rows.fields.SHOULD_NOT_USE_LOCALE) self.assertTrue(rows.fields.SHOULD_NOT_USE_LOCALE) rows-0.3.1/tests/tests_operations.py000066400000000000000000000102251310400316700176210ustar00rootroot00000000000000# coding: utf-8 # Copyright 2014-2015 Álvaro Justen # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program. If not, see . from __future__ import unicode_literals import datetime import unittest from collections import OrderedDict import rows import rows.operations import tests.utils as utils class OperationsTestCase(utils.RowsTestMixIn, unittest.TestCase): def test_join_imports(self): self.assertIs(rows.join, rows.operations.join) def test_join_feature(self): tables = [rows.import_from_csv('tests/data/to-merge-1.csv'), rows.import_from_csv('tests/data/to-merge-2.csv'), rows.import_from_csv('tests/data/to-merge-3.csv'),] merged = rows.join(keys=('id', 'username'), tables=tables) expected = rows.import_from_csv('tests/data/merged.csv') self.assert_table_equal(merged, expected) def test_transform_imports(self): self.assertIs(rows.transform, rows.operations.transform) def test_transform_feature(self): def transformation_function(row, table): if row.percent_column is None or row.percent_column < 0.1269: return None # discard this row new = row._asdict() new['meta'] = ', '.join(['{} => {}'.format(key, value) for key, value in table._meta.items()]) return new fields = utils.table.fields.copy() fields.update({'meta': rows.fields.TextField, }) tables = [utils.table] * 3 result = rows.transform(fields, transformation_function, *tables) self.assertEqual(result.fields, fields) not_discarded = [transformation_function(row, utils.table) for row in utils.table] * 3 not_discarded = [row for row in not_discarded if row is not None] self.assertEqual(len(result), len(not_discarded)) for expected_row, row in zip(not_discarded, result): self.assertEqual(expected_row, dict(row._asdict())) def test_transpose_imports(self): self.assertIs(rows.transpose, rows.operations.transpose) def test_transpose_feature(self): new_fields = OrderedDict([('key', rows.fields.TextField), ('value_1', rows.fields.TextField), ('value_2', rows.fields.TextField)]) table = rows.Table(fields=new_fields) table.append({'key': 'first_key', 'value_1': 'first_value_1', 'value_2': 'first_value_2', }) table.append({'key': 'second_key', 'value_1': 1, 'value_2': 2, }) table.append({'key': 'third_key', 'value_1': 3.14, 'value_2': 2.71, }) table.append({'key': 'fourth_key', 
'value_1': '2015-09-04', 'value_2': '2015-08-29', }) new_table = rows.transpose(table, fields_column='key') self.assertEqual(len(new_table), 2) self.assertEqual(len(new_table.fields), len(table)) self.assertEqual(new_table.field_names, [row.key for row in table]) self.assertEqual(new_table[0].first_key, 'first_value_1') self.assertEqual(new_table[0].second_key, 1) self.assertEqual(new_table[0].third_key, 3.14) self.assertEqual(new_table[0].fourth_key, datetime.date(2015, 9, 4)) self.assertEqual(new_table[1].first_key, 'first_value_2') self.assertEqual(new_table[1].second_key, 2) self.assertEqual(new_table[1].third_key, 2.71) self.assertEqual(new_table[1].fourth_key, datetime.date(2015, 8, 29)) rows-0.3.1/tests/tests_plugin_csv.py000066400000000000000000000233311310400316700176110ustar00rootroot00000000000000# coding: utf-8 # Copyright 2014-2016 Álvaro Justen # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program. If not, see . from __future__ import unicode_literals import csv import tempfile import textwrap import unittest from collections import OrderedDict from io import BytesIO import mock import rows import rows.plugins.plugin_csv import tests.utils as utils def make_csv_data(quote_char, field_delimiter, line_delimiter): data = [['field1', 'field2', 'field3'], ['value1', 'value2', 'value3']] lines = [['{}{}{}'.format(quote_char, value, quote_char) for value in line] for line in data] lines = line_delimiter.join([field_delimiter.join(line) for line in data]) return data, lines class PluginCsvTestCase(utils.RowsTestMixIn, unittest.TestCase): plugin_name = 'csv' file_extension = 'csv' filename = 'tests/data/all-field-types.csv' encoding = 'utf-8' assert_meta_encoding = True def test_imports(self): self.assertIs(rows.import_from_csv, rows.plugins.plugin_csv.import_from_csv) self.assertIs(rows.export_to_csv, rows.plugins.plugin_csv.export_to_csv) @mock.patch('rows.plugins.plugin_csv.create_table') def test_import_from_csv_uses_create_table(self, mocked_create_table): mocked_create_table.return_value = 42 kwargs = {'some_key': 123, 'other': 456, } result = rows.import_from_csv(self.filename, encoding='utf-8', **kwargs) self.assertTrue(mocked_create_table.called) self.assertEqual(mocked_create_table.call_count, 1) self.assertEqual(result, 42) call = mocked_create_table.call_args kwargs['meta'] = {'imported_from': 'csv', 'filename': self.filename, 'encoding': 'utf-8',} self.assertEqual(call[1], kwargs) @mock.patch('rows.plugins.plugin_csv.create_table') def test_import_from_csv_retrieve_desired_data(self, mocked_create_table): mocked_create_table.return_value = 42 # import using filename table_1 = rows.import_from_csv(self.filename) call_args = mocked_create_table.call_args_list[0] self.assert_create_table_data(call_args) # import using fobj with open(self.filename, 'rb') as fobj: table_2 = rows.import_from_csv(fobj) call_args = mocked_create_table.call_args_list[1] self.assert_create_table_data(call_args) @mock.patch('rows.plugins.plugin_csv.create_table') def 
test_import_from_csv_discover_dialect(self, mocked_create_table): data, lines = make_csv_data(quote_char="'", field_delimiter=";", line_delimiter="\r\n") fobj = BytesIO() fobj.write(lines.encode('utf-8')) fobj.seek(0) rows.import_from_csv(fobj) call_args = mocked_create_table.call_args_list[0] self.assertEqual(data, list(call_args[0][0])) @mock.patch('rows.plugins.plugin_csv.create_table') def test_import_from_csv_force_dialect(self, mocked_create_table): data, lines = make_csv_data(quote_char="'", field_delimiter="\t", line_delimiter="\r\n") fobj = BytesIO() fobj.write(lines.encode('utf-8')) fobj.seek(0) rows.import_from_csv(fobj, dialect='excel-tab') call_args = mocked_create_table.call_args_list[0] self.assertEqual(data, list(call_args[0][0])) def test_detect_dialect_more_data(self): temp = tempfile.NamedTemporaryFile(delete=False) filename = '{}.{}'.format(temp.name, self.file_extension) self.files_to_delete.append(filename) # If the sniffer reads only the first line, it will think the delimiter # is ',' instead of ';' data = textwrap.dedent(''' field1,samefield;field2,other row1value1;row1value2 row2value1;row2value2 ''').strip() with open(filename, 'wb') as fobj: fobj.write(data.encode('utf-8')) table = rows.import_from_csv(filename, encoding='utf-8') self.assertEqual(table.field_names, ['field1samefield', 'field2other']) self.assertEqual(table[0].field1samefield, 'row1value1') self.assertEqual(table[0].field2other, 'row1value2') self.assertEqual(table[1].field1samefield, 'row2value1') self.assertEqual(table[1].field2other, 'row2value2') def test_detect_dialect_using_json(self): temp = tempfile.NamedTemporaryFile(delete=False) filename = '{}.{}'.format(temp.name, self.file_extension) encoding = 'utf-8' self.files_to_delete.append(filename) # Using JSON will force the sniffer not to include ':' and '}' in the # possible delimiters table = rows.Table(fields=OrderedDict([ ('jsoncolumn1', rows.fields.JSONField), ('jsoncolumn2', rows.fields.JSONField), ])) table.append({ 'jsoncolumn1': '{"a": 42}', 'jsoncolumn2': '{"b": 43}', }) table.append({ 'jsoncolumn1': '{"c": 44}', 'jsoncolumn2': '{"d": 45}', }) rows.export_to_csv(table, filename, encoding=encoding) table = rows.import_from_csv(filename, encoding=encoding) self.assertEqual(table.field_names, ['jsoncolumn1', 'jsoncolumn2']) self.assertDictEqual(table[0].jsoncolumn1, {'a': 42}) self.assertDictEqual(table[0].jsoncolumn2, {'b': 43}) self.assertDictEqual(table[1].jsoncolumn1, {'c': 44}) self.assertDictEqual(table[1].jsoncolumn2, {'d': 45}) @mock.patch('rows.plugins.plugin_csv.serialize') def test_export_to_csv_uses_serialize(self, mocked_serialize): temp = tempfile.NamedTemporaryFile(delete=False) self.files_to_delete.append(temp.name) kwargs = {'test': 123, 'parameter': 3.14, } mocked_serialize.return_value = iter([utils.table.fields.keys()]) rows.export_to_csv(utils.table, temp.name, encoding='utf-8', **kwargs) self.assertTrue(mocked_serialize.called) self.assertEqual(mocked_serialize.call_count, 1) call = mocked_serialize.call_args self.assertEqual(call[0], (utils.table, )) self.assertEqual(call[1], kwargs) def test_export_to_csv_filename(self): # TODO: may test file contents temp = tempfile.NamedTemporaryFile(delete=False) self.files_to_delete.append(temp.name) rows.export_to_csv(utils.table, temp.name) table = rows.import_from_csv(temp.name) self.assert_table_equal(table, utils.table) temp.file.seek(0) result = temp.file.read() export_in_memory = rows.export_to_csv(utils.table, None) self.assertEqual(result, export_in_memory) def 
test_export_to_csv_fobj(self): # TODO: may test with codecs.open passing an encoding # TODO: may test file contents temp = tempfile.NamedTemporaryFile(delete=False) self.files_to_delete.append(temp.name) rows.export_to_csv(utils.table, temp.file) table = rows.import_from_csv(temp.name) self.assert_table_equal(table, utils.table) def test_issue_168(self): temp = tempfile.NamedTemporaryFile(delete=False) filename = '{}.{}'.format(temp.name, self.file_extension) self.files_to_delete.append(filename) table = rows.Table(fields=OrderedDict([ ('jsoncolumn', rows.fields.JSONField), ])) table.append({'jsoncolumn': '{"python": 42}'}) rows.export_to_csv(table, filename) table2 = rows.import_from_csv(filename) self.assert_table_equal(table, table2) def test_quotes(self): temp = tempfile.NamedTemporaryFile(delete=False) filename = '{}.{}'.format(temp.name, self.file_extension) self.files_to_delete.append(filename) table = rows.Table(fields=OrderedDict([ ('field_1', rows.fields.TextField), ('field_2', rows.fields.TextField), ('field_3', rows.fields.TextField), ('field_4', rows.fields.TextField), ])) table.append({ 'field_1': '"quotes"', 'field_2': 'test "quotes"', 'field_3': '"quotes" test', 'field_4': 'test "quotes" test', }) # we need this second row since `"quotes"` on `field_1` could be # `JSONField` or `TextField` table.append({ 'field_1': 'noquotes', 'field_2': 'test "quotes"', 'field_3': '"quotes" test', 'field_4': 'test "quotes" test', }) rows.export_to_csv(table, filename) table2 = rows.import_from_csv(filename) self.assert_table_equal(table, table2) def test_export_to_csv_accepts_dialect(self): result_1 = rows.export_to_csv(utils.table, dialect=csv.excel_tab) result_2 = rows.export_to_csv(utils.table, dialect=csv.excel) self.assertEqual(result_1.replace(b'\t', b','), result_2) rows-0.3.1/tests/tests_plugin_dicts.py000066400000000000000000000075211310400316700201270ustar00rootroot00000000000000# coding: utf-8 # Copyright 2014-2016 Álvaro Justen # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. 
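# Usage sketch (illustrative only, with made-up field names): the dicts
# plugin takes the union of the keys of all input dicts and fills missing
# keys with `None`, so a call like
#
#     import rows
#     table = rows.import_from_dicts([{'a': 1, 'b': 2}, {'a': 3}])
#     rows.export_to_dicts(table)  # -> [{'a': 1, 'b': 2}, {'a': 3, 'b': None}]
#
# round-trips every row carrying the full set of fields, as the tests
# below verify against real data.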
from __future__ import unicode_literals import tempfile import unittest from io import BytesIO import mock import rows import rows.plugins.dicts import tests.utils as utils class PluginDictTestCase(utils.RowsTestMixIn, unittest.TestCase): plugin_name = 'dicts' data = [{'name': 'Álvaro', 'ids': 123, 'number': 3, }, {'name': 'Test', 'ids': '456', }, # missing 'number', 'ids' as str {'name': 'Python', 'ids': '123, 456', 'other': 3.14, },] def test_imports(self): self.assertIs(rows.import_from_dicts, rows.plugins.dicts.import_from_dicts) self.assertIs(rows.export_to_dicts, rows.plugins.dicts.export_to_dicts) @mock.patch('rows.plugins.dicts.create_table') def test_import_from_dicts_uses_create_table(self, mocked_create_table): mocked_create_table.return_value = 42 kwargs = {'some_key': 123, 'other': 456, } result = rows.import_from_dicts(self.data, **kwargs) self.assertTrue(mocked_create_table.called) self.assertEqual(mocked_create_table.call_count, 1) self.assertEqual(result, 42) call = mocked_create_table.call_args kwargs['meta'] = {'imported_from': 'dicts', } self.assertEqual(call[1], kwargs) def test_import_from_dicts_return_desired_data(self): table = rows.import_from_dicts(self.data) self.assertEqual(len(table), 3) self.assertEqual(len(table.fields), 4) self.assertEqual(set(table.field_names), set(['ids', 'name', 'number', 'other'])) self.assertEqual(table.fields['name'], rows.fields.TextField) self.assertEqual(table.fields['ids'], rows.fields.TextField) self.assertEqual(table.fields['number'], rows.fields.IntegerField) self.assertEqual(table.fields['other'], rows.fields.FloatField) self.assertEqual(table[0].name, 'Álvaro') self.assertEqual(table[0].ids, '123') self.assertEqual(table[0].number, 3) self.assertEqual(table[0].other, None) self.assertEqual(table[1].name, 'Test') self.assertEqual(table[1].ids, '456') self.assertEqual(table[1].number, None) self.assertEqual(table[1].other, None) self.assertEqual(table[2].name, 'Python') self.assertEqual(table[2].ids, '123, 456') self.assertEqual(table[2].number, None) self.assertEqual(table[2].other, 3.14) def test_export_to_dicts(self): table = rows.import_from_dicts(self.data) result = rows.export_to_dicts(table) full_data = [ {'name': 'Álvaro', 'ids': '123', 'number': 3, 'other': None, }, {'name': 'Test', 'ids': '456', 'number': None, 'other': None, }, {'name': 'Python', 'ids': '123, 456', 'number': None, 'other': 3.14, },] self.assertEqual(len(result), len(table)) for expected, actual in zip(full_data, result): self.assertDictEqual(expected, actual) rows-0.3.1/tests/tests_plugin_html.py000066400000000000000000000452001310400316700177610ustar00rootroot00000000000000# coding: utf-8 # Copyright 2014-2016 Álvaro Justen # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program. If not, see . 
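# Usage sketch (illustrative only): a page containing several <table>
# elements can be read one table at a time through the `index` argument
# (0 selects the first table), and cell markup can be kept with
# `preserve_html`:
#
#     import rows
#     inner = rows.import_from_html('tests/data/nested-table.html',
#                                   index=1, preserve_html=True)
#
# Both parameters are exercised by the tests in this module.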
from __future__ import unicode_literals import tempfile import unittest from collections import OrderedDict from io import BytesIO from textwrap import dedent import mock import rows import rows.plugins.plugin_html import tests.utils as utils # TODO: test unescape # TODO: test colspan # TODO: test rowspan # TODO: test more nested tables # URL = 'https://finance.yahoo.com/q;_ylt=At7WXTIEGzyrIHemoSMI7I.iuYdG;_ylu=X3oDMTBxdGVyNzJxBHNlYwNVSCAzIERlc2t0b3AgU2VhcmNoIDEx;_ylg=X3oDMTByaDM4cG9kBGxhbmcDZW4tVVMEcHQDMgR0ZXN0AzUxMjAxMw--;_ylv=3?s=GOOG&uhb=uhb2&type=2button&fr=uh3_finance_web_gs' # URL_2 = 'http://www.rio.rj.gov.br/dlstatic/10112/2147539/DLFE-237833.htm/paginanova2.0.0.0._B.htm' def cleanup_lines(lines): return [line.strip() for line in lines.strip().split('\n') if line.strip()] class PluginHtmlTestCase(utils.RowsTestMixIn, unittest.TestCase): plugin_name = 'html' file_extension = 'html' filename = 'tests/data/all-field-types.html' encoding = 'utf-8' assert_meta_encoding = True def test_imports(self): self.assertIs(rows.import_from_html, rows.plugins.plugin_html.import_from_html) self.assertIs(rows.export_to_html, rows.plugins.plugin_html.export_to_html) def test_import_from_html_filename(self): table = rows.import_from_html(self.filename, encoding=self.encoding) self.assert_table_equal(table, utils.table) expected_meta = {'imported_from': 'html', 'filename': self.filename, 'encoding': self.encoding,} self.assertEqual(table.meta, expected_meta) def test_import_from_html_fobj(self): # TODO: may test with codecs.open passing an encoding with open(self.filename, mode='rb') as fobj: table = rows.import_from_html(fobj, encoding=self.encoding) self.assert_table_equal(table, utils.table) expected_meta = {'imported_from': 'html', 'filename': self.filename, 'encoding': self.encoding,} self.assertEqual(table.meta, expected_meta) @mock.patch('rows.plugins.plugin_html.create_table') def test_import_from_html_uses_create_table(self, mocked_create_table): mocked_create_table.return_value = 42 kwargs = {'some_key': 123, 'other': 456, } result = rows.import_from_html(self.filename, encoding='iso-8859-1', **kwargs) self.assertTrue(mocked_create_table.called) self.assertEqual(mocked_create_table.call_count, 1) self.assertEqual(result, 42) call = mocked_create_table.call_args kwargs['meta'] = {'imported_from': 'html', 'filename': self.filename, 'encoding': 'iso-8859-1',} self.assertEqual(call[1], kwargs) def test_export_to_html_filename(self): # TODO: may test file contents temp = tempfile.NamedTemporaryFile(delete=False) self.files_to_delete.append(temp.name) rows.export_to_html(utils.table, temp.name) table = rows.import_from_html(temp.name) self.assert_table_equal(table, utils.table) def test_export_to_html_fobj(self): # TODO: may test with codecs.open passing an encoding # TODO: may test file contents temp = tempfile.NamedTemporaryFile(delete=False, mode='wb') self.files_to_delete.append(temp.name) rows.export_to_html(utils.table, temp.file) table = rows.import_from_html(temp.name) self.assert_table_equal(table, utils.table) @mock.patch('rows.plugins.plugin_html.serialize') def test_export_to_html_uses_serialize(self, mocked_serialize): temp = tempfile.NamedTemporaryFile(delete=False) self.files_to_delete.append(temp.name) kwargs = {'test': 123, 'parameter': 3.14,} mocked_serialize.return_value = iter([utils.table.fields.keys()]) rows.export_to_html(utils.table, temp.name, encoding='utf-8', **kwargs) self.assertTrue(mocked_serialize.called) self.assertEqual(mocked_serialize.call_count, 1) call = 
mocked_serialize.call_args self.assertEqual(call[0], (utils.table, )) self.assertEqual(call[1], kwargs) @mock.patch('rows.plugins.plugin_html.export_data') def test_export_to_html_uses_export_data(self, mocked_export_data): temp = tempfile.NamedTemporaryFile(delete=False) self.files_to_delete.append(temp.name) kwargs = {'test': 123, 'parameter': 3.14, 'encoding': 'utf-8', } mocked_export_data.return_value = 42 result = rows.export_to_html(utils.table, temp.name, **kwargs) self.assertTrue(mocked_export_data.called) self.assertEqual(mocked_export_data.call_count, 1) self.assertEqual(result, 42) call = mocked_export_data.call_args self.assertEqual(call[0][0], temp.name) self.assertEqual(call[1], {'mode': 'wb'}) def test_export_to_html_none(self): # TODO: may test with codecs.open passing an encoding # TODO: may test file contents temp = tempfile.NamedTemporaryFile(delete=False, mode='rb+') self.files_to_delete.append(temp.name) result = rows.export_to_html(utils.table) rows.export_to_html(utils.table, temp.file) temp.file.seek(0) self.assertEqual(temp.file.read(), result) def test_table_index(self): filename = 'tests/data/simple-table.html' fobj = open(filename, mode='rb') table_1 = rows.import_from_html(fobj) self.assertEqual(set(table_1.fields.keys()), set(['t0r0c0', 't0r0c1'])) self.assertEqual(len(table_1), 1) self.assertEqual(table_1[0].t0r0c0, 't0r1c0') self.assertEqual(table_1[0].t0r0c1, 't0r1c1') fobj.seek(0) table_2 = rows.import_from_html(fobj, index=1) self.assertEqual(set(table_2.fields.keys()), set(['t1r0c0', 't1r0c1'])) self.assertEqual(len(table_2), 2) self.assertEqual(table_2[0].t1r0c0, 't1r1c0') self.assertEqual(table_2[0].t1r0c1, 't1r1c1') self.assertEqual(table_2[1].t1r0c0, 't1r2c0') self.assertEqual(table_2[1].t1r0c1, 't1r2c1') def test_table_thead_tbody(self): filename = 'tests/data/table-thead-tbody.html' fobj = open(filename, mode='rb') table = rows.import_from_html(fobj) self.assertEqual(set(table.fields.keys()), set(['t1', 't2'])) self.assertEqual(len(table), 2) self.assertEqual(table[0].t1, '456') self.assertEqual(table[0].t2, '123') self.assertEqual(table[1].t1, 'qqq') self.assertEqual(table[1].t2, 'aaa') def test_nested_tables_outer(self): filename = 'tests/data/nested-table.html' fobj = open(filename, mode='rb') table = rows.import_from_html(fobj) self.assertEqual(set(table.fields.keys()), set(['t00r0c0', 't00r0c1', 't00r0c2'])) self.assertEqual(len(table), 3) self.assertEqual(table[0].t00r0c0, 't0,0r1c0') self.assertEqual(table[0].t00r0c1, 't0,0r1c1') self.assertEqual(table[0].t00r0c2, 't0,0r1c2') # if there are nested tables, the inner ones will be represented as # strings (each <td>...</td> element will return only one string, even # if there is a <table> inside it) inner_table = ('t0,1r0c0 t0,1r0c1 t0,1r1c0 t0,1r1c1 t0,1r2c0 ' 't0,1r2c1 t0,2r0c0 t0,2r0c1 t0,2r1c0 t0,2r1c1 ' 't0,1r3c1 t0,1r4c0 t0,1r4c1 t0,1r5c0 t0,1r5c1') self.assertEqual(table[1].t00r0c0, 't0,0r2c0') self.assertEqual(table[1].t00r0c1, inner_table) self.assertEqual(table[1].t00r0c2, 't0,0r2c2') self.assertEqual(table[2].t00r0c0, 't0,0r3c0') self.assertEqual(table[2].t00r0c1, 't0,0r3c1') self.assertEqual(table[2].t00r0c2, 't0,0r3c2') def test_nested_tables_first_inner(self): filename = 'tests/data/nested-table.html' fobj = open(filename, mode='rb') table = rows.import_from_html(fobj, index=1) self.assertEqual(set(table.fields.keys()), set(['t01r0c0', 't01r0c1'])) self.assertEqual(len(table), 5) self.assertEqual(table[0].t01r0c0, 't0,1r1c0') self.assertEqual(table[0].t01r0c1, 't0,1r1c1') self.assertEqual(table[1].t01r0c0, 't0,1r2c0') self.assertEqual(table[1].t01r0c1, 't0,1r2c1') inner_table = 't0,2r0c0 t0,2r0c1 t0,2r1c0 t0,2r1c1' self.assertEqual(table[2].t01r0c0, inner_table) self.assertEqual(table[2].t01r0c1, 't0,1r3c1') self.assertEqual(table[3].t01r0c0, 't0,1r4c0') self.assertEqual(table[3].t01r0c1, 't0,1r4c1') self.assertEqual(table[4].t01r0c0, 't0,1r5c0') self.assertEqual(table[4].t01r0c1, 't0,1r5c1') def test_nested_tables_second_inner(self): filename = 'tests/data/nested-table.html' fobj = open(filename, mode='rb') table = rows.import_from_html(fobj, index=2) self.assertEqual(set(table.fields.keys()), set(['t02r0c0', 't02r0c1'])) self.assertEqual(len(table), 1) self.assertEqual(table[0].t02r0c0, 't0,2r1c0') self.assertEqual(table[0].t02r0c1, 't0,2r1c1') def test_preserve_html(self): filename = 'tests/data/nested-table.html' fobj = open(filename, mode='rb') table = rows.import_from_html(fobj, preserve_html=True) # TODO: test without passing encoding expected_data = ['<table>', '<tr>', '<td> t0,1r0c0 </td>', '<td> t0,1r0c1 </td>', '</tr>', '<tr>', '<td> t0,1r1c0 </td>', '<td> t0,1r1c1 </td>', '</tr>', '<tr>', '<td> t0,1r2c0 </td>', '<td> t0,1r2c1 </td>', '</tr>', '<tr>', '<td>', '<table>', '<tr>', '<td> t0,2r0c0 </td>', '<td> t0,2r0c1 </td>', '</tr>', '<tr>', '<td> t0,2r1c0 </td>', '<td> t0,2r1c1 </td>', '</tr>', '</table>', '</td>', '<td> t0,1r3c1 </td>', '</tr>', '<tr>', '<td> t0,1r4c0 </td>', '<td> t0,1r4c1 </td>', '</tr>', '<tr>', '<td> t0,1r5c0 </td>', '<td> t0,1r5c1 </td>', '</tr>', '</table>'] self.assertEqual(cleanup_lines(table[1].t00r0c1), expected_data) def test_preserve_html_None(self): html = dedent(''' <table> <tr> <td> f1 </td> <td> f2 </td> <td> f3 </td> </tr> <tr> <td> r0f1 </td> <td> r0f2 </td> <td> r0f3 </td> </tr> </table> ''').encode('utf-8') table = rows.import_from_html(BytesIO(html), encoding='utf-8', preserve_html=True) table2 = rows.import_from_html(BytesIO(html), encoding='utf-8', preserve_html=False) self.assertEqual(table[0].f1, 'r0f1') self.assertEqual(table[0].f2, 'r0f2') self.assertEqual(table[0].f3, 'r0f3') @mock.patch('rows.plugins.plugin_html.create_table') def test_preserve_html_and_not_skip_header(self, mocked_create_table): filename = 'tests/data/table-with-sections.html' # If `import_from_html` needs to identify field names, then it # should not preserve HTML inside first row table_1 = rows.import_from_html(filename, index=1, preserve_html=True) call_args = mocked_create_table.call_args_list.pop() data = list(call_args[0][0]) kwargs = call_args[1] self.assertEqual(kwargs.get('fields', None), None) self.assertEqual(len(data), 6) self.assertNotIn('<', data[0][1]) self.assertNotIn('>', data[0][1]) for row in data[1:]: # Second field has HTML self.assertIn('<', row[1]) self.assertIn('>', row[1]) # If we provide fields and ask to preserve HTML and not to skip the # header, then it should not strip HTML from any row fields = OrderedDict([('first', rows.fields.TextField), ('second', rows.fields.TextField), ('third', rows.fields.TextField), ('fourth', rows.fields.TextField)]) table_2 = rows.import_from_html(filename, index=1, fields=fields, preserve_html=True, skip_header=False) call_args = mocked_create_table.call_args_list.pop() data = list(call_args[0][0]) kwargs = call_args[1] self.assertEqual(kwargs.get('fields', None), fields) self.assertEqual(len(data), 6) for row in data: # Second field has HTML and should not be stripped self.assertIn('<', row[1]) self.assertIn('>', row[1]) def test_ignore_colspan(self): filename = 'tests/data/colspan-table.html' fobj = open(filename, mode='rb') table = rows.import_from_html(fobj, ignore_colspan=True) self.assertEqual(set(table.fields.keys()), set(['field1', 'field2'])) self.assertEqual(len(table), 2) self.assertEqual(table[0].field1, 'row1field1') self.assertEqual(table[0].field2, 'row1field2') self.assertEqual(table[1].field1, 'row2field1') self.assertEqual(table[1].field2, 'row2field2') fobj = open(filename, mode='rb') with self.assertRaises(ValueError) as raises: table = rows.import_from_html(fobj, ignore_colspan=False) self.assertEqual(raises.exception.args[0], 'Number of fields differ') def test_extract_properties(self): filename = 'tests/data/properties-table.html' fobj = open(filename, mode='rb') table = rows.import_from_html(fobj, properties=True) self.assertEqual(table.field_names, ['field1', 'field2', 'properties']) self.assertEqual(table.field_types, [rows.fields.TextField, rows.fields.TextField, rows.fields.JSONField]) properties_1 = {'class': 'some-class another-class', 'data-test': 'value', } properties_2 = {'class': 'css-class', 'data-test': 'value2', } self.assertEqual(len(table), 2) self.assertEqual(table[0].field1, 'row1field1') self.assertEqual(table[0].field2, 'row1field2') self.assertEqual(table[0].properties, properties_1) self.assertEqual(table[1].field1, 'row2field1') self.assertEqual(table[1].field2, 'row2field2') self.assertEqual(table[1].properties, properties_2) def test_issue_168(self): temp = tempfile.NamedTemporaryFile(delete=False) filename = '{}.{}'.format(temp.name, self.file_extension) self.files_to_delete.append(filename) table = rows.Table(fields=OrderedDict([('jsoncolumn', rows.fields.JSONField)])) table.append({'jsoncolumn': '{"python": 42}'}) rows.export_to_html(table, filename) table2 = rows.import_from_html(filename) 
self.assert_table_equal(table, table2) def test_export_to_html_unescaped_content(self): table = rows.Table(fields=OrderedDict([ ('unescaped_content', rows.fields.TextField) ])) table.append({'unescaped_content': '<&>'}) output = rows.export_to_html(table) self.assertIn(b' &lt;&amp;&gt; ', output) class PluginHtmlUtilsTestCase(unittest.TestCase): html = '<a href="some-url" class="some-class"> some text </a> other' def test_tag_to_dict(self): result = rows.plugins.plugin_html.tag_to_dict(self.html) expected = {'text': ' some text ', 'class': 'some-class', 'href': 'some-url'} self.assertEqual(result, expected) def test_extract_node_text(self): from lxml.html import document_fromstring html = '''<a href="some-url"> <b>bold</b> <span>link</span> bold text </a>''' node = document_fromstring(html) desired_node = node.xpath('//a')[0] expected = 'bold link bold text' result = rows.plugins.plugin_html._extract_node_text(desired_node) self.assertEqual(result, expected) def test_extract_text_from_html(self): expected = 'some text other' result = rows.plugins.plugin_html.extract_text(self.html) self.assertEqual(result, expected) # Real HTML from # html = '''<td> 0 ( <span>0</span> %) </td>''' expected = '0 ( 0 %)' result = rows.plugins.plugin_html.extract_text(html) self.assertEqual(result, expected) # test HTML unescape html = 'Álvaro &amp; Python' expected = 'Álvaro & Python' result = rows.plugins.plugin_html.extract_text(html) self.assertEqual(result, expected) def test_extract_links_from_html(self): # Real HTML from # html = ''' abcl <a href="http://bugs.debian.org/cgi-bin/bugreport.cgi?bug=608466"> [1] </a> <a href="http://bugs.debian.org/cgi-bin/bugreport.cgi?bug=701712"> [2] </a> ''' expected = ['http://bugs.debian.org/cgi-bin/bugreport.cgi?bug=608466', 'http://bugs.debian.org/cgi-bin/bugreport.cgi?bug=701712'] result = rows.plugins.plugin_html.extract_links(html) self.assertEqual(result, expected) rows-0.3.1/tests/tests_plugin_json.py000066400000000000000000000207411310400316700177710ustar00rootroot00000000000000# coding: utf-8 # Copyright 2014-2016 Álvaro Justen # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. 
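# Usage sketch (illustrative only; `table` and the filename are
# placeholders): on export, values JSON can represent natively stay
# native (int, float, bool, with decimals written as floats), while
# dates, datetimes and percentages are serialized to strings:
#
#     import rows
#     rows.export_to_json(table, 'output.json')
#
# `test_export_to_json_filename_save_data_in_correct_format` below checks
# this mapping column by column.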
from __future__ import unicode_literals import itertools import json import tempfile import unittest from collections import Counter from collections import OrderedDict from collections import defaultdict from textwrap import dedent import six import mock import rows import tests.utils as utils class PluginJsonTestCase(utils.RowsTestMixIn, unittest.TestCase): plugin_name = 'json' file_extension = 'json' filename = 'tests/data/all-field-types.json' encoding = 'utf-8' assert_meta_encoding = True def test_imports(self): self.assertIs(rows.import_from_json, rows.plugins.plugin_json.import_from_json) self.assertIs(rows.export_to_json, rows.plugins.plugin_json.export_to_json) @mock.patch('rows.plugins.plugin_json.create_table') def test_import_from_json_uses_create_table(self, mocked_create_table): mocked_create_table.return_value = 42 kwargs = {'some_key': 123, 'other': 456, } result = rows.import_from_json(self.filename, encoding=self.encoding, **kwargs) self.assertTrue(mocked_create_table.called) self.assertEqual(mocked_create_table.call_count, 1) self.assertEqual(result, 42) call = mocked_create_table.call_args kwargs['meta'] = {'imported_from': 'json', 'filename': self.filename, 'encoding': self.encoding,} self.assertEqual(call[1], kwargs) @mock.patch('rows.plugins.plugin_json.create_table') def test_import_from_json_retrieve_desired_data(self, mocked_create_table): mocked_create_table.return_value = 42 # import using filename table_1 = rows.import_from_json(self.filename) call_args = mocked_create_table.call_args_list[0] self.assert_create_table_data(call_args, field_ordering=False) # import using fobj with open(self.filename) as fobj: table_2 = rows.import_from_json(fobj) call_args = mocked_create_table.call_args_list[1] self.assert_create_table_data(call_args, field_ordering=False) @mock.patch('rows.plugins.plugin_json.create_table') def test_import_from_json_uses_create_table_with_other_encoding(self, mocked_create_table): mocked_create_table.return_value = 42 kwargs = {'some_key': 123, 'other': 456, } encoding = 'iso-8859-15' result = rows.import_from_json(self.filename, encoding=encoding, **kwargs) self.assertTrue(mocked_create_table.called) self.assertEqual(mocked_create_table.call_count, 1) self.assertEqual(result, 42) call = mocked_create_table.call_args kwargs['meta'] = {'imported_from': 'json', 'filename': self.filename, 'encoding': encoding,} self.assertEqual(call[1], kwargs) @mock.patch('rows.plugins.plugin_json.prepare_to_export') def test_export_to_json_uses_prepare_to_export(self, mocked_prepare_to_export): temp = tempfile.NamedTemporaryFile(delete=False, mode='wb') self.files_to_delete.append(temp.name) kwargs = {'test': 123, 'parameter': 3.14, } mocked_prepare_to_export.return_value = \ iter([utils.table.fields.keys()]) rows.export_to_json(utils.table, temp.name, **kwargs) self.assertTrue(mocked_prepare_to_export.called) self.assertEqual(mocked_prepare_to_export.call_count, 1) call = mocked_prepare_to_export.call_args self.assertEqual(call[0], (utils.table, )) self.assertEqual(call[1], kwargs) @mock.patch('rows.plugins.plugin_json.export_data') def test_export_to_json_uses_export_data(self, mocked_export_data): temp = tempfile.NamedTemporaryFile(delete=False, mode='wb') self.files_to_delete.append(temp.name) kwargs = {'test': 123, 'parameter': 3.14, } mocked_export_data.return_value = 42 result = rows.export_to_json(utils.table, temp.name, **kwargs) self.assertTrue(mocked_export_data.called) self.assertEqual(mocked_export_data.call_count, 1) self.assertEqual(result, 42) call = 
mocked_export_data.call_args self.assertEqual(call[0][0], temp.name) self.assertEqual(call[1], {'mode': 'wb'}) def test_export_to_json_filename(self): # TODO: may test file contents temp = tempfile.NamedTemporaryFile(delete=False, mode='wb') self.files_to_delete.append(temp.name) rows.export_to_json(utils.table, temp.name) table = rows.import_from_json(temp.name) self.assert_table_equal(table, utils.table) def test_export_to_json_fobj(self): # TODO: may test with codecs.open passing an encoding # TODO: may test file contents temp = tempfile.NamedTemporaryFile(delete=False, mode='wb') self.files_to_delete.append(temp.name) rows.export_to_json(utils.table, temp.file) table = rows.import_from_json(temp.name) self.assert_table_equal(table, utils.table) def test_export_to_json_filename_save_data_in_correct_format(self): temp = tempfile.NamedTemporaryFile(delete=False, mode='wb') self.files_to_delete.append(temp.name) rows.export_to_json(utils.table, temp.name) with open(temp.name) as fobj: imported_json = json.load(fobj) COLUMN_TYPE = { 'float_column': float, 'decimal_column': float, 'bool_column': bool, 'integer_column': int, 'date_column': six.text_type, 'datetime_column': six.text_type, 'percent_column': six.text_type, 'unicode_column': six.text_type, } field_types = defaultdict(list) for row in imported_json: for field_name, value in row.items(): field_types[field_name].append(type(value)) # We test if the JSON was created serializing all the fields correctly # (some as native JSON values, like int and float) and others needed to # be serialized, like date, datetime etc. for field_name, value_types in field_types.items(): if field_name != 'unicode_column': self.assertEqual(Counter(value_types), Counter({type(None): 1, COLUMN_TYPE[field_name]: 6})) else: self.assertEqual(Counter(value_types), Counter({COLUMN_TYPE[field_name]: 7})) def test_export_to_json_indent(self): temp = tempfile.NamedTemporaryFile(delete=False, mode='rb+') self.files_to_delete.append(temp.name) table = rows.Table(fields=utils.table.fields) table.append(utils.table[0]._asdict()) rows.export_to_json(table, temp.name, indent=2) temp.file.seek(0) result = temp.file.read().strip().replace(b'\r\n', b'\n').splitlines() self.assertEqual(result[0], b'[') self.assertEqual(result[1], b' {') for line in result[2:-2]: self.assertTrue(line.startswith(b' ')) self.assertEqual(result[-2], b' }') self.assertEqual(result[-1], b']') def test_issue_168(self): temp = tempfile.NamedTemporaryFile(delete=False, mode='wb') filename = '{}.{}'.format(temp.name, self.file_extension) self.files_to_delete.append(filename) table = rows.Table(fields= OrderedDict([('jsoncolumn', rows.fields.JSONField)])) table.append({'jsoncolumn': '{"python": 42}'}) rows.export_to_json(table, filename) table2 = rows.import_from_json(filename) self.assert_table_equal(table, table2) rows-0.3.1/tests/tests_plugin_ods.py000066400000000000000000000045771310400316700176160ustar00rootroot00000000000000# coding: utf-8 # Copyright 2014-2015 Álvaro Justen # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. 
# # You should have received a copy of the GNU General Public License # along with this program. If not, see . from __future__ import unicode_literals import unittest import mock import rows import rows.fields as fields import rows.plugins.ods import tests.utils as utils class PluginOdsTestCase(utils.RowsTestMixIn, unittest.TestCase): plugin_name = 'ods' filename = 'tests/data/all-field-types.ods' assert_meta_encoding = False def test_imports(self): self.assertIs(rows.import_from_ods, rows.plugins.ods.import_from_ods) @mock.patch('rows.plugins.ods.create_table') def test_import_from_ods_uses_create_table(self, mocked_create_table): mocked_create_table.return_value = 42 kwargs = {'encoding': 'test', 'some_key': 123, 'other': 456, } result = rows.import_from_ods(self.filename, **kwargs) self.assertTrue(mocked_create_table.called) self.assertEqual(mocked_create_table.call_count, 1) self.assertEqual(result, 42) call = mocked_create_table.call_args kwargs['meta'] = {'imported_from': 'ods', 'filename': self.filename, } self.assertEqual(call[1], kwargs) @mock.patch('rows.plugins.ods.create_table') def test_import_from_ods_retrieve_desired_data(self, mocked_create_table): mocked_create_table.return_value = 42 # import using filename table_1 = rows.import_from_ods(self.filename) call_args = mocked_create_table.call_args_list[0] self.assert_create_table_data(call_args) # import using fobj with open(self.filename, 'rb') as fobj: table_2 = rows.import_from_ods(fobj) call_args = mocked_create_table.call_args_list[1] self.assert_create_table_data(call_args) rows-0.3.1/tests/tests_plugin_parquet.py000066400000000000000000000122541310400316700205010ustar00rootroot00000000000000# coding: utf-8 # Copyright 2016 Álvaro Justen # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program. If not, see . from __future__ import unicode_literals import unittest from collections import OrderedDict import mock import rows DATA = [['nation_key', 'name', 'region_key', 'comment_col'], [0, b'ALGERIA', 0, b' haggle. carefully final deposits detect slyly agai'], [1, b'ARGENTINA', 1, b'al foxes promise slyly according to the regular accounts. bold requests alon'], [2, b'BRAZIL', 1, b'y alongside of the pending deposits. carefully special packages are about the ironic forges. slyly special '], [3, b'CANADA', 1, b'eas hang ironic, silent packages. slyly regular packages are furiously over the tithes. fluffily bold'], [4, b'EGYPT', 4, b'y above the carefully unusual theodolites. final dugouts are quickly across the furiously regular d'], [5, b'ETHIOPIA', 0, b'ven packages wake quickly. regu'], [6, b'FRANCE', 3, b'refully final requests. regular, ironi'], [7, b'GERMANY', 3, b'l platelets. regular accounts x-ray: unusual, regular acco'], [8, b'INDIA', 2, b'ss excuses cajole slyly across the packages. deposits print aroun'], [9, b'INDONESIA', 2, b' slyly express asymptotes. regular deposits haggle slyly. carefully ironic hockey players sleep blithely. 
carefull'], [10, b'IRAN', 4, b'efully alongside of the slyly final dependencies. '], [11, b'IRAQ', 4, b'nic deposits boost atop the quickly final requests? quickly regula'], [12, b'JAPAN', 2, b'ously. final, express gifts cajole a'], [13, b'JORDAN', 4, b'ic deposits are blithely about the carefully regular pa'], [14, b'KENYA', 0, b' pending excuses haggle furiously deposits. pending, express pinto beans wake fluffily past t'], [15, b'MOROCCO', 0, b'rns. blithely bold courts among the closely regular packages use furiously bold platelets?'], [16, b'MOZAMBIQUE', 0, b's. ironic, unusual asymptotes wake blithely r'], [17, b'PERU', 1, b'platelets. blithely pending dependencies use fluffily across the even pinto beans. carefully silent accoun'], [18, b'CHINA', 2, b'c dependencies. furiously express notornis sleep slyly regular accounts. ideas sleep. depos'], [19, b'ROMANIA', 3, b'ular asymptotes are about the furious multipliers. express dependencies nag above the ironically ironic account'], [20, b'SAUDI ARABIA', 4, b'ts. silent requests haggle. closely express packages sleep across the blithely'], [21, b'VIETNAM', 2, b'hely enticingly express accounts. even, final '], [22, b'RUSSIA', 3, b' requests against the platelets use never according to the quickly regular pint'], [23, b'UNITED KINGDOM', 3, b'eans boost carefully special requests. accounts are. carefull'], [24, b'UNITED STATES', 1, b'y final packages. slow foxes cajole quickly. quickly silent platelets breach ironic accounts. unusual pinto be'], ] class PluginParquetTestCase(unittest.TestCase): plugin_name = 'parquet' filename = 'tests/data/nation.dict.parquet' def test_imports(self): self.assertIs(rows.import_from_parquet, rows.plugins.plugin_parquet.import_from_parquet) @mock.patch('rows.plugins.plugin_parquet.create_table') def test_import_from_parquet_uses_create_table(self, mocked_create_table): mocked_create_table.return_value = 42 kwargs = {'some_key': 123, 'other': 456, } result = rows.import_from_parquet(self.filename, **kwargs) self.assertTrue(mocked_create_table.called) self.assertEqual(mocked_create_table.call_count, 1) self.assertEqual(result, 42) call = mocked_create_table.call_args kwargs['force_types'] = OrderedDict([ ('nation_key', rows.fields.IntegerField), ('name', rows.fields.BinaryField), ('region_key', rows.fields.IntegerField), ('comment_col', rows.fields.BinaryField) ]) kwargs['meta'] = {'imported_from': 'parquet', 'filename': self.filename, } self.assertEqual(call[1], kwargs) @mock.patch('rows.plugins.plugin_parquet.create_table') def test_import_from_parquet_retrieve_desired_data(self, mocked_create_table): mocked_create_table.return_value = 42 # import using filename table = rows.import_from_parquet(self.filename) args = mocked_create_table.call_args[0][0] self.assertEqual(args, DATA) # TODO: test all supported field types rows-0.3.1/tests/tests_plugin_sqlite.py000066400000000000000000000176001310400316700203210ustar00rootroot00000000000000# coding: utf-8 # Copyright 2014-2016 Álvaro Justen # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. 
# # You should have received a copy of the GNU General Public License # along with this program. If not, see . from __future__ import unicode_literals import sqlite3 import tempfile import unittest from collections import OrderedDict import mock import rows import rows.plugins.sqlite import rows.plugins.utils import tests.utils as utils from rows import fields class PluginSqliteTestCase(utils.RowsTestMixIn, unittest.TestCase): plugin_name = 'sqlite' file_extension = 'sqlite' filename = 'tests/data/all-field-types.sqlite' assert_meta_encoding = False override_fields = {'percent_column': fields.FloatField, 'bool_column': fields.IntegerField, } # SQLite does not support "Decimal" type, so `PercentField` will be # identified as a float and also does not support "boolean" type, so it's # saved as integer internally def test_imports(self): self.assertIs(rows.import_from_sqlite, rows.plugins.sqlite.import_from_sqlite) self.assertIs(rows.export_to_sqlite, rows.plugins.sqlite.export_to_sqlite) @mock.patch('rows.plugins.sqlite.create_table') def test_import_from_sqlite_uses_create_table(self, mocked_create_table): mocked_create_table.return_value = 42 kwargs = {'encoding': 'test', 'some_key': 123, 'other': 456, } result = rows.import_from_sqlite(self.filename, **kwargs) self.assertTrue(mocked_create_table.called) self.assertEqual(mocked_create_table.call_count, 1) self.assertEqual(result, 42) call = mocked_create_table.call_args kwargs['meta'] = {'imported_from': 'sqlite', 'filename': self.filename, } self.assertEqual(call[1], kwargs) @mock.patch('rows.plugins.sqlite.create_table') def test_import_from_sqlite_retrieve_desired_data(self, mocked_create_table): mocked_create_table.return_value = 42 # import using filename table_1 = rows.import_from_sqlite(self.filename) call_args = mocked_create_table.call_args_list[0] self.assert_create_table_data(call_args) # import using connection connection = sqlite3.connect(self.filename) table_2 = rows.import_from_sqlite(connection) call_args = mocked_create_table.call_args_list[1] self.assert_create_table_data(call_args, filename=connection) connection.close() def test_sqlite_injection(self): connection = rows.export_to_sqlite(utils.table, ':memory:') with self.assertRaises(ValueError): rows.import_from_sqlite(connection, table_name='table1", "sqlite_master') with self.assertRaises(ValueError): rows.export_to_sqlite(utils.table, ':memory:', table_name='table1", "sqlite_master') def test_export_to_sqlite_filename(self): # TODO: may test file contents temp = tempfile.NamedTemporaryFile(delete=False) self.files_to_delete.append(temp.name) rows.export_to_sqlite(utils.table, temp.name) table = rows.import_from_sqlite(temp.name) self.assert_table_equal(table, utils.table) def test_export_to_sqlite_connection(self): # TODO: may test file contents temp = tempfile.NamedTemporaryFile(delete=False, mode='wb') self.files_to_delete.append(temp.name) connection = sqlite3.connect(temp.name) rows.export_to_sqlite(utils.table, connection) connection.close() table = rows.import_from_sqlite(temp.name) self.assert_table_equal(table, utils.table) def test_export_to_sqlite_create_unique_table_name(self): # TODO: may test file contents temp = tempfile.NamedTemporaryFile(delete=False) self.files_to_delete.append(temp.name) first_table = utils.table second_table = utils.table + utils.table rows.export_to_sqlite(first_table, temp.name) # table1 rows.export_to_sqlite(second_table, temp.name) # table2 result_first_table = rows.import_from_sqlite(temp.name, table_name='table1') 
result_second_table = rows.import_from_sqlite(temp.name, table_name='table2') self.assert_table_equal(result_first_table, first_table) self.assert_table_equal(result_second_table, second_table) def test_export_to_sqlite_forcing_table_name_appends_rows(self): # TODO: may test file contents temp = tempfile.NamedTemporaryFile(delete=False) self.files_to_delete.append(temp.name) rows.export_to_sqlite(utils.table, temp.name, table_name='rows') rows.export_to_sqlite(utils.table, temp.name, table_name='rows') result_table = rows.import_from_sqlite(temp.name, table_name='rows') self.assertEqual(len(result_table), 2 * len(utils.table)) self.assert_table_equal(result_table, utils.table + utils.table) @mock.patch('rows.plugins.sqlite.prepare_to_export') def test_export_to_sqlite_uses_prepare_to_export(self, mocked_prepare_to_export): temp = tempfile.NamedTemporaryFile(delete=False) self.files_to_delete.append(temp.name) encoding = 'iso-8859-15' kwargs = {'test': 123, 'parameter': 3.14, } mocked_prepare_to_export.return_value = \ iter(rows.plugins.utils.prepare_to_export(utils.table)) rows.export_to_sqlite(utils.table, temp.name, encoding=encoding, **kwargs) self.assertTrue(mocked_prepare_to_export.called) self.assertEqual(mocked_prepare_to_export.call_count, 1) call = mocked_prepare_to_export.call_args self.assertEqual(call[0], (utils.table, )) kwargs['encoding'] = encoding self.assertEqual(call[1], kwargs) def test_issue_170(self): temp = tempfile.NamedTemporaryFile(delete=False) self.files_to_delete.append(temp.name) table = rows.Table(fields= OrderedDict([('intvalue', rows.fields.IntegerField), ('floatvalue', rows.fields.FloatField)])) table.append({'intvalue': 42, 'floatvalue': 3.14}) table.append({'intvalue': None, 'floatvalue': None}) # should not raise an exception rows.export_to_sqlite(table, temp.name) def test_issue_168(self): temp = tempfile.NamedTemporaryFile(delete=False) filename = '{}.{}'.format(temp.name, self.file_extension) self.files_to_delete.append(filename) table = rows.Table(fields= OrderedDict([('jsoncolumn', rows.fields.JSONField)])) table.append({'jsoncolumn': '{"python": 42}'}) rows.export_to_sqlite(table, filename) table2 = rows.import_from_sqlite(filename) self.assert_table_equal(table, table2) def test_import_from_sqlite_query_args(self): connection = rows.export_to_sqlite(utils.table, ':memory:') table = rows.import_from_sqlite(connection, query='SELECT * FROM table1 WHERE float_column > ?', query_args=(3, )) for row in table: self.assertTrue(row.float_column > 3) rows-0.3.1/tests/tests_plugin_txt.py000066400000000000000000000134271310400316700176420ustar00rootroot00000000000000# coding: utf-8 # Copyright 2014-2015 Álvaro Justen # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program. If not, see . 
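# Usage sketch (illustrative only; `table` is a placeholder): as with the
# other plugins, passing no filename or file object makes the export
# function return the rendered output, which for this plugin is a unicode
# string:
#
#     import rows
#     text = rows.export_to_txt(table)
#
# `test_export_to_text_should_return_unicode` below asserts the returned
# type.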
from __future__ import unicode_literals import tempfile import unittest from collections import OrderedDict import mock import six import rows import rows.plugins.txt import tests.utils as utils class PluginTxtTestCase(utils.RowsTestMixIn, unittest.TestCase): plugin_name = 'txt' file_extension = 'txt' filename = 'tests/data/all-field-types.txt' encoding = 'utf-8' assert_meta_encoding = True def test_imports(self): self.assertIs(rows.import_from_txt, rows.plugins.txt.import_from_txt) self.assertIs(rows.export_to_txt, rows.plugins.txt.export_to_txt) @mock.patch('rows.plugins.txt.create_table') def test_import_from_txt_uses_create_table(self, mocked_create_table): mocked_create_table.return_value = 42 kwargs = {'some_key': 123, 'other': 456, } result = rows.import_from_txt(self.filename, encoding=self.encoding, **kwargs) self.assertTrue(mocked_create_table.called) self.assertEqual(mocked_create_table.call_count, 1) self.assertEqual(result, 42) call = mocked_create_table.call_args kwargs['meta'] = {'imported_from': 'txt', 'filename': self.filename, 'encoding': self.encoding,} self.assertEqual(call[1], kwargs) @mock.patch('rows.plugins.txt.create_table') def test_import_from_txt_retrieve_desired_data(self, mocked_create_table): mocked_create_table.return_value = 42 # import using filename table_1 = rows.import_from_txt(self.filename) call_args = mocked_create_table.call_args_list[0] self.assert_create_table_data(call_args) # import using fobj with open(self.filename, mode='rb') as fobj: table_2 = rows.import_from_txt(fobj) call_args = mocked_create_table.call_args_list[1] self.assert_create_table_data(call_args) @mock.patch('rows.plugins.txt.serialize') def test_export_to_txt_uses_serialize(self, mocked_serialize): temp = tempfile.NamedTemporaryFile(delete=False) self.files_to_delete.append(temp.name) kwargs = {'test': 123, 'parameter': 3.14, } mocked_serialize.return_value = iter([utils.table.fields.keys()]) rows.export_to_txt(utils.table, temp.name, encoding=self.encoding, **kwargs) self.assertTrue(mocked_serialize.called) self.assertEqual(mocked_serialize.call_count, 1) call = mocked_serialize.call_args self.assertEqual(call[0], (utils.table, )) self.assertEqual(call[1], kwargs) @mock.patch('rows.plugins.txt.export_data') def test_export_to_txt_uses_export_data(self, mocked_export_data): temp = tempfile.NamedTemporaryFile(delete=False) self.files_to_delete.append(temp.name) kwargs = {'test': 123, 'parameter': 3.14, } mocked_export_data.return_value = 42 result = rows.export_to_txt(utils.table, temp.name, encoding=self.encoding, **kwargs) self.assertTrue(mocked_export_data.called) self.assertEqual(mocked_export_data.call_count, 1) self.assertEqual(result, 42) call = mocked_export_data.call_args self.assertEqual(call[0][0], temp.name) self.assertEqual(call[1], {'mode': 'wb'}) def test_export_to_txt_filename(self): # TODO: may test file contents temp = tempfile.NamedTemporaryFile(delete=False) self.files_to_delete.append(temp.name) rows.export_to_txt(utils.table, temp.name, encoding='utf-8') table = rows.import_from_txt(temp.name, encoding='utf-8') self.assert_table_equal(table, utils.table) with open(temp.name, mode='rb') as fobj: content = fobj.read() self.assertEqual(content[-10:].count(b'\n'), 1) def test_export_to_txt_fobj(self): # TODO: may test with codecs.open passing an encoding # TODO: may test file contents temp = tempfile.NamedTemporaryFile(delete=False) self.files_to_delete.append(temp.name) rows.export_to_txt(utils.table, temp.file, encoding='utf-8') table = 
rows.import_from_txt(temp.name, encoding='utf-8') self.assert_table_equal(table, utils.table) def test_issue_168(self): temp = tempfile.NamedTemporaryFile(delete=False) filename = '{}.{}'.format(temp.name, self.file_extension) self.files_to_delete.append(filename) table = rows.Table(fields= OrderedDict([('jsoncolumn', rows.fields.JSONField)])) table.append({'jsoncolumn': '{"python": 42}'}) rows.export_to_txt(table, filename, encoding='utf-8') table2 = rows.import_from_txt(filename, encoding='utf-8') self.assert_table_equal(table, table2) def test_export_to_text_should_return_unicode(self): result = rows.export_to_txt(utils.table) self.assertEqual(type(result), six.text_type) rows-0.3.1/tests/tests_plugin_utils.py000066400000000000000000000353321310400316700201620ustar00rootroot00000000000000# coding: utf-8 # Copyright 2014-2015 Álvaro Justen # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program. If not, see . from __future__ import unicode_literals import itertools import random import tempfile import types import unittest from collections import OrderedDict import mock import six import rows import rows.plugins.utils as plugins_utils from rows import fields import tests.utils as utils class GenericUtilsTestCase(unittest.TestCase): def test_slug(self): self.assertEqual(plugins_utils.slug('Álvaro Justen'), 'alvaro_justen') self.assertEqual(plugins_utils.slug("Moe's Bar"), 'moes_bar') self.assertEqual(plugins_utils.slug("-----te-----st------"), 'te_st') # As in self.assertEqual( plugins_utils.slug('Query Occurrence"( % ),"First Seen'), 'query_occurrence_first_seen') self.assertEqual(plugins_utils.slug(' ÁLVARO justen% '), 'alvaro_justen') def test_ipartition(self): iterable = [1, 2, 3, 4, 5, 6, 7, 8, 9, 10] result = plugins_utils.ipartition(iterable, 3) self.assertEqual(type(result), types.GeneratorType) self.assertEqual(list(result), [[1, 2, 3], [4, 5, 6], [7, 8, 9], [10]]) result = plugins_utils.ipartition(iterable, 2) self.assertEqual(type(result), types.GeneratorType) self.assertEqual(list(result), [[1, 2], [3, 4], [5, 6], [7, 8], [9, 10]]) def possible_field_names_errors(error_fields): error_fields = ['"{}"'.format(field_name) for field_name in error_fields] fields_permutations = itertools.permutations(error_fields, len(error_fields)) fields_permutations_str = [', '.join(field_names) for field_names in fields_permutations] return ['Invalid field names: {}'.format(field_names) for field_names in fields_permutations_str] class PluginUtilsTestCase(utils.RowsTestMixIn, unittest.TestCase): def test_create_table_skip_header(self): field_types = OrderedDict([('integer', fields.IntegerField), ('string', fields.TextField),]) data = [['1', 'Álvaro'], ['2', 'turicas'], ['3', 'Justen']] table_1 = plugins_utils.create_table(data, fields=field_types, skip_header=True) table_2 = plugins_utils.create_table(data, fields=field_types, skip_header=False) self.assertEqual(field_types, table_1.fields) self.assertEqual(table_1.fields, table_2.fields) self.assertEqual(len(table_1), 2) 
self.assertEqual(len(table_2), 3) first_row = {'integer': 1, 'string': 'Álvaro'} second_row = {'integer': 2, 'string': 'turicas'} third_row = {'integer': 3, 'string': 'Justen'} self.assertEqual(dict(table_1[0]._asdict()), second_row) self.assertEqual(dict(table_2[0]._asdict()), first_row) self.assertEqual(dict(table_1[1]._asdict()), third_row) self.assertEqual(dict(table_2[1]._asdict()), second_row) self.assertEqual(dict(table_2[2]._asdict()), third_row) def test_create_table_import_fields(self): header = ['field1', 'field2', 'field3'] table_rows = [['1', 3.14, 'Álvaro'], ['2', 2.71, 'turicas'], ['3', 1.23, 'Justen']] table = plugins_utils.create_table([header] + table_rows, import_fields=None) self.assertEqual(list(table.fields.keys()), header) self.assertEqual(table[0].field1, 1) self.assertEqual(table[0].field2, 3.14) self.assertEqual(table[0].field3, 'Álvaro') import_fields = ['field3', 'field2'] table = plugins_utils.create_table([header] + table_rows, import_fields=import_fields) self.assertEqual(list(table.fields.keys()), import_fields) self.assertEqual(table[0]._asdict(), OrderedDict([('field3', 'Álvaro'), ('field2', 3.14)])) def test_create_table_import_fields_dont_exist(self): header = ['field1', 'field2', 'field3'] table_rows = [['1', 3.14, 'Álvaro'], ['2', 2.71, 'turicas'], ['3', 1.23, 'Justen']] error_fields = ['doesnt_exist', 'ruby'] import_fields = list(header)[:-1] + error_fields with self.assertRaises(ValueError) as exception_context: plugins_utils.create_table([header] + table_rows, import_fields=import_fields) self.assertIn(exception_context.exception.args[0], possible_field_names_errors(error_fields)) def test_create_table_repeated_field_names(self): header = ['first', 'first', 'first'] table_rows = [['1', 3.14, 'Álvaro'], ['2', 2.71, 'turicas'], ['3', 1.23, 'Justen']] table = plugins_utils.create_table([header] + table_rows) self.assertEqual(list(table.fields.keys()), ['first', 'first_2', 'first_3']) self.assertEqual(table[0].first, 1) self.assertEqual(table[0].first_2, 3.14) self.assertEqual(table[0].first_3, 'Álvaro') header = ['field', '', 'field'] table_rows = [['1', 3.14, 'Álvaro'], ['2', 2.71, 'turicas'], ['3', 1.23, 'Justen']] table = plugins_utils.create_table([header] + table_rows) self.assertEqual(list(table.fields.keys()), ['field', 'field_1', 'field_2']) self.assertEqual(table[0].field, 1) self.assertEqual(table[0].field_1, 3.14) self.assertEqual(table[0].field_2, 'Álvaro') def test_create_table_empty_data(self): header = ['first', 'first', 'first'] table_rows = [] table = plugins_utils.create_table([header] + table_rows) self.assertEqual(list(table.fields.keys()), ['first', 'first_2', 'first_3']) self.assertEqual(len(table), 0) def test_create_table_force_types(self): header = ['field1', 'field2', 'field3'] table_rows = [['1', '3.14', 'Álvaro'], ['2', '2.71', 'turicas'], ['3', '1.23', 'Justen']] force_types = {'field2': rows.fields.DecimalField} table = plugins_utils.create_table([header] + table_rows, force_types=force_types) for field_name, field_type in force_types.items(): self.assertEqual(table.fields[field_name], field_type) def test_prepare_to_export_all_fields(self): result = plugins_utils.prepare_to_export(utils.table, export_fields=None) self.assertEqual(list(utils.table.fields.keys()), next(result)) for row in utils.table._rows: self.assertEqual(row, next(result)) with self.assertRaises(StopIteration): next(result) def test_prepare_to_export_some_fields(self): field_names = list(utils.table.fields.keys()) number_of_fields = random.randint(2, 
len(field_names) - 1) some_fields = [field_names[index] for index in range(number_of_fields)] random.shuffle(some_fields) result = plugins_utils.prepare_to_export(utils.table, export_fields=some_fields) self.assertEqual(some_fields, next(result)) for row in utils.table: expected_row = [getattr(row, field_name) for field_name in some_fields] self.assertEqual(expected_row, next(result)) with self.assertRaises(StopIteration): next(result) def test_prepare_to_export_some_fields_dont_exist(self): field_names = list(utils.table.fields.keys()) error_fields = ['does_not_exist', 'java'] export_fields = field_names + error_fields result = plugins_utils.prepare_to_export(utils.table, export_fields=export_fields) with self.assertRaises(ValueError) as exception_context: next(result) self.assertIn(exception_context.exception.args[0], possible_field_names_errors(error_fields)) def test_prepare_to_export_with_FlexibleTable(self): flexible = rows.FlexibleTable() for row in utils.table: flexible.append(row._asdict()) field_names = list(flexible.fields.keys()) prepared = plugins_utils.prepare_to_export(flexible) self.assertEqual(next(prepared), field_names) for row, expected_row in zip(prepared, flexible._rows): values = [expected_row[field_name] for field_name in field_names] self.assertEqual(values, row) def test_prepare_to_export_with_FlexibleTable_and_export_fields(self): flexible = rows.FlexibleTable() for row in utils.table: # conversion to text_type is needed on Python 2 since namedtuples' # keys are bytes, not unicode flexible.append({six.text_type(key): value for key, value in row._asdict().items()}) field_names = list(flexible.fields.keys()) export_fields = field_names[:len(field_names) // 2] prepared = plugins_utils.prepare_to_export(flexible, export_fields=export_fields) self.assertEqual(next(prepared), export_fields) for row, expected_row in zip(prepared, flexible._rows): values = [expected_row[field_name] for field_name in export_fields] self.assertEqual(values, row) def test_prepare_to_export_wrong_obj_type(self): '''`prepare_to_export` raises exception if obj isn't `*Table`''' expected_message = 'Table type not recognized' with self.assertRaises(ValueError) as exception_context: next(plugins_utils.prepare_to_export(1)) self.assertEqual(exception_context.exception.args[0], expected_message) with self.assertRaises(ValueError) as exception_context: next(plugins_utils.prepare_to_export(42.0)) self.assertEqual(exception_context.exception.args[0], expected_message) with self.assertRaises(ValueError) as exception_context: next(plugins_utils.prepare_to_export([list('abc'), [1, 2, 3]])) self.assertEqual(exception_context.exception.args[0], expected_message) @mock.patch('rows.plugins.utils.prepare_to_export', return_value=iter([[], [], []])) def test_serialize_should_call_prepare_to_export(self, mocked_prepare_to_export): table = utils.table kwargs = {'export_fields': 123, 'other_parameter': 3.14, } result = plugins_utils.serialize(table, **kwargs) self.assertFalse(mocked_prepare_to_export.called) field_names, table_rows = next(result), list(result) self.assertTrue(mocked_prepare_to_export.called) self.assertEqual(mocked_prepare_to_export.call_count, 1) self.assertEqual(mock.call(table, **kwargs), mocked_prepare_to_export.call_args) def test_serialize(self): result = plugins_utils.serialize(utils.table) field_types = list(utils.table.fields.values()) self.assertEqual(next(result), list(utils.table.fields.keys())) for row, expected_row in zip(result, 
utils.table._rows): values = [field_type.serialize(value) for field_type, value in zip(field_types, expected_row)] self.assertEqual(values, row) def test_make_header_should_add_underscore_if_starts_with_number(self): result = plugins_utils.make_header(['123', '456', '123']) expected_result = ['field_123', 'field_456', 'field_123_2'] self.assertEqual(result, expected_result) def test_make_header_should_not_ignore_permit_not(self): result = plugins_utils.make_header(['abc', '^qwe', 'rty'], permit_not=True) expected_result = ['abc', '^qwe', 'rty'] self.assertEqual(result, expected_result) def test_make_unique_name(self): name = 'test' existing_names = [] name_format = '{index}_{name}' result = plugins_utils.make_unique_name(name, existing_names, name_format) self.assertEqual(result, name) existing_names = ['test'] result = plugins_utils.make_unique_name(name, existing_names, name_format) self.assertEqual(result, '2_test') existing_names = ['test', '2_test', '3_test', '5_test'] result = plugins_utils.make_unique_name(name, existing_names, name_format) self.assertEqual(result, '4_test') existing_names = ['test', '2_test', '3_test', '5_test'] result = plugins_utils.make_unique_name(name, existing_names, name_format, start=1) self.assertEqual(result, '1_test') def test_export_data(self): data = 'python rules'.encode('utf-8') temp = tempfile.NamedTemporaryFile(delete=False) self.files_to_delete.append(temp.name) filename_or_fobj = temp.file result = plugins_utils.export_data(filename_or_fobj, data) temp.file.seek(0) output = temp.file.read() self.assertIs(result, temp.file) self.assertEqual(output, data) filename_or_fobj = None result = plugins_utils.export_data(filename_or_fobj, data) self.assertIs(result, data) # TODO: test make_header # TODO: test all features of create_table # TODO: test if error is raised if len(row) != len(fields) # TODO: test get_fobj_and_filename (BytesIO should return filename = None) rows-0.3.1/tests/tests_plugin_xls.py000066400000000000000000000125171310400316700176300ustar00rootroot00000000000000# coding: utf-8 # Copyright 2014-2016 Álvaro Justen # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>.
from __future__ import unicode_literals import datetime import tempfile import time import unittest from collections import OrderedDict import mock import rows import rows.fields as fields import rows.plugins.xls import tests.utils as utils def date_to_datetime(value): return datetime.datetime.fromtimestamp(time.mktime(value.timetuple())) class PluginXlsTestCase(utils.RowsTestMixIn, unittest.TestCase): plugin_name = 'xls' file_extension = 'xls' filename = 'tests/data/all-field-types.xls' assert_meta_encoding = False def test_imports(self): self.assertIs(rows.import_from_xls, rows.plugins.xls.import_from_xls) self.assertIs(rows.export_to_xls, rows.plugins.xls.export_to_xls) @mock.patch('rows.plugins.xls.create_table') def test_import_from_xls_uses_create_table(self, mocked_create_table): mocked_create_table.return_value = 42 kwargs = {'some_key': 123, 'other': 456, } result = rows.import_from_xls(self.filename, **kwargs) self.assertTrue(mocked_create_table.called) self.assertEqual(mocked_create_table.call_count, 1) self.assertEqual(result, 42) call = mocked_create_table.call_args kwargs['meta'] = {'imported_from': 'xls', 'filename': self.filename, 'sheet_name': 'Sheet1', } self.assertEqual(call[1], kwargs) @mock.patch('rows.plugins.xls.create_table') def test_import_from_xls_retrieve_desired_data(self, mocked_create_table): mocked_create_table.return_value = 42 # import using filename table_1 = rows.import_from_xls(self.filename) call_args = mocked_create_table.call_args_list[0] self.assert_create_table_data(call_args, expected_meta={'imported_from': 'xls', 'filename': self.filename, 'sheet_name': 'Sheet1',}) # import using fobj with open(self.filename, 'rb') as fobj: table_2 = rows.import_from_xls(fobj) call_args = mocked_create_table.call_args_list[1] self.assert_create_table_data(call_args, expected_meta={'imported_from': 'xls', 'filename': self.filename, 'sheet_name': 'Sheet1',}) def test_export_to_xls_filename(self): # TODO: may test file contents temp = tempfile.NamedTemporaryFile(delete=False) self.files_to_delete.append(temp.name) rows.export_to_xls(utils.table, temp.name) table = rows.import_from_xls(temp.name) self.assert_table_equal(table, utils.table) temp.file.seek(0) result = temp.file.read() export_in_memory = rows.export_to_xls(utils.table, None) self.assertEqual(result, export_in_memory) def test_export_to_xls_fobj(self): # TODO: may test with codecs.open passing an encoding # TODO: may test file contents temp = tempfile.NamedTemporaryFile(delete=False, mode='wb') self.files_to_delete.append(temp.name) rows.export_to_xls(utils.table, temp.file) temp.file.close() table = rows.import_from_xls(temp.name) self.assert_table_equal(table, utils.table) @mock.patch('rows.plugins.xls.prepare_to_export') def test_export_to_xls_uses_prepare_to_export(self, mocked_prepare_to_export): temp = tempfile.NamedTemporaryFile(delete=False) self.files_to_delete.append(temp.name) encoding = 'iso-8859-15' kwargs = {'test': 123, 'parameter': 3.14, } mocked_prepare_to_export.return_value = \ iter([utils.table.fields.keys()]) rows.export_to_xls(utils.table, temp.name, encoding=encoding, **kwargs) self.assertTrue(mocked_prepare_to_export.called) self.assertEqual(mocked_prepare_to_export.call_count, 1) call = mocked_prepare_to_export.call_args self.assertEqual(call[0], (utils.table, )) kwargs['encoding'] = encoding self.assertEqual(call[1], kwargs) def test_issue_168(self): temp = tempfile.NamedTemporaryFile(delete=False) filename = '{}.{}'.format(temp.name, self.file_extension) 
self.files_to_delete.append(filename) table = rows.Table(fields=OrderedDict([('jsoncolumn', rows.fields.JSONField)])) table.append({'jsoncolumn': '{"python": 42}'}) rows.export_to_xls(table, filename) table2 = rows.import_from_xls(filename) self.assert_table_equal(table, table2) rows-0.3.1/tests/tests_plugin_xlsx.py000066400000000000000000000125241310400316700200160ustar00rootroot00000000000000# coding: utf-8 # Copyright 2014-2015 Álvaro Justen # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. from __future__ import unicode_literals import tempfile import unittest from collections import OrderedDict from io import BytesIO import mock import rows import rows.plugins.xlsx import tests.utils as utils class PluginXlsxTestCase(utils.RowsTestMixIn, unittest.TestCase): plugin_name = 'xlsx' file_extension = 'xlsx' filename = 'tests/data/all-field-types.xlsx' assert_meta_encoding = False def test_imports(self): self.assertIs(rows.import_from_xlsx, rows.plugins.xlsx.import_from_xlsx) self.assertIs(rows.export_to_xlsx, rows.plugins.xlsx.export_to_xlsx) @mock.patch('rows.plugins.xlsx.create_table') def test_import_from_xlsx_uses_create_table(self, mocked_create_table): mocked_create_table.return_value = 42 kwargs = {'encoding': 'iso-8859-15', 'some_key': 123, 'other': 456, } result = rows.import_from_xlsx(self.filename, **kwargs) self.assertTrue(mocked_create_table.called) self.assertEqual(mocked_create_table.call_count, 1) self.assertEqual(result, 42) call = mocked_create_table.call_args kwargs['meta'] = {'imported_from': 'xlsx', 'filename': self.filename, 'sheet_name': 'Sheet1',} self.assertEqual(call[1], kwargs) @mock.patch('rows.plugins.xlsx.create_table') def test_import_from_xlsx_retrieve_desired_data(self, mocked_create_table): mocked_create_table.return_value = 42 # import using filename table_1 = rows.import_from_xlsx(self.filename) call_args = mocked_create_table.call_args_list[0] self.assert_create_table_data(call_args, expected_meta={'imported_from': 'xlsx', 'filename': self.filename, 'sheet_name': 'Sheet1',}) # import using fobj with open(self.filename, 'rb') as fobj: table_2 = rows.import_from_xlsx(fobj) call_args = mocked_create_table.call_args_list[1] self.assert_create_table_data(call_args, expected_meta={'imported_from': 'xlsx', 'filename': self.filename, 'sheet_name': 'Sheet1',}) def test_export_to_xlsx_filename(self): temp = tempfile.NamedTemporaryFile() filename = temp.name + '.xlsx' temp.close() self.files_to_delete.append(filename) rows.export_to_xlsx(utils.table, filename) table = rows.import_from_xlsx(filename) self.assert_table_equal(table, utils.table) export_in_memory = rows.export_to_xlsx(utils.table, None) result_fobj = BytesIO() result_fobj.write(export_in_memory) result_fobj.seek(0) result_table = rows.import_from_xlsx(result_fobj) self.assert_table_equal(result_table, utils.table) def test_export_to_xlsx_fobj(self): temp = tempfile.NamedTemporaryFile() filename = temp.name + '.xlsx' temp.close() fobj = open(filename, 'wb')
self.files_to_delete.append(filename) rows.export_to_xlsx(utils.table, fobj) fobj.close() table = rows.import_from_xlsx(filename) self.assert_table_equal(table, utils.table) @mock.patch('rows.plugins.xlsx.prepare_to_export') def test_export_to_xlsx_uses_prepare_to_export(self, mocked_prepare_to_export): temp = tempfile.NamedTemporaryFile() filename = temp.name + '.xlsx' temp.file.close() self.files_to_delete.append(filename) kwargs = {'test': 123, 'parameter': 3.14, } mocked_prepare_to_export.return_value = \ iter([utils.table.fields.keys()]) rows.export_to_xlsx(utils.table, temp.name, **kwargs) self.assertTrue(mocked_prepare_to_export.called) self.assertEqual(mocked_prepare_to_export.call_count, 1) call = mocked_prepare_to_export.call_args self.assertEqual(call[0], (utils.table, )) self.assertEqual(call[1], kwargs) def test_issue_168(self): temp = tempfile.NamedTemporaryFile(delete=False) filename = '{}.{}'.format(temp.name, self.file_extension) self.files_to_delete.append(filename) table = rows.Table(fields=OrderedDict([('jsoncolumn', rows.fields.JSONField)])) table.append({'jsoncolumn': '{"python": 42}'}) rows.export_to_xlsx(table, filename) table2 = rows.import_from_xlsx(filename) self.assert_table_equal(table, table2) rows-0.3.1/tests/tests_plugin_xpath.py000066400000000000000000000135131310400316700201430ustar00rootroot00000000000000# coding: utf-8 # Copyright 2014-2016 Álvaro Justen # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>.
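# The tests below exercise two kinds of XPath expressions: `rows_xpath` selects one HTML node per table row, and each `fields_xpath` entry maps a field name to an XPath evaluated relative to that node; both must be text, not bytes (see test_xpath_must_be_text_type below).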
from __future__ import unicode_literals import tempfile import unittest from collections import OrderedDict from io import BytesIO import mock import rows import rows.plugins.xpath import tests.utils as utils class PluginXPathTestCase(utils.RowsTestMixIn, unittest.TestCase): filename = 'tests/data/ecuador-medios-radiodifusoras.html' encoding = 'utf-8' expected_data = 'tests/data/ecuador-medios-radiodifusoras.csv' assert_meta_encoding = True def setUp(self): rows_xpath = '//*[@class="entry-container"]/*[@class="row-fluid"]/*[@class="span6"]' fields_xpath = OrderedDict([ ('url', './/h2/a/@href'), ('name', './/h2/a/text()'), ('address', './/div[@class="spField field_direccion"]/text()'), ('phone', './/div[@class="spField field_telefono"]/text()'), ('website', './/div[@class="spField field_sitio_web"]/text()'), ('email', './/div[@class="spField field_email"]/text()'), ]) self.kwargs = {'rows_xpath': rows_xpath, 'fields_xpath': fields_xpath, } self.expected_table = rows.import_from_csv(self.expected_data) self.files_to_delete = [] def test_imports(self): self.assertIs(rows.import_from_xpath, rows.plugins.xpath.import_from_xpath) def test_import_from_xpath_filename(self): table = rows.import_from_xpath(self.filename, encoding=self.encoding, **self.kwargs) expected_meta = {'imported_from': 'xpath', 'filename': self.filename, 'encoding': self.encoding, } self.assertEqual(table.meta, expected_meta) temp = tempfile.NamedTemporaryFile(delete=False) self.files_to_delete.append(temp.name) fobj = temp.file rows.export_to_csv(table, fobj) fobj.seek(0) table = rows.import_from_csv(fobj) self.assert_table_equal(table, self.expected_table) def test_import_from_xpath_fobj(self): # TODO: may test with codecs.open passing an encoding with open(self.filename, mode='rb') as fobj: table = rows.import_from_xpath(fobj, encoding=self.encoding, **self.kwargs) expected_meta = {'imported_from': 'xpath', 'filename': self.filename, 'encoding': self.encoding, } self.assertEqual(table.meta, expected_meta) temp = tempfile.NamedTemporaryFile(delete=False) self.files_to_delete.append(temp.name) fobj = temp.file rows.export_to_csv(table, fobj) fobj.seek(0) table = rows.import_from_csv(fobj) self.assert_table_equal(table, self.expected_table) def test_import_from_xpath_unescape_and_extract_text(self): # sample markup (hrefs are illustrative); entities are kept escaped so the plugin's unescaping is exercised html = ''' <ul> <li><a href="/wiki/Abadia_de_Goi%C3%A1s">Abadia de Goi&aacute;s</a> (GO)</li> <li><a href="/wiki/Abadi%C3%A2nia">Abadi&acirc;nia</a> (GO)</li> </ul> '''.encode('utf-8') rows_xpath = '//ul/li' fields_xpath = OrderedDict([('name', './/text()'), ('link', './/a/@href')]) table = rows.import_from_xpath(BytesIO(html), rows_xpath=rows_xpath, fields_xpath=fields_xpath, encoding='utf-8') self.assertEqual(table[0].name, 'Abadia de Goiás (GO)') self.assertEqual(table[1].name, 'Abadiânia (GO)') @mock.patch('rows.plugins.xpath.create_table') def test_import_from_xpath_uses_create_table(self, mocked_create_table): mocked_create_table.return_value = 42 encoding = 'iso-8859-15' kwargs = {'some_key': 123, 'other': 456, } self.kwargs.update(kwargs) result = rows.import_from_xpath(self.filename, encoding=encoding, **self.kwargs) self.assertTrue(mocked_create_table.called) self.assertEqual(mocked_create_table.call_count, 1) self.assertEqual(result, 42) call = mocked_create_table.call_args kwargs['meta'] = {'imported_from': 'xpath', 'filename': self.filename, 'encoding': encoding, } self.assertEqual(call[1], kwargs) def test_xpath_must_be_text_type(self): with self.assertRaises(TypeError): rows.import_from_xpath(self.filename, encoding=self.encoding, rows_xpath=b'//div', fields_xpath={'f1': './/span'}) with self.assertRaises(TypeError): rows.import_from_xpath(self.filename,
encoding=self.encoding, rows_xpath='//div', fields_xpath={'f1': b'.//span'}) rows-0.3.1/tests/tests_table.py000066400000000000000000000357621310400316700165340ustar00rootroot00000000000000# coding: utf-8 # Copyright 2014-2015 Álvaro Justen # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. from __future__ import unicode_literals import datetime import math import time import unittest from collections import OrderedDict import six import rows import rows.fields as fields from rows.table import FlexibleTable, Table import tests.utils as utils binary_type_name = six.binary_type.__name__ class TableTestCase(unittest.TestCase): def setUp(self): self.table = Table(fields={'name': rows.fields.TextField, 'birthdate': rows.fields.DateField, }) self.first_row = {'name': 'Álvaro Justen', 'birthdate': datetime.date(1987, 4, 29)} self.table.append(self.first_row) self.table.append({'name': 'Somebody', 'birthdate': datetime.date(1990, 2, 1)}) self.table.append({'name': 'Douglas Adams', 'birthdate': '1952-03-11'}) def test_Table_is_present_on_main_namespace(self): self.assertIn('Table', dir(rows)) self.assertIs(Table, rows.Table) def test_table_iteration(self): # TODO: may test with all field types (using tests.utils.table) table_rows = [row for row in self.table] self.assertEqual(len(table_rows), 3) self.assertEqual(table_rows[0].name, 'Álvaro Justen') self.assertEqual(table_rows[0].birthdate, datetime.date(1987, 4, 29)) self.assertEqual(table_rows[1].name, 'Somebody') self.assertEqual(table_rows[1].birthdate, datetime.date(1990, 2, 1)) self.assertEqual(table_rows[2].name, 'Douglas Adams') self.assertEqual(table_rows[2].birthdate, datetime.date(1952, 3, 11)) def test_table_slicing(self): self.assertEqual(len(self.table[::2]), 2) self.assertEqual(self.table[::2][0].name, 'Álvaro Justen') def test_table_slicing_error(self): with self.assertRaises(ValueError) as context_manager: self.table[[1]] self.assertEqual(type(context_manager.exception), ValueError) def test_table_insert_row(self): self.table.insert(1, {'name': 'Grace Hopper', 'birthdate': datetime.date(1909, 12, 9)}) self.assertEqual(self.table[1].name, 'Grace Hopper') def test_table_append_error(self): # TODO: may mock these validations and test only on *Field tests with self.assertRaises(ValueError) as context_manager: self.table.append({'name': 'Álvaro Justen'.encode('utf-8'), 'birthdate': '1987-04-29'}) self.assertEqual(type(context_manager.exception), ValueError) self.assertEqual(context_manager.exception.args[0], 'Binary is not supported') with self.assertRaises(ValueError) as context_manager: self.table.append({'name': 'Álvaro Justen', 'birthdate': 'WRONG'}) self.assertEqual(type(context_manager.exception), ValueError) self.assertIn('does not match format', context_manager.exception.args[0]) def test_table_getitem_invalid_type(self): with self.assertRaises(ValueError) as exception_context: self.table[3.14] self.assertEqual(exception_context.exception.args[0], 'Unsupported key type: float') with
self.assertRaises(ValueError) as exception_context: self.table[b'name'] self.assertEqual(exception_context.exception.args[0], 'Unsupported key type: {}'.format(binary_type_name)) def test_table_getitem_column_doesnt_exist(self): with self.assertRaises(KeyError) as exception_context: self.table['doesnt-exist'] self.assertEqual(exception_context.exception.args[0], 'doesnt-exist') def test_table_getitem_column_happy_path(self): expected_values = ['Álvaro Justen', 'Somebody', 'Douglas Adams'] self.assertEqual(self.table['name'], expected_values) expected_values = [ datetime.date(1987, 4, 29), datetime.date(1990, 2, 1), datetime.date(1952, 3, 11)] self.assertEqual(self.table['birthdate'], expected_values) def test_table_setitem_row(self): self.first_row['name'] = 'turicas' self.first_row['birthdate'] = datetime.date(2000, 1, 1) self.table[0] = self.first_row self.assertEqual(self.table[0].name, 'turicas') self.assertEqual(self.table[0].birthdate, datetime.date(2000, 1, 1)) def test_field_names_and_types(self): self.assertEqual(self.table.field_names, list(self.table.fields.keys())) self.assertEqual(self.table.field_types, list(self.table.fields.values())) def test_table_setitem_column_happy_path_new_column(self): number_of_fields = len(self.table.fields) self.assertEqual(len(self.table), 3) self.table['user_id'] = [4, 5, 6] self.assertEqual(len(self.table), 3) self.assertEqual(len(self.table.fields), number_of_fields + 1) self.assertIn('user_id', self.table.fields) self.assertIs(self.table.fields['user_id'], rows.fields.IntegerField) self.assertEqual(self.table[0].user_id, 4) self.assertEqual(self.table[1].user_id, 5) self.assertEqual(self.table[2].user_id, 6) def test_table_setitem_column_happy_path_replace_column(self): number_of_fields = len(self.table.fields) self.assertEqual(len(self.table), 3) self.table['name'] = [4, 5, 6] # change values *and* type self.assertEqual(len(self.table), 3) self.assertEqual(len(self.table.fields), number_of_fields) self.assertIn('name', self.table.fields) self.assertIs(self.table.fields['name'], rows.fields.IntegerField) self.assertEqual(self.table[0].name, 4) self.assertEqual(self.table[1].name, 5) self.assertEqual(self.table[2].name, 6) def test_table_setitem_column_slug_field_name(self): self.assertNotIn('user_id', self.table.fields) self.table['User ID'] = [4, 5, 6] self.assertIn('user_id', self.table.fields) def test_table_setitem_column_invalid_length(self): number_of_fields = len(self.table.fields) self.assertEqual(len(self.table), 3) with self.assertRaises(ValueError) as exception_context: self.table['user_id'] = [4, 5] # list len should be 3 self.assertEqual(len(self.table), 3) self.assertEqual(len(self.table.fields), number_of_fields) self.assertEqual(exception_context.exception.args[0], 'Values length (2) should be the same as Table ' 'length (3)') def test_table_setitem_invalid_type(self): fields = self.table.fields.copy() self.assertEqual(len(self.table), 3) with self.assertRaises(ValueError) as exception_context: self.table[3.14] = [] self.assertEqual(len(self.table), 3) # should not add any row self.assertDictEqual(fields, self.table.fields) # should not add field self.assertEqual(exception_context.exception.args[0], 'Unsupported key type: float') with self.assertRaises(ValueError) as exception_context: self.table[b'some_value'] = [] self.assertEqual(len(self.table), 3) # should not add any row self.assertDictEqual(fields, self.table.fields) # should not add field self.assertEqual(exception_context.exception.args[0], 'Unsupported key type: 
{}'.format(binary_type_name)) def test_table_delitem_row(self): table_rows = [row for row in self.table] before = len(self.table) del self.table[0] after = len(self.table) self.assertEqual(after, before - 1) for row, expected_row in zip(self.table, table_rows[1:]): self.assertEqual(row, expected_row) def test_table_delitem_column_doesnt_exist(self): with self.assertRaises(KeyError) as exception_context: del self.table['doesnt-exist'] self.assertEqual(exception_context.exception.args[0], 'doesnt-exist') def test_table_delitem_column_happy_path(self): fields = self.table.fields.copy() self.assertEqual(len(self.table), 3) del self.table['name'] self.assertEqual(len(self.table), 3) # should not del any row self.assertEqual(len(self.table.fields), len(fields) - 1) self.assertDictEqual(dict(self.table[0]._asdict()), {'birthdate': datetime.date(1987, 4, 29)}) self.assertDictEqual(dict(self.table[1]._asdict()), {'birthdate': datetime.date(1990, 2, 1)}) self.assertDictEqual(dict(self.table[2]._asdict()), {'birthdate': datetime.date(1952, 3, 11)}) def test_table_delitem_column_invalid_type(self): fields = self.table.fields.copy() self.assertEqual(len(self.table), 3) with self.assertRaises(ValueError) as exception_context: del self.table[3.14] self.assertEqual(len(self.table), 3) # should not del any row self.assertDictEqual(fields, self.table.fields) # should not del field self.assertEqual(exception_context.exception.args[0], 'Unsupported key type: float') with self.assertRaises(ValueError) as exception_context: del self.table[b'name'] # 'name' actually exists self.assertEqual(len(self.table), 3) # should not del any row self.assertDictEqual(fields, self.table.fields) # should not del field self.assertEqual(exception_context.exception.args[0], 'Unsupported key type: {}'.format(binary_type_name)) def test_table_add(self): self.assertIs(self.table + 0, self.table) self.assertIs(0 + self.table, self.table) new_table = self.table + self.table self.assertEqual(new_table.fields, self.table.fields) self.assertEqual(len(new_table), 2 * len(self.table)) self.assertEqual(list(new_table), list(self.table) * 2) def test_table_add_error(self): with self.assertRaises(ValueError): self.table + 1 with self.assertRaises(ValueError): 1 + self.table def test_table_order_by(self): with self.assertRaises(ValueError): self.table.order_by('doesnt_exist') before = [row.birthdate for row in self.table] self.table.order_by('birthdate') after = [row.birthdate for row in self.table] self.assertNotEqual(before, after) self.assertEqual(sorted(before), after) self.table.order_by('-birthdate') final = [row.birthdate for row in self.table] self.assertEqual(final, list(reversed(after))) self.table.order_by('name') expected_rows = [ {'name': 'Douglas Adams', 'birthdate': datetime.date(1952, 3, 11)}, {'name': 'Somebody', 'birthdate': datetime.date(1990, 2, 1)}, {'name': 'Álvaro Justen', 'birthdate': datetime.date(1987, 4, 29)}] for expected_row, row in zip(expected_rows, self.table): self.assertEqual(expected_row, dict(row._asdict())) def test_table_repr(self): expected = '<rows.Table 2 fields, 3 rows>' self.assertEqual(expected, repr(self.table)) def test_table_add_time(self): '''rows.Table.__add__ should be constant time To test it we double table size for each round and then compare the standard deviation to the mean (it will be almost the mean if the algorithm is not fast enough and almost 10% of the mean if it's good).
''' rounds = [] table = utils.table for _ in range(10): start = time.time() table = table + table end = time.time() rounds.append(end - start) mean = sum(rounds) / len(rounds) stdev = math.sqrt((1.0 / (len(rounds) - 1)) * sum((value - mean) ** 2 for value in rounds)) self.assertTrue(0.2 * mean > stdev) class TestFlexibleTable(unittest.TestCase): def setUp(self): self.table = FlexibleTable() def test_FlexibleTable_is_present_on_main_namespace(self): self.assertIn('FlexibleTable', dir(rows)) self.assertIs(FlexibleTable, rows.FlexibleTable) def test_inheritance(self): self.assertTrue(issubclass(FlexibleTable, Table)) def test_flexible_append_detect_field_type(self): self.assertEqual(len(self.table.fields), 0) self.table.append({'a': 123, 'b': 3.14, }) self.assertEqual(self.table[0].a, 123) self.assertEqual(self.table[0].b, 3.14) self.assertEqual(self.table.fields['a'], fields.IntegerField) self.assertEqual(self.table.fields['b'], fields.FloatField) # Values are checked based on field types when appending with self.assertRaises(ValueError): self.table.append({'a': 'spam', 'b': 1.23}) # invalid value for 'a' with self.assertRaises(ValueError): self.table.append({'a': 42, 'b': 'ham'}) # invalid value for 'b' # Values are converted self.table.append({'a': '42', 'b': '2.71'}) self.assertEqual(self.table[1].a, 42) self.assertEqual(self.table[1].b, 2.71) def test_flexible_insert_row(self): self.table.append({'a': 123, 'b': 3.14, }) self.table.insert(0, {'a': 2357, 'b': 1123}) self.assertEqual(self.table[0].a, 2357) def test_flexible_update_row(self): self.table.append({'a': 123, 'b': 3.14, }) self.table[0] = {'a': 2357, 'b': 1123} self.assertEqual(self.table[0].a, 2357) def test_table_slicing(self): self.table.append({'a': 123, 'b': 3.14, }) self.table.append({'a': 2357, 'b': 1123}) self.table.append({'a': 8687, 'b': 834798}) self.assertEqual(len(self.table[::2]), 2) self.assertEqual(self.table[::2][0].a, 123) def test_table_slicing_error(self): self.table.append({'a': 123, 'b': 3.14, }) self.table.append({'a': 2357, 'b': 1123}) self.table.append({'a': 8687, 'b': 834798}) with self.assertRaises(ValueError) as context_manager: self.table[[1]] self.assertEqual(type(context_manager.exception), ValueError) rows-0.3.1/tests/tests_utils.py000066400000000000000000000046721310400316700166060ustar00rootroot00000000000000# coding: utf-8 # Copyright 2014-2015 Álvaro Justen # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>.
from __future__ import unicode_literals import tempfile import unittest import rows.utils import tests.utils as utils class UtilsTestCase(utils.RowsTestMixIn, unittest.TestCase): def assert_encoding(self, first, second): '''Assert encoding equality `iso-8859-1` should be detected as the same as `iso-8859-8` as described in (affects Debian and Fedora packaging) ''' self.assertEqual(first.lower().split('-')[:-1], second.lower().split('-')[:-1]) def test_local_file_sample_size(self): temp = tempfile.NamedTemporaryFile(delete=False) self.files_to_delete.append(temp.name) header = b'field1,field2,field3\r\n' row_data = b'non-ascii-field-1,non-ascii-field-2,non-ascii-field-3\r\n' encoding = 'iso-8859-1' temp.file.write(header) counter = len(header) increment = len(row_data) while counter <= 8192: temp.file.write(row_data) counter += increment temp.file.write('Álvaro,àáááããçc,ádfáffad\r\n'.encode(encoding)) temp.file.close() result = rows.utils.local_file(temp.name) self.assertEqual(result.uri, temp.name) self.assert_encoding(result.encoding, encoding) self.assertEqual(result.delete, False) # TODO: test detect_local_source # TODO: test detect_source # TODO: test download_file # TODO: test export_to_uri # TODO: test extension_by_plugin_name # TODO: test import_from_source # TODO: test import_from_uri # TODO: test local_file # TODO: test normalize_mime_type # TODO: test plugin_name_by_mime_type # TODO: test plugin_name_by_uri rows-0.3.1/tests/utils.py000066400000000000000000000300361310400316700153560ustar00rootroot00000000000000# coding: utf-8 # Copyright 2014-2016 Álvaro Justen # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>.
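# Shared fixtures for the whole suite: FIELDS and EXPECTED_ROWS below define the canonical "all field types" table that each plugin round-trips, and RowsTestMixIn supplies the per-type assert_* helpers the plugin test cases mix in.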
from __future__ import unicode_literals import copy import datetime import os from collections import OrderedDict from decimal import Decimal import six import rows.fields as fields from rows.table import Table NONE_VALUES = list(fields.NULL) + ['', None] FIELDS = OrderedDict([ ('bool_column', fields.BoolField), ('integer_column', fields.IntegerField), ('float_column', fields.FloatField), ('decimal_column', fields.FloatField), ('percent_column', fields.PercentField), ('date_column', fields.DateField), ('datetime_column', fields.DatetimeField), ('unicode_column', fields.TextField), ]) FIELD_NAMES = list(FIELDS.keys()) EXPECTED_ROWS = [ { 'float_column': 3.141592, 'decimal_column': 3.141592, 'bool_column': True, 'integer_column': 1, 'date_column': datetime.date(2015, 1, 1), 'datetime_column': datetime.datetime(2015, 8, 18, 15, 10), 'percent_column': Decimal('0.01'), 'unicode_column': 'Álvaro', }, { 'float_column': 1.234, 'decimal_column': 1.234, 'bool_column': False, 'integer_column': 2, 'date_column': datetime.date(1999, 2, 3), 'datetime_column': datetime.datetime(1999, 2, 3, 0, 1, 2), 'percent_column': Decimal('0.1169'), 'unicode_column': 'àáãâä¹²³', }, { 'float_column': 4.56, 'decimal_column': 4.56, 'bool_column': True, 'integer_column': 3, 'date_column': datetime.date(2050, 1, 2), 'datetime_column': datetime.datetime(2050, 1, 2, 23, 45, 31), 'percent_column': Decimal('0.12'), 'unicode_column': 'éèẽêë', }, { 'float_column': 7.89, 'decimal_column': 7.89, 'bool_column': False, 'integer_column': 4, 'date_column': datetime.date(2015, 8, 18), 'datetime_column': datetime.datetime(2015, 8, 18, 22, 21, 33), 'percent_column': Decimal('0.1364'), 'unicode_column': '~~~~', }, { 'float_column': 9.87, 'decimal_column': 9.87, 'bool_column': True, 'integer_column': 5, 'date_column': datetime.date(2015, 3, 4), 'datetime_column': datetime.datetime(2015, 3, 4, 16, 0, 1), 'percent_column': Decimal('0.1314'), 'unicode_column': 'álvaro', }, { 'float_column': 1.2345, 'decimal_column': 1.2345, 'bool_column': False, 'integer_column': 6, 'date_column': datetime.date(2015, 5, 6), 'datetime_column': datetime.datetime(2015, 5, 6, 12, 1, 2), 'percent_column': Decimal('0.02'), 'unicode_column': 'test', }, { 'float_column': None, 'decimal_column': None, 'bool_column': None, 'integer_column': None, 'date_column': None, 'datetime_column': None, 'percent_column': None, 'unicode_column': '', } ] table = Table(fields=FIELDS) for row in EXPECTED_ROWS: table.append(row) table._meta = {'test': 123} class RowsTestMixIn(object): maxDiff = None override_fields = None def setUp(self): self.files_to_delete = [] def tearDown(self): for filename in self.files_to_delete: if os.path.exists(filename): os.unlink(filename) def assert_table_equal(self, first, second): expected_fields = dict(second.fields) if self.override_fields is None: override_fields = {} else: override_fields = self.override_fields expected_fields = copy.deepcopy(expected_fields) for key, value in override_fields.items(): if key in expected_fields: expected_fields[key] = value self.assertDictEqual(dict(first.fields), expected_fields) self.assertEqual(len(first), len(second)) for first_row, second_row in zip(first, second): first_row = dict(first_row._asdict()) second_row = dict(second_row._asdict()) for field_name, field_type in expected_fields.items(): value = first_row[field_name] expected_value = second_row[field_name] if field_name in override_fields: expected_value = override_fields[field_name]\ .deserialize(expected_value) if float not in (type(value), 
type(expected_value)): self.assertEqual(value, expected_value, 'Field {} value mismatch'.format(field_name)) else: self.assertAlmostEqual(value, expected_value) def assert_file_contents_equal(self, first_filename, second_filename): with open(first_filename, 'rb') as fobj: first = fobj.read() with open(second_filename, 'rb') as fobj: second = fobj.read() self.assertEqual(first, second) def assert_create_table_data(self, call_args, field_ordering=True, filename=None, expected_meta=None): if filename is None: filename = self.filename kwargs = call_args[1] if expected_meta is None: expected_meta = {'imported_from': self.plugin_name, 'filename': filename,} if self.assert_meta_encoding: expected_meta['encoding'] = self.encoding self.assertEqual(kwargs['meta'], expected_meta) del kwargs['meta'] self.assert_table_data(call_args[0][0], args=[], kwargs=kwargs, field_ordering=field_ordering) def assert_table_data(self, data, args, kwargs, field_ordering): data = list(data) data[0] = list(data[0]) if field_ordering: self.assertEqual(data[0], FIELD_NAMES) for row_index, row in enumerate(data[1:]): for column_index, value in enumerate(row): field_name = FIELD_NAMES[column_index] expected_value = EXPECTED_ROWS[row_index][field_name] self.field_assert(field_name, expected_value, value, *args, **kwargs) else: self.assertEqual(set(data[0]), set(FIELD_NAMES)) for row_index, row in enumerate(data[1:]): for column_index, value in enumerate(row): field_name = data[0][column_index] expected_value = EXPECTED_ROWS[row_index][field_name] self.field_assert(field_name, expected_value, value, *args, **kwargs) # Fields asserts: input values we expect from plugins def field_assert(self, field_name, expected_value, value, *args, **kwargs): asserts = {'bool_column': self.assert_BoolField, 'integer_column': self.assert_IntegerField, 'float_column': self.assert_FloatField, 'decimal_column': self.assert_DecimalField, 'percent_column': self.assert_PercentField, 'date_column': self.assert_DateField, 'datetime_column': self.assert_DatetimeField, 'unicode_column': self.assert_TextField, } return asserts[field_name](expected_value, value, *args, **kwargs) def assert_BoolField(self, expected_value, value, *args, **kwargs): if expected_value is None: assert value is None or value.lower() in NONE_VALUES elif expected_value is True: assert str(value).lower() in ('true', b'true', 'yes', b'yes') elif expected_value is False: assert str(value).lower() in ('false', b'false', 'no', b'no') else: raise ValueError('expected_value is not True or False') def assert_IntegerField(self, expected_value, value, *args, **kwargs): if expected_value is None: assert value is None or value.lower() in NONE_VALUES else: self.assertIn(value, (expected_value, str(expected_value))) def assert_FloatField(self, expected_value, value, *args, **kwargs): if expected_value is None: assert value is None or value.lower() in NONE_VALUES elif type(value) != type(expected_value): self.assertEqual(value, str(expected_value)) else: self.assertAlmostEqual(expected_value, value, places=5) def assert_DecimalField(self, expected_value, value, *args, **kwargs): if expected_value is None: assert value is None or value.lower() in NONE_VALUES else: self.assert_FloatField(expected_value, value) def assert_PercentField(self, expected_value, value, *args, **kwargs): if expected_value is None: assert value is None or value.lower() in NONE_VALUES else: float_value = str(Decimal(expected_value) * 100)[:-2] if float_value.endswith('.'): float_value = float_value[:-1] possible_values = 
[] if '.' not in float_value: possible_values.append(str(int(float_value)) + '%') possible_values.append(str(int(float_value)) + '.00%') float_value = float(float_value) possible_values.extend([ six.text_type(float_value) + '%', six.text_type(float_value) + '.0%', six.text_type(float_value) + '.00%']) self.assertIn(value, possible_values) def assert_DateField(self, expected_value, value, *args, **kwargs): if expected_value is None: assert value is None or value.lower() in NONE_VALUES else: value = str(value) if value.endswith('00:00:00'): value = value[:-9] self.assertEqual(str(expected_value), value) def assert_DatetimeField(self, expected_value, value, *args, **kwargs): if expected_value is None: assert value is None or value.lower() in NONE_VALUES elif type(value) is datetime.datetime and \ type(expected_value) is datetime.datetime: # if both types are datetime, check delta # XLSX plugin does not have good precision and will change milliseconds delta_1 = expected_value - value delta_2 = value - expected_value self.assertTrue(str(delta_1).startswith('0:00:00') or str(delta_2).startswith('0:00:00')) else: # if not, convert values to string and verify they are equal value = str(value) self.assertEqual(str(expected_value).replace(' ', 'T'), value) def assert_TextField(self, expected_value, value, *args, **kwargs): if expected_value is None: assert value is None or value.lower() in NONE_VALUES elif expected_value == '': # Some plugins return `None` instead of empty strings for cells # with blank values and we don't have a way to differentiate assert value in (None, '') else: self.assertEqual(expected_value, value) rows-0.3.1/to-do/000077500000000000000000000000001310400316700135225ustar00rootroot00000000000000rows-0.3.1/to-do/plugin_mysql.py000066400000000000000000000137661310400316700166300ustar00rootroot00000000000000# coding: utf-8 # Copyright 2014 Álvaro Justen # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. import datetime import MySQLdb from .rows import Table from .utils import ipartition, slug __all__ = [ 'import_from_mysql', 'export_to_mysql'] # TODO: replace 'None' with '' on export_to_* # TODO: need converters in and out # TODO: lazy=True|False # TODO: datetime.time on MYSQL_TYPE # TODO: import from mysql # TODO: logging?
# TODO: _mysql_exceptions.OperationalError: (2006, 'MySQL server has gone # # away') MYSQL_TYPE = {str: 'TEXT', int: 'INT', float: 'FLOAT', datetime.date: 'DATE', datetime.datetime: 'DATETIME', bool: 'BOOL'} # 'BOOL' on MySQL is a shortcut to TINYINT(1) MYSQLDB_TYPE = {getattr(MySQLdb.FIELD_TYPE, x): x \ for x in dir(MySQLdb.FIELD_TYPE) if not x.startswith('_')} MYSQLDB_TO_PYTHON = {'ENUM': str, 'STRING': str, 'VAR_STRING': str, 'BLOB': bytes, 'LONG_BLOB': bytes, 'MEDIUM_BLOB': bytes, 'TINY_BLOB': bytes, 'DECIMAL': float, 'DOUBLE': float, 'FLOAT': float, 'INT24': int, 'LONG': int, 'LONGLONG': int, 'TINY': int, 'YEAR': int, 'DATE': datetime.date, 'NEWDATE': datetime.date, 'TIME': int, 'TIMESTAMP': int, 'DATETIME': datetime.datetime} # Connection strings follow 'user:password@host[:port]/database[/table]'; the port defaults to 3306 and the trailing table name is optional def _get_mysql_config(connection_str): colon_index = connection_str.index(':') at_index = connection_str.index('@') slash_index = connection_str.index('/') config = {} config['user'] = connection_str[:colon_index] config['passwd'] = connection_str[colon_index + 1:at_index] config['host'] = connection_str[at_index + 1:slash_index] config['port'] = 3306 if ':' in config['host']: data = config['host'].split(':') config['host'] = data[0] config['port'] = int(data[1]) if connection_str.count('/') == 1: table_name = None config['db'] = connection_str[slash_index + 1:] else: second_slash_index = connection_str.index('/', slash_index + 1) config['db'] = connection_str[slash_index + 1:second_slash_index] table_name = connection_str[second_slash_index + 1:] return config, table_name def _connect_to_mysql(config): return MySQLdb.connect(**config) def import_from_mysql(connection_string, limit=None, order_by=None, query=''): # TODO: add 'lazy' option config, table_name = _get_mysql_config(connection_string) connection = _connect_to_mysql(config) cursor = connection.cursor() if query: sql = query else: sql = 'SELECT * FROM ' + table_name if order_by is not None: sql += ' ORDER BY ' + order_by if limit is not None: sql += ' LIMIT {0[0]}, {0[1]}'.format(limit) cursor.execute(sql) column_info = [(x[0], x[1]) for x in cursor.description] table = Table(fields=[x[0] for x in cursor.description]) table.types = {name: MYSQLDB_TO_PYTHON[MYSQLDB_TYPE[type_]] \ for name, type_ in column_info} table_rows = [list(row) for row in cursor.fetchall()] encoding = connection.character_set_name() for row in table_rows: for column_index, value in enumerate(row): if type(value) is str: row[column_index] = value.decode(encoding) table._rows = table_rows cursor.close() connection.close() return table def export_to_mysql(table, connection_string, encoding=None, batch_size=1000, commit_every=10000, callback=None, callback_every=10000): config, table_name = _get_mysql_config(connection_string) connection = _connect_to_mysql(config) cursor = connection.cursor() # Create table fields, types = table.fields, table.types field_slugs = [slug(field) for field in fields] field_types = [MYSQL_TYPE[types[field]] for field in fields] columns_definition = ['{} {}'.format(field, type_) for field, type_ in zip(field_slugs, field_types)] sql = 'CREATE TABLE IF NOT EXISTS {} ({})'\ .format(table_name, ', '.join(columns_definition)) cursor.execute(sql) # Insert items columns = ', '.join(field_slugs) #placeholders = ['%s' if types[field] in (int, float, bool) else '"%s"' # for field in fields] # TODO: fix this string/formatting problem placeholders = ['%s' for field in fields] sql = 'INSERT INTO {} ({}) VALUES ({})'.format(table_name, columns, ', '.join(placeholders)) total = last_commit = last_callback = 0 for rows in
ipartition(iter(table), batch_size): values = [[row[field] for field in fields] for row in rows] added = len(values) total += added last_commit += added last_callback += added cursor.executemany(sql, values) if last_commit >= commit_every: connection.commit() last_commit = 0 if callback is not None and last_callback >= callback_every: callback(total) last_callback = 0 if callback is not None and last_callback > 0: callback(total) if last_commit > 0: connection.commit() connection.close() rows-0.3.1/tox.ini000066400000000000000000000002511310400316700140110ustar00rootroot00000000000000[tox] envlist = py27, py35 [testenv] deps = -rrequirements-development.txt commands = coverage erase && nosetests -dsv --with-yanc --with-coverage --cover-package rows
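A minimal usage sketch of the import/export round-trip the plugin test modules above exercise (the file names are illustrative placeholders, not files shipped in this repository):

import rows

# Import builds a Table, detecting a field type for each column.
table = rows.import_from_xls('data.xls')
print(table.fields)  # OrderedDict mapping field name -> rows.fields.* type
# The same Table can be exported through any other plugin.
rows.export_to_xlsx(table, 'data.xlsx')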