pax_global_header 0000666 0000000 0000000 00000000064 13271545624 0014523 g ustar 00root root 0000000 0000000 52 comment=46b208355a87ae8a1ebd5a60aab99090b35fba75
super-csv-2.4.0/ 0000775 0000000 0000000 00000000000 13271545624 0013455 5 ustar 00root root 0000000 0000000 super-csv-2.4.0/.gitattributes 0000664 0000000 0000000 00000000014 13271545624 0016343 0 ustar 00root root 0000000 0000000 * text=auto
super-csv-2.4.0/.gitignore 0000664 0000000 0000000 00000000044 13271545624 0015443 0 ustar 00root root 0000000 0000000 target/
#IntelliJ Idea
*.iml
.idea
super-csv-2.4.0/.project 0000664 0000000 0000000 00000000573 13271545624 0015131 0 ustar 00root root 0000000 0000000
super-csv-parentorg.eclipse.m2e.core.maven2Builderorg.eclipse.m2e.core.maven2Nature
super-csv-2.4.0/.travis.yml 0000664 0000000 0000000 00000000220 13271545624 0015560 0 ustar 00root root 0000000 0000000 language: java
jdk:
- oraclejdk8
script: mvn test $MVN_ARGS
env:
- MVN_ARGS=
- MVN_ARGS="-P germanLocale"
- MVN_ARGS="-P englishLocale"
super-csv-2.4.0/LICENSE.txt 0000664 0000000 0000000 00000026756 13271545624 0015320 0 ustar 00root root 0000000 0000000 /*
* Apache License
* Version 2.0, January 2004
* http://www.apache.org/licenses/
*
* TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
*
* 1. Definitions.
*
* "License" shall mean the terms and conditions for use, reproduction,
* and distribution as defined by Sections 1 through 9 of this document.
*
* "Licensor" shall mean the copyright owner or entity authorized by
* the copyright owner that is granting the License.
*
* "Legal Entity" shall mean the union of the acting entity and all
* other entities that control, are controlled by, or are under common
* control with that entity. For the purposes of this definition,
* "control" means (i) the power, direct or indirect, to cause the
* direction or management of such entity, whether by contract or
* otherwise, or (ii) ownership of fifty percent (50%) or more of the
* outstanding shares, or (iii) beneficial ownership of such entity.
*
* "You" (or "Your") shall mean an individual or Legal Entity
* exercising permissions granted by this License.
*
* "Source" form shall mean the preferred form for making modifications,
* including but not limited to software source code, documentation
* source, and configuration files.
*
* "Object" form shall mean any form resulting from mechanical
* transformation or translation of a Source form, including but
* not limited to compiled object code, generated documentation,
* and conversions to other media types.
*
* "Work" shall mean the work of authorship, whether in Source or
* Object form, made available under the License, as indicated by a
* copyright notice that is included in or attached to the work
* (an example is provided in the Appendix below).
*
* "Derivative Works" shall mean any work, whether in Source or Object
* form, that is based on (or derived from) the Work and for which the
* editorial revisions, annotations, elaborations, or other modifications
* represent, as a whole, an original work of authorship. For the purposes
* of this License, Derivative Works shall not include works that remain
* separable from, or merely link (or bind by name) to the interfaces of,
* the Work and Derivative Works thereof.
*
* "Contribution" shall mean any work of authorship, including
* the original version of the Work and any modifications or additions
* to that Work or Derivative Works thereof, that is intentionally
* submitted to Licensor for inclusion in the Work by the copyright owner
* or by an individual or Legal Entity authorized to submit on behalf of
* the copyright owner. For the purposes of this definition, "submitted"
* means any form of electronic, verbal, or written communication sent
* to the Licensor or its representatives, including but not limited to
* communication on electronic mailing lists, source code control systems,
* and issue tracking systems that are managed by, or on behalf of, the
* Licensor for the purpose of discussing and improving the Work, but
* excluding communication that is conspicuously marked or otherwise
* designated in writing by the copyright owner as "Not a Contribution."
*
* "Contributor" shall mean Licensor and any individual or Legal Entity
* on behalf of whom a Contribution has been received by Licensor and
* subsequently incorporated within the Work.
*
* 2. Grant of Copyright License. Subject to the terms and conditions of
* this License, each Contributor hereby grants to You a perpetual,
* worldwide, non-exclusive, no-charge, royalty-free, irrevocable
* copyright license to reproduce, prepare Derivative Works of,
* publicly display, publicly perform, sublicense, and distribute the
* Work and such Derivative Works in Source or Object form.
*
* 3. Grant of Patent License. Subject to the terms and conditions of
* this License, each Contributor hereby grants to You a perpetual,
* worldwide, non-exclusive, no-charge, royalty-free, irrevocable
* (except as stated in this section) patent license to make, have made,
* use, offer to sell, sell, import, and otherwise transfer the Work,
* where such license applies only to those patent claims licensable
* by such Contributor that are necessarily infringed by their
* Contribution(s) alone or by combination of their Contribution(s)
* with the Work to which such Contribution(s) was submitted. If You
* institute patent litigation against any entity (including a
* cross-claim or counterclaim in a lawsuit) alleging that the Work
* or a Contribution incorporated within the Work constitutes direct
* or contributory patent infringement, then any patent licenses
* granted to You under this License for that Work shall terminate
* as of the date such litigation is filed.
*
* 4. Redistribution. You may reproduce and distribute copies of the
* Work or Derivative Works thereof in any medium, with or without
* modifications, and in Source or Object form, provided that You
* meet the following conditions:
*
* (a) You must give any other recipients of the Work or
* Derivative Works a copy of this License; and
*
* (b) You must cause any modified files to carry prominent notices
* stating that You changed the files; and
*
* (c) You must retain, in the Source form of any Derivative Works
* that You distribute, all copyright, patent, trademark, and
* attribution notices from the Source form of the Work,
* excluding those notices that do not pertain to any part of
* the Derivative Works; and
*
* (d) If the Work includes a "NOTICE" text file as part of its
* distribution, then any Derivative Works that You distribute must
* include a readable copy of the attribution notices contained
* within such NOTICE file, excluding those notices that do not
* pertain to any part of the Derivative Works, in at least one
* of the following places: within a NOTICE text file distributed
* as part of the Derivative Works; within the Source form or
* documentation, if provided along with the Derivative Works; or,
* within a display generated by the Derivative Works, if and
* wherever such third-party notices normally appear. The contents
* of the NOTICE file are for informational purposes only and
* do not modify the License. You may add Your own attribution
* notices within Derivative Works that You distribute, alongside
* or as an addendum to the NOTICE text from the Work, provided
* that such additional attribution notices cannot be construed
* as modifying the License.
*
* You may add Your own copyright statement to Your modifications and
* may provide additional or different license terms and conditions
* for use, reproduction, or distribution of Your modifications, or
* for any such Derivative Works as a whole, provided Your use,
* reproduction, and distribution of the Work otherwise complies with
* the conditions stated in this License.
*
* 5. Submission of Contributions. Unless You explicitly state otherwise,
* any Contribution intentionally submitted for inclusion in the Work
* by You to the Licensor shall be under the terms and conditions of
* this License, without any additional terms or conditions.
* Notwithstanding the above, nothing herein shall supersede or modify
* the terms of any separate license agreement you may have executed
* with Licensor regarding such Contributions.
*
* 6. Trademarks. This License does not grant permission to use the trade
* names, trademarks, service marks, or product names of the Licensor,
* except as required for reasonable and customary use in describing the
* origin of the Work and reproducing the content of the NOTICE file.
*
* 7. Disclaimer of Warranty. Unless required by applicable law or
* agreed to in writing, Licensor provides the Work (and each
* Contributor provides its Contributions) on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
* implied, including, without limitation, any warranties or conditions
* of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
* PARTICULAR PURPOSE. You are solely responsible for determining the
* appropriateness of using or redistributing the Work and assume any
* risks associated with Your exercise of permissions under this License.
*
* 8. Limitation of Liability. In no event and under no legal theory,
* whether in tort (including negligence), contract, or otherwise,
* unless required by applicable law (such as deliberate and grossly
* negligent acts) or agreed to in writing, shall any Contributor be
* liable to You for damages, including any direct, indirect, special,
* incidental, or consequential damages of any character arising as a
* result of this License or out of the use or inability to use the
* Work (including but not limited to damages for loss of goodwill,
* work stoppage, computer failure or malfunction, or any and all
* other commercial damages or losses), even if such Contributor
* has been advised of the possibility of such damages.
*
* 9. Accepting Warranty or Additional Liability. While redistributing
* the Work or Derivative Works thereof, You may choose to offer,
* and charge a fee for, acceptance of support, warranty, indemnity,
* or other liability obligations and/or rights consistent with this
* License. However, in accepting such obligations, You may act only
* on Your own behalf and on Your sole responsibility, not on behalf
* of any other Contributor, and only if You agree to indemnify,
* defend, and hold each Contributor harmless for any liability
* incurred by, or claims asserted against, such Contributor by reason
* of your accepting any such warranty or additional liability.
*
* END OF TERMS AND CONDITIONS
*
* APPENDIX: How to apply the Apache License to your work.
*
* To apply the Apache License to your work, attach the following
* boilerplate notice, with the fields enclosed by brackets "[]"
* replaced with your own identifying information. (Don't include
* the brackets!) The text should be enclosed in the appropriate
* comment syntax for the file format. We also recommend that a
* file or class name and description of purpose be included on the
* same "printed page" as the copyright notice for easier
* identification within third-party archives.
*
* Copyright 2007 Kasper B. Graversen
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
super-csv-2.4.0/README.md 0000664 0000000 0000000 00000003221 13271545624 0014732 0 ustar 00root root 0000000 0000000 super-csv
=========
[](https://travis-ci.org/super-csv/super-csv)
[](https://gitter.im/super-csv/super-csv?utm_source=badge&utm_medium=badge&utm_campaign=pr-badge&utm_content=badge)
**Dear super-csv community, we are looking for people to help maintain super-csv. Time and motivation is at low, for both James and I. See https://github.com/super-csv/super-csv/issues/47**
Super CSV is a fast, programmer-friendly, open-source library for reading and writing CSV files with Java. It is used by a number of large projects and is being downloaded 19000+ times/month from the Maven repository.
* **Documentation**: http://super-csv.github.io/super-csv
* **Download Instructions**: http://super-csv.github.io/super-csv/downloading.html
* **GitHub**: https://github.com/super-csv/super-csv
* **Free software**: Apache License, Version 2.0
* **Requirements**: Java 1.5
It is highly configurable, and supports reading and writing with POJOs, Maps and Lists. It also has support for deep-mapping and index-based mapping with POJOs, using the powerful Dozer extension.
Its flexible 'Cell Processor' API automates data type conversions (parsing and formatting Dates, Integers, Booleans etc) and enforces constraints (mandatory columns, matching against regular expressions etc) - and it's easy to write your own if required.
Super CSV is actively maintained, and the developers are ready and willing to help out if you get stuck :)
Please check out the documentation for examples and help getting started.
super-csv-2.4.0/pom.xml 0000664 0000000 0000000 00000036273 13271545624 0015005 0 ustar 00root root 0000000 0000000
4.0.0org.sonatype.ossoss-parent7net.sf.supercsvsuper-csv-parent2.4.0pomhttps://github.com/super-csv/super-csvSuper CSVSuper CSV parent project2007super-csvsuper-csv-dozersuper-csv-jodasuper-csv-java8super-csv-distributionsuper-csv-benchmarkUTF-85.4.01.7.52.5.23.0.02.10.12.53.32.72.183.0.4org.codehaus.mojoanimal-sniffer-maven-plugin1.12org.apache.maven.pluginsmaven-assembly-plugin2.5.2org.apache.felixmaven-bundle-plugin2.5.3org.codehaus.mojocobertura-maven-plugin${cobertura.plugin.version}org.apache.maven.pluginsmaven-compiler-plugin3.2org.apache.maven.pluginsmaven-eclipse-plugin2.9org.codehaus.mojofindbugs-maven-plugin${findbugs-plugin.version}org.apache.maven.pluginsmaven-jar-plugin2.5org.apache.maven.pluginsmaven-javadoc-plugin${javadoc.plugin.version}org.apache.maven.pluginsmaven-jxr-plugin${jxr.plugin.version}org.apache.maven.pluginsmaven-pmd-plugin${pmd.plugin.version}org.apache.maven.pluginsmaven-project-info-reports-plugin${project.info.reports.plugin.version}org.apache.maven.pluginsmaven-release-plugin2.5.1org.apache.maven.pluginsmaven-scm-publish-plugin1.1org.apache.maven.pluginsmaven-source-plugin2.4org.apache.maven.pluginsmaven-site-plugin3.4org.apache.maven.pluginsmaven-surefire-plugin${surefire.plugin.version}org.apache.maven.pluginsmaven-surefire-report-plugin${surefire.plugin.version}org.apache.maven.pluginsmaven-compiler-plugin1.51.5org.apache.maven.pluginsmaven-site-plugintrueorg.apache.maven.pluginsmaven-scm-publish-plugingh-pagesorg.apache.maven.pluginsmaven-eclipse-plugintruetruetrueorg.eclipse.jdt.launching.JRE_CONTAINER/org.eclipse.jdt.internal.debug.ui.launcher.StandardVMType/J2SE-1.5org.apache.maven.pluginsmaven-surefire-plugin${surefireArgs}org.codehaus.mojocobertura-maven-pluginxmlhtmlorg.apache.maven.pluginsmaven-source-pluginattach-sourcesjarorg.apache.maven.pluginsmaven-javadoc-pluginattach-javadocsjarorg.apache.maven.pluginsmaven-release-plugintrueclean 
verifydeployv@{project.version}org.codehaus.mojoanimal-sniffer-maven-pluginorg.codehaus.mojo.signaturejava151.0check-java15-compatabilitytestcheckgermanLocale-Duser.country=DE -Duser.language=deturkishLocale-Duser.country=TR -Duser.language=trenglishLocale-Duser.country=GB -Duser.language=ennet.sf.supercsvsuper-csv${project.version}net.sf.supercsvsuper-csv-dozer${project.version}net.sf.dozerdozer${dozer.version}org.slf4jslf4j-log4j12junitjunit4.12testKasper GraversenkbgProject Lead/Founder+1James Bassettjamesbassett+10Developer (past)Dominique De Vitoddv36a78Developer (past)Adrian Ber (beradrian)Alf Richter (Haskell2000)Jan Hartung (egga)John Gibson (noredshadow)Lubor Vágenknecht (lubor)Nick Babcock (nickbabcock)Nicolas Capponi (ncapponi)Michał Ziober (ZioberMichal)Petar Tahchiev (ptahchiev)Pete Lichten (boneshaker335)Rolf Wojtech (rwojtech)Thor Michael Støre (thormick)Vlad Dinulescu (vladdinulescu)Vyacheslav Pushkin (singularityfx)Super CSVhttps://github.com/super-csv/super-csvApache License, Version 2.0http://www.apache.org/licenses/LICENSE-2.0.htmlorg.apache.maven.pluginsmaven-project-info-reports-plugin${project.info.reports.plugin.version}indexdependenciesproject-teamissue-trackinglicenseorg.codehaus.mojocobertura-maven-plugin${cobertura.plugin.version}org.apache.maven.pluginsmaven-surefire-report-plugin${surefire.plugin.version}org.apache.maven.pluginsmaven-javadoc-plugin${javadoc.plugin.version}true
http://docs.oracle.com/javase/8/docs/api
http://dozer.sourceforge.net/apidocs
http://www.joda.org/joda-time/apidocs
javadocorg.apache.maven.pluginsmaven-pmd-plugin${pmd.plugin.version}1.5org.codehaus.mojofindbugs-maven-plugin${findbugs-plugin.version}org.apache.maven.pluginsmaven-jxr-plugin${jxr.plugin.version}GitHub Issueshttps://github.com/super-csv/super-csv/issuesgh-pagesscm:git:git@github.com:super-csv/super-csv.gitscm:git:git@github.com:super-csv/super-csv.gitscm:git:git@github.com:super-csv/super-csv.githttps://github.com/super-csv/super-csvv2.4.0
super-csv-2.4.0/src/ 0000775 0000000 0000000 00000000000 13271545624 0014244 5 ustar 00root root 0000000 0000000 super-csv-2.4.0/src/site/ 0000775 0000000 0000000 00000000000 13271545624 0015210 5 ustar 00root root 0000000 0000000 super-csv-2.4.0/src/site/apt/ 0000775 0000000 0000000 00000000000 13271545624 0015774 5 ustar 00root root 0000000 0000000 super-csv-2.4.0/src/site/apt/cell_processors.apt 0000664 0000000 0000000 00000050152 13271545624 0021706 0 ustar 00root root 0000000 0000000 ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
~~ Copyright 2007 Kasper B. Graversen
~~
~~ Licensed under the Apache License, Version 2.0 (the "License");
~~ you may not use this file except in compliance with the License.
~~ You may obtain a copy of the License at
~~
~~ http://www.apache.org/licenses/LICENSE-2.0
~~
~~ Unless required by applicable law or agreed to in writing, software
~~ distributed under the License is distributed on an "AS IS" BASIS,
~~ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
~~ See the License for the specific language governing permissions and
~~ limitations under the License.
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
---------------
Cell Processors
---------------
Cell processors
Cell processors are an integral part of reading and writing with Super CSV - they automate the data type conversions, and enforce constraints.
They implement the design pattern - each processor has a single, well-defined purpose and can be chained
together with other processors to fully automate all of the required conversions and constraint validation for a single CSV column.
A typical CellProcessor configuration for reading the following CSV file
+-----------------------------------+
name,birthDate,weight
John,25/12/1946,83.5
Alice,06/08/1958,
Bob,01/03/1984,65.0,
+-----------------------------------+
might look like the following:
+---------------------------------------------------------------------------------------------------------------------------------------------+
public static final CellProcessor[] PROCESSORS = new CellProcessor[] {
null,
new ParseDate("dd/MM/yyyy"),
new Optional(new ParseDouble()) };
+---------------------------------------------------------------------------------------------------------------------------------------------+
The number of elements in the CellProcessor array must match up with the number of columns to be processed - the file has 3 columns,
so the CellProcessor array has 3 elements.
[[1]] The first processor (for the name column) is <<<null>>>, which indicates that no processing is required (the String is used unchanged).
Semantically, it might have been better to replace that with <<<new Optional()>>>, which means the same thing.
If we wanted to guarantee that name was supplied (i.e. it's mandatory), then we could have used <<<new NotNull()>>> instead
(which works because empty String (<<<"">>>) is converted to <<<null>>> when reading).
[[2]] The second processor (for the birthDate column) is <<<new ParseDate("dd/MM/yyyy")>>>, which indicates that that column is mandatory,
and should be parsed as a Date using the supplied format.
[[3]] The third processor (for the weight column) is <<<new Optional(new ParseDouble())>>>, which indicates that the column is optional
(the value will be <<>> if the column is empty), but if it's supplied then parse it as a Double.
* Cell processor overview
* processors are similar to servlet filters in JEE - they can be chained together, and they can modify the data that's passed along the chain
* processors are executed from left to right (but yes, the processor's constructors are invoked from right to left!)
* the number of elements in the CellProcessor array must match up with the number of columns to be processed
* a <<<null>>> processor means <no processing is required>
* most processors expect input to be non-null - if it's an optional column then chain an <<<Optional()>>> processor before it, e.g.
<<<new Optional(new ParseInt())>>>. Further processing (processors chained after <<<Optional()>>>) will be skipped if the value to be read/written is <<<null>>>.
* all processors throw <<<SuperCsvCellProcessorException>>> if they encounter data they cannot process (this shouldn't normally happen if your processor configuration is correct)
* constraint-validating processors throw <<<SuperCsvConstraintViolationException>>> if the value does not satisfy the constraint
* Available cell processors
The examples above just touch the surface of what's possible with cell processors.
The following table shows all of the processors available for reading, writing, and constraint validation.
*----------------------------------------------------------------------------------*---------------------------------------------------------------------*-----------------------------------------------------------------------------*-----------------------------------------------------------------------------------------------*
<<Reading>> || Writing || Reading / Writing || Constraints
*----------------------------------------------------------------------------------*---------------------------------------------------------------------*-----------------------------------------------------------------------------*-----------------------------------------------------------------------------------------------*
{{{./apidocs/org/supercsv/cellprocessor/ParseBigDecimal.html}ParseBigDecimal}} | {{{./apidocs/org/supercsv/cellprocessor/FmtBool.html}FmtBool}} | {{{./apidocs/org/supercsv/cellprocessor/Collector.html}Collector}} | {{{./apidocs/org/supercsv/cellprocessor/constraint/DMinMax.html}DMinMax}}
*----------------------------------------------------------------------------------*---------------------------------------------------------------------*-----------------------------------------------------------------------------*-----------------------------------------------------------------------------------------------*
{{{./apidocs/org/supercsv/cellprocessor/ParseBool.html}ParseBool}} | {{{./apidocs/org/supercsv/cellprocessor/FmtDate.html}FmtDate}} | {{{./apidocs/org/supercsv/cellprocessor/ConvertNullTo.html}ConvertNullTo}} | {{{./apidocs/org/supercsv/cellprocessor/constraint/Equals.html}Equals}}
*----------------------------------------------------------------------------------*---------------------------------------------------------------------*-----------------------------------------------------------------------------*-----------------------------------------------------------------------------------------------*
{{{./apidocs/org/supercsv/cellprocessor/ParseChar.html}ParseChar}} | {{{./apidocs/org/supercsv/cellprocessor/FmtNumber.html}FmtNumber}} | {{{./apidocs/org/supercsv/cellprocessor/HashMapper.html}HashMapper}} | {{{./apidocs/org/supercsv/cellprocessor/constraint/ForbidSubStr.html}ForbidSubStr}}
*----------------------------------------------------------------------------------*---------------------------------------------------------------------*-----------------------------------------------------------------------------*-----------------------------------------------------------------------------------------------*
{{{./apidocs/org/supercsv/cellprocessor/ParseDate.html}ParseDate}} | | {{{./apidocs/org/supercsv/cellprocessor/Optional.html}Optional}} | {{{./apidocs/org/supercsv/cellprocessor/constraint/IsElementOf.html}IsElementOf}}
*----------------------------------------------------------------------------------*---------------------------------------------------------------------*-----------------------------------------------------------------------------*-----------------------------------------------------------------------------------------------*
{{{./apidocs/org/supercsv/cellprocessor/ParseDouble.html}ParseDouble}} | | {{{./apidocs/org/supercsv/cellprocessor/StrReplace.html}StrReplace}} | {{{./apidocs/org/supercsv/cellprocessor/constraint/IsIncludedIn.html}IsIncludedIn}}
*----------------------------------------------------------------------------------*---------------------------------------------------------------------*-----------------------------------------------------------------------------*-----------------------------------------------------------------------------------------------*
{{{./apidocs/org/supercsv/cellprocessor/ParseEnum.html}ParseEnum}} | | {{{./apidocs/org/supercsv/cellprocessor/Token.html}Token}} | {{{./apidocs/org/supercsv/cellprocessor/constraint/LMinMax.html}LMinMax}}
*----------------------------------------------------------------------------------*---------------------------------------------------------------------*-----------------------------------------------------------------------------*-----------------------------------------------------------------------------------------------*
{{{./apidocs/org/supercsv/cellprocessor/ParseInt.html}ParseInt}} | | {{{./apidocs/org/supercsv/cellprocessor/Trim.html}Trim}} | {{{./apidocs/org/supercsv/cellprocessor/constraint/NotNull.html}NotNull}}
*----------------------------------------------------------------------------------*---------------------------------------------------------------------*-----------------------------------------------------------------------------*-----------------------------------------------------------------------------------------------*
{{{./apidocs/org/supercsv/cellprocessor/ParseLong.html}ParseLong}} | | {{{./apidocs/org/supercsv/cellprocessor/Truncate.html}Truncate}} | {{{./apidocs/org/supercsv/cellprocessor/constraint/RequireHashCode.html}RequireHashCode}}
*----------------------------------------------------------------------------------*---------------------------------------------------------------------*-----------------------------------------------------------------------------*-----------------------------------------------------------------------------------------------*
| | | {{{./apidocs/org/supercsv/cellprocessor/constraint/RequireSubStr.html}RequireSubStr}}
*----------------------------------------------------------------------------------*---------------------------------------------------------------------*-----------------------------------------------------------------------------*-----------------------------------------------------------------------------------------------*
| | | {{{./apidocs/org/supercsv/cellprocessor/constraint/Strlen.html}Strlen}}
*----------------------------------------------------------------------------------*---------------------------------------------------------------------*-----------------------------------------------------------------------------*-----------------------------------------------------------------------------------------------*
| | | {{{./apidocs/org/supercsv/cellprocessor/constraint/StrMinMax.html}StrMinMax}}
*----------------------------------------------------------------------------------*---------------------------------------------------------------------*-----------------------------------------------------------------------------*-----------------------------------------------------------------------------------------------*
| | | {{{./apidocs/org/supercsv/cellprocessor/constraint/StrNotNullOrEmpty.html}StrNotNullOrEmpty}}
*----------------------------------------------------------------------------------*---------------------------------------------------------------------*-----------------------------------------------------------------------------*-----------------------------------------------------------------------------------------------*
| | | {{{./apidocs/org/supercsv/cellprocessor/constraint/StrRegEx.html}StrRegEx}}
*----------------------------------------------------------------------------------*---------------------------------------------------------------------*-----------------------------------------------------------------------------*-----------------------------------------------------------------------------------------------*
| | | {{{./apidocs/org/supercsv/cellprocessor/constraint/Unique.html}Unique}}
*----------------------------------------------------------------------------------*---------------------------------------------------------------------*-----------------------------------------------------------------------------*-----------------------------------------------------------------------------------------------*
| | | {{{./apidocs/org/supercsv/cellprocessor/constraint/UniqueHashCode.html}UniqueHashCode}}
*----------------------------------------------------------------------------------*---------------------------------------------------------------------*-----------------------------------------------------------------------------*-----------------------------------------------------------------------------------------------*
* Joda cell processors
In addition to the above, there are a number of useful processors for reading and writing {{{http://www.joda.org/joda-time}Joda-Time}} classes.
To use these, you must include <<<super-csv-joda.jar>>> (see the {{{./downloading.html}Download}} page).
*-------------------------------------------------------------------------------------------*--------------------------------------------------------------------------------------*
<<Reading>> || Writing
*-------------------------------------------------------------------------------------------*--------------------------------------------------------------------------------------*
{{{./apidocs/org/supercsv/cellprocessor/joda/ParseDateTime.html}ParseDateTime}} | {{{./apidocs/org/supercsv/cellprocessor/joda/FmtDateTime.html}FmtDateTime}}
*-------------------------------------------------------------------------------------------*--------------------------------------------------------------------------------------*
{{{./apidocs/org/supercsv/cellprocessor/joda/ParseDateTimeZone.html}ParseDateTimeZone}} | {{{./apidocs/org/supercsv/cellprocessor/joda/FmtDateTimeZone.html}FmtDateTimeZone}}
*-------------------------------------------------------------------------------------------*--------------------------------------------------------------------------------------*
{{{./apidocs/org/supercsv/cellprocessor/joda/ParseDuration.html}ParseDuration}} | {{{./apidocs/org/supercsv/cellprocessor/joda/FmtDuration.html}FmtDuration}}
*-------------------------------------------------------------------------------------------*--------------------------------------------------------------------------------------*
{{{./apidocs/org/supercsv/cellprocessor/joda/ParseInterval.html}ParseInterval}} | {{{./apidocs/org/supercsv/cellprocessor/joda/FmtInterval.html}FmtInterval}}
*-------------------------------------------------------------------------------------------*--------------------------------------------------------------------------------------*
{{{./apidocs/org/supercsv/cellprocessor/joda/ParseLocalDate.html}ParseLocalDate}} | {{{./apidocs/org/supercsv/cellprocessor/joda/FmtLocalDate.html}FmtLocalDate}}
*-------------------------------------------------------------------------------------------*--------------------------------------------------------------------------------------*
{{{./apidocs/org/supercsv/cellprocessor/joda/ParseLocalDateTime.html}ParseLocalDateTime}} | {{{./apidocs/org/supercsv/cellprocessor/joda/FmtLocalDateTime.html}FmtLocalDateTime}}
*-------------------------------------------------------------------------------------------*--------------------------------------------------------------------------------------*
{{{./apidocs/org/supercsv/cellprocessor/joda/ParseLocalTime.html}ParseLocalTime}} | {{{./apidocs/org/supercsv/cellprocessor/joda/FmtLocalTime.html}FmtLocalTime}}
*-------------------------------------------------------------------------------------------*--------------------------------------------------------------------------------------*
{{{./apidocs/org/supercsv/cellprocessor/joda/ParsePeriod.html}ParsePeriod}} | {{{./apidocs/org/supercsv/cellprocessor/joda/FmtPeriod.html}FmtPeriod}}
*-------------------------------------------------------------------------------------------*--------------------------------------------------------------------------------------*
* Java 8 cell processors
In addition to the above, there are a number of useful processors for reading and writing Java 8 classes.
To use these, you must include <<<super-csv-java8.jar>>> (see the {{{./downloading.html}Download}} page).
*-------------------------------------------------------------------------------------------*--------------------------------------------------------------------------------------*
<<Reading>> || Writing
*-------------------------------------------------------------------------------------------*--------------------------------------------------------------------------------------*
{{{./apidocs/org/supercsv/cellprocessor/time/ParseLocalDate.html}ParseLocalDate}} |
*-------------------------------------------------------------------------------------------*--------------------------------------------------------------------------------------*
super-csv-2.4.0/src/site/apt/csv_specification.apt 0000664 0000000 0000000 00000016240 13271545624 0022200 0 ustar 00root root 0000000 0000000 ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
~~ Copyright 2007 Kasper B. Graversen
~~
~~ Licensed under the Apache License, Version 2.0 (the "License");
~~ you may not use this file except in compliance with the License.
~~ You may obtain a copy of the License at
~~
~~ http://www.apache.org/licenses/LICENSE-2.0
~~
~~ Unless required by applicable law or agreed to in writing, software
~~ distributed under the License is distributed on an "AS IS" BASIS,
~~ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
~~ See the License for the specific language governing permissions and
~~ limitations under the License.
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
------------
What is CSV?
------------
What is CSV?
The comma-separated values (CSV) format is a widely used text file format often used to exchange data between applications.
It contains multiple records (one per line), and each field is delimited by a comma.
{{{http://en.wikipedia.org/wiki/Comma-separated_values}Wikipedia}} has a good explanation of the CSV format and its history.
There is no definitive standard for CSV, however the most commonly accepted definition is {{{http://tools.ietf.org/html/rfc4180}RFC 4180}} -
the MIME type definition for CSV. Super CSV is 100% compliant with RFC 4180, while still allowing some flexibility where CSV files deviate from the definition.
The following shows each rule defined in RFC 4180, and how it is treated by Super CSV.
* Rule 1
-----------------------------------------------------------------------------------
1. Each record is located on a separate line, delimited by a line
break (CRLF). For example:
aaa,bbb,ccc CRLF
zzz,yyy,xxx CRLF
-----------------------------------------------------------------------------------
Super CSV accepts all line breaks (Windows, Mac or Unix) when reading CSV files,
and uses the end of line symbols specified by the user (via the {{{./apidocs/org/supercsv/prefs/CsvPreference.html}CsvPreference}} object) when writing CSV files.
* Rule 2
-----------------------------------------------------------------------------------
2. The last record in the file may or may not have an ending line
break. For example:
aaa,bbb,ccc CRLF
zzz,yyy,xxx
-----------------------------------------------------------------------------------
Super CSV adds a line break when writing the last line of a CSV file, but a line break on the last line is optional when reading.
* Rule 3
-----------------------------------------------------------------------------------
3. There maybe an optional header line appearing as the first line
of the file with the same format as normal record lines. This
header will contain names corresponding to the fields in the file
and should contain the same number of fields as the records in
the rest of the file (the presence or absence of the header line
should be indicated via the optional "header" parameter of this
MIME type). For example:
field_name,field_name,field_name CRLF
aaa,bbb,ccc CRLF
zzz,yyy,xxx CRLF
-----------------------------------------------------------------------------------
Super CSV provides methods for reading and writing headers, if required.
It also makes use of the header for mapping between CSV and POJOs (see {{{./apidocs/org/supercsv/io/CsvBeanReader.html}CsvBeanReader}}/{{{./apidocs/org/supercsv/io/CsvBeanWriter.html}CsvBeanWriter}}).
* Rule 4
-----------------------------------------------------------------------------------
4. Within the header and each record, there may be one or more
fields, separated by commas. Each line should contain the same
number of fields throughout the file. Spaces are considered part
of a field and should not be ignored. The last field in the
record must not be followed by a comma. For example:
aaa,bbb,ccc
-----------------------------------------------------------------------------------
The delimiter in Super CSV is configurable via the {{{./apidocs/org/supercsv/prefs/CsvPreference.html}CsvPreference}} object, though it is typically a comma.
Super CSV expects each line to contain the same number of fields (including the header).
In cases where the number of fields varies, {{{./apidocs/org/supercsv/io/CsvListReader.html}CsvListReader}}/{{{./apidocs/org/supercsv/io/CsvListWriter.html}CsvListWriter}} should be used, as they contain methods for reading/writing lines of arbitrary length.
By default, Super CSV considers spaces part of a field. However, if you require that surrounding spaces should not be part of the field
(unless within double quotes), then you can enable the <<<surroundingSpacesNeedQuotes>>> option in your {{{./apidocs/org/supercsv/prefs/CsvPreference.html}CsvPreference}} object. This will ensure that surrounding spaces are trimmed when reading
(if not within double quotes), and that quotes are applied to a field with surrounding spaces when writing.
* Rule 5
-----------------------------------------------------------------------------------
5. Each field may or may not be enclosed in double quotes (however
some programs, such as Microsoft Excel, do not use double quotes
at all). If fields are not enclosed with double quotes, then
double quotes may not appear inside the fields. For example:
"aaa","bbb","ccc" CRLF
zzz,yyy,xxx
-----------------------------------------------------------------------------------
By default Super CSV only encloses fields in double quotes when they require escaping (see Rule 6),
but it is possible to enable quotes always, for particular columns, or for some other reason by supplying
a {{{./apidocs/org/supercsv/quote/QuoteMode.html}QuoteMode}} in the CsvPreference object.
The quote character is configurable via the {{{./apidocs/org/supercsv/prefs/CsvPreference.html}CsvPreference}} object, though is typically a double quote (<<<">>>).
* Rule 6
-----------------------------------------------------------------------------------
6. Fields containing line breaks (CRLF), double quotes, and commas
should be enclosed in double-quotes. For example:
"aaa","b CRLF
bb","ccc" CRLF
zzz,yyy,xxx
-----------------------------------------------------------------------------------
Super CSV handles multi-line fields (as long as they're enclosed in quotes) when reading,
and encloses a field in quotes when writing if it contains a newline, quote character or delimiter (defined in the {{{./apidocs/org/supercsv/prefs/CsvPreference.html}CsvPreference}} object).
* Rule 7
-----------------------------------------------------------------------------------
7. If double-quotes are used to enclose fields, then a double-quote
appearing inside a field must be escaped by preceding it with
another double quote. For example:
"aaa","b""bb","ccc"
-----------------------------------------------------------------------------------
Super CSV escapes double-quotes with a preceding double-quote. Please note that the sometimes-used convention of escaping double-quotes as <<<\">>> (instead of <<<"">>>)
is <<not supported>>.
super-csv-2.4.0/src/site/apt/downloading.apt.vm 0000664 0000000 0000000 00000015656 13271545624 0021445 0 ustar 00root root 0000000 0000000 ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
~~ Copyright 2007 Kasper B. Graversen
~~
~~ Licensed under the Apache License, Version 2.0 (the "License");
~~ you may not use this file except in compliance with the License.
~~ You may obtain a copy of the License at
~~
~~ http://www.apache.org/licenses/LICENSE-2.0
~~
~~ Unless required by applicable law or agreed to in writing, software
~~ distributed under the License is distributed on an "AS IS" BASIS,
~~ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
~~ See the License for the specific language governing permissions and
~~ limitations under the License.
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
---------------------
Downloading Super CSV
---------------------
Downloading Super CSV
* Prerequisites
[Java 1.5+] Super CSV is compiled for Java 1.5
* Maven users
If you are using Maven, simply copy the following dependency into your pom.xml file.
The artifact is hosted at {{{http://search.maven.org/#search%7Cga%7C1%7Cg%3A%22net.sf.supercsv%22}Maven Central}}, and is standalone (no dependencies).
+-------------------------------------+
<dependency>
    <groupId>${project.groupId}</groupId>
    <artifactId>super-csv</artifactId>
    <version>${project.version}</version>
</dependency>
+-------------------------------------+
If you wish to use the Dozer extension (with deep mapping and index-based mapping support),
then you will also need the following:
+-------------------------------------+
<dependency>
    <groupId>${project.groupId}</groupId>
    <artifactId>super-csv-dozer</artifactId>
    <version>${project.version}</version>
</dependency>
+-------------------------------------+
If you wish to use the Joda-Time extension (cell processors for parsing/formatting Joda-Time classes),
then you will also need the following:
+-------------------------------------+
<dependency>
    <groupId>${project.groupId}</groupId>
    <artifactId>super-csv-joda</artifactId>
    <version>${project.version}</version>
</dependency>
+-------------------------------------+
If you wish to use the Java 8 extension (cell processors for parsing/formatting Java 8 classes),
then you will also need the following:
+-------------------------------------+
<dependency>
    <groupId>${project.groupId}</groupId>
    <artifactId>super-csv-java8</artifactId>
    <version>${project.version}</version>
</dependency>
+-------------------------------------+
* Everyone else
You can download the latest distribution zip file from {{{https://github.com/super-csv/super-csv/releases}GitHub}},
which contains:
*----------------------------------------------------------------+---------------------------------------------------------------------+
<<Filename>> || Description
*----------------------------------------------------------------+---------------------------------------------------------------------+
super-csv/super-csv-${project.version}.jar | Super CSV ${project.version} (compiled classes only)
*----------------------------------------------------------------+---------------------------------------------------------------------+
super-csv/super-csv-${project.version}-sources.jar | The Super CSV source code
*----------------------------------------------------------------+---------------------------------------------------------------------+
super-csv/super-csv-${project.version}-javadoc.jar | The Super CSV Javadoc documentation
*----------------------------------------------------------------+---------------------------------------------------------------------+
super-csv-dozer/super-csv-dozer-${project.version}.jar | Super CSV Dozer extension ${project.version} (compiled classes only)
*----------------------------------------------------------------+---------------------------------------------------------------------+
super-csv-dozer/super-csv-dozer-${project.version}-sources.jar | The Super CSV Dozer extension source code
*----------------------------------------------------------------+---------------------------------------------------------------------+
super-csv-dozer/super-csv-dozer-${project.version}-javadoc.jar | The Super CSV Dozer extension Javadoc documentation
*----------------------------------------------------------------+---------------------------------------------------------------------+
super-csv-dozer/lib | The Super CSV Dozer extension's dependencies (including Dozer)
*----------------------------------------------------------------+---------------------------------------------------------------------+
super-csv-java8/super-csv-java8-${project.version}.jar | Super CSV Java 8 extension ${project.version} (compiled classes only)
*----------------------------------------------------------------+---------------------------------------------------------------------+
super-csv-java8/super-csv-java8-${project.version}-sources.jar | The Super CSV Java 8 extension source code
*----------------------------------------------------------------+---------------------------------------------------------------------+
super-csv-java8/super-csv-java8-${project.version}-javadoc.jar | The Super CSV Java 8 extension Javadoc documentation
*----------------------------------------------------------------+---------------------------------------------------------------------+
super-csv-joda/super-csv-joda-${project.version}.jar | Super CSV Joda-Time extension ${project.version} (compiled classes only)
*----------------------------------------------------------------+---------------------------------------------------------------------+
super-csv-joda/super-csv-joda-${project.version}-sources.jar | The Super CSV Joda-Time extension source code
*----------------------------------------------------------------+---------------------------------------------------------------------+
super-csv-joda/super-csv-joda-${project.version}-javadoc.jar | The Super CSV Joda-Time extension Javadoc documentation
*----------------------------------------------------------------+---------------------------------------------------------------------+
super-csv-joda/lib | The Super CSV Joda-Time extension's dependencies (Joda-Time)
*----------------------------------------------------------------+---------------------------------------------------------------------+
super-csv-2.4.0/src/site/apt/dozer.apt 0000664 0000000 0000000 00000014335 13271545624 0017633 0 ustar 00root root 0000000 0000000 ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
~~ Copyright 2007 Kasper B. Graversen
~~
~~ Licensed under the Apache License, Version 2.0 (the "License");
~~ you may not use this file except in compliance with the License.
~~ You may obtain a copy of the License at
~~
~~ http://www.apache.org/licenses/LICENSE-2.0
~~
~~ Unless required by applicable law or agreed to in writing, software
~~ distributed under the License is distributed on an "AS IS" BASIS,
~~ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
~~ See the License for the specific language governing permissions and
~~ limitations under the License.
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-------------------------
Super CSV Dozer extension
-------------------------
Super CSV Dozer extension
The {{{./super-csv-dozer/index.html}Super CSV Dozer extension}} integrates Super CSV with {{{http://dozer.sourceforge.net}Dozer}},
a powerful Javabean mapping library.
Typically, Dozer requires lots of XML configuration but the addition of
{{{http://dozer.sourceforge.net/documentation/apimappings.html}API mapping}} allows Super CSV to set up Dozer mappings dynamically.
The use of Dozer allows {{{./apidocs/org/supercsv/io/dozer/CsvDozerBeanReader.html}CsvDozerBeanReader}} and
{{{./apidocs/org/supercsv/io/dozer/CsvDozerBeanWriter.html}CsvDozerBeanWriter}} to map simple fields
(the same as {{{./apidocs/org/supercsv/io/CsvBeanReader.html}CsvBeanReader}} and
{{{./apidocs/org/supercsv/io/CsvBeanWriter.html}CsvBeanWriter}}),
but to also perform deep mapping and index-based mapping as well!
Check out the {{{./examples_dozer.html}examples}}, or read on for more information.
* Deep mapping
{{{http://dozer.sourceforge.net/documentation/deepmapping.html}Deep mapping}} allows you to make use of the relationships
between your classes.
For example, if your class had an <<<Address>>> field, you could utilize deep mapping as follows
(assuming there are valid getters/setters defined for <<<address>>>, <<<city>>> and <<<name>>> in the 3 involved classes):
------------------------
address.city.name
------------------------
* Indexed-based mapping
{{{http://dozer.sourceforge.net/documentation/indexmapping.html}Index-based mapping}} allows you to access elements of arrays and
Collections by their index.
For example, if your class had a collection of Addresses, you could utilize index-based mapping
to access the first one as follows:
------------------------
addresses[0]
------------------------
You can even combine index-based mapping with deep mapping:
------------------------
addresses[0].city.name
------------------------
* Logging
Dozer uses {{{http://www.slf4j.org}SLF4J}} for logging. By default it will use a no-operation implementation (i.e. no logging),
but you can use any of the supported implementations (logback, log4j, slf4j-simple) by placing the appropriate binding jar on the classpath.
See the {{{http://www.slf4j.org/manual.html}SLF4J manual}} for more details.
* Reference Mapping XML Configuration
Most of the time you'll want to let Super CSV take care of the dozer configuration by simply calling the <<<configureBeanMapping()>>> method.
However, you might want to make use of the advanced features of Dozer (such as custom converters, bean factories, etc).
In this case, you can supply Super CSV with a pre-configured DozerBeanMapper.
The following XML is provided as a reference - it's the XML configuration used in the project's unit tests.
The <<<CsvDozerBeanData>>> class is used internally as the input/output of any Dozer mapping (each indexed column represents a column of CSV).
At a minimum, you should replace the <<<SurveyResponse>>> class with the class you're mapping,
and update the field mappings as appropriate (but try not to change the XML attributes, as they're important!).
+---------------------------------------------------------------------------------------------------+
<?xml version="1.0" encoding="UTF-8"?>
<mappings xmlns="http://dozer.sourceforge.net"
    xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
    xsi:schemaLocation="http://dozer.sourceforge.net
    http://dozer.sourceforge.net/schema/beanmapping.xsd">

  <mapping type="one-way">
    <class-a>org.supercsv.io.dozer.CsvDozerBeanData</class-a>
    <class-b>org.supercsv.mock.dozer.SurveyResponse</class-b>
    <field>
      <a>columns[0]</a>
      <b>age</b>
    </field>
    <field>
      <a>columns[1]</a>
      <b>consentGiven</b>
    </field>
    <field>
      <a>columns[2]</a>
      <b>answers[0].questionNo</b>
    </field>
    <field>
      <a>columns[3]</a>
      <b>answers[0].answer</b>
    </field>
    <field>
      <a>columns[4]</a>
      <b>answers[1].questionNo</b>
    </field>
    <field>
      <a>columns[5]</a>
      <b>answers[1].answer</b>
    </field>
    <field>
      <a>columns[6]</a>
      <b>answers[2].questionNo</b>
    </field>
    <field>
      <a>columns[7]</a>
      <b>answers[2].answer</b>
    </field>
  </mapping>

  <mapping type="one-way">
    <class-a>org.supercsv.mock.dozer.SurveyResponse</class-a>
    <class-b>org.supercsv.io.dozer.CsvDozerBeanData</class-b>
    <field>
      <a>age</a>
      <b>columns[0]</b>
    </field>
    <field>
      <a>consentGiven</a>
      <b>columns[1]</b>
    </field>
    <field>
      <a>answers[0].questionNo</a>
      <b>columns[2]</b>
    </field>
    <field>
      <a>answers[0].answer</a>
      <b>columns[3]</b>
    </field>
    <field>
      <a>answers[1].questionNo</a>
      <b>columns[4]</b>
    </field>
    <field>
      <a>answers[1].answer</a>
      <b>columns[5]</b>
    </field>
    <field>
      <a>answers[2].questionNo</a>
      <b>columns[6]</b>
    </field>
    <field>
      <a>answers[2].answer</a>
      <b>columns[7]</b>
    </field>
  </mapping>
</mappings>
+---------------------------------------------------------------------------------------------------+ super-csv-2.4.0/src/site/apt/examples_dozer.apt 0000664 0000000 0000000 00000032147 13271545624 0021532 0 ustar 00root root 0000000 0000000 ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
~~ Copyright 2007 Kasper B. Graversen
~~
~~ Licensed under the Apache License, Version 2.0 (the "License");
~~ you may not use this file except in compliance with the License.
~~ You may obtain a copy of the License at
~~
~~ http://www.apache.org/licenses/LICENSE-2.0
~~
~~ Unless required by applicable law or agreed to in writing, software
~~ distributed under the License is distributed on an "AS IS" BASIS,
~~ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
~~ See the License for the specific language governing permissions and
~~ limitations under the License.
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
----------------------------------------
Reading and writing CSV files with Dozer
----------------------------------------
Reading and writing CSV files with Dozer
This page contains some examples of reading and writing CSV files using Super CSV and Dozer.
For a closer look, refer to the {{{./xref-test/org/supercsv/example/dozer/Reading.html}reading}} and
{{{./xref-test/org/supercsv/example/dozer/Writing.html}writing}} example source.
If you haven't already, check out the {{{./dozer.html}background}} on the Super CSV Dozer extension.
* Example CSV file
Here is an example CSV file that represents responses to a survey (we'll be using this in the following reading examples).
It has a header and 3 rows of data, all with 8 columns.
---------------------------------------------------------------------------------------------------------------
age,consentGiven,questionNo1,answer1,questionNo2,answer2,questionNo3,answer3
18,Y,1,Twelve,2,Albert Einstein,3,Big Bang Theory
,Y,1,Thirteen,2,Nikola Tesla,3,Stargate
42,N,1,,2,Carl Sagan,3,Star Wars
---------------------------------------------------------------------------------------------------------------
* Reading with CsvDozerBeanReader
{{{./apidocs/org/supercsv/io/dozer/CsvDozerBeanReader.html}CsvDozerBeanReader}} is the most powerful CSV reader.
The example reads each row from the example CSV file into a {{{./xref-test/org/supercsv/mock/dozer/SurveyResponse.html}SurveyResponse}} bean
, which has a Collection of {{{./xref-test/org/supercsv/mock/dozer/Answer.html}Answer}}s.
To do this requires the following field mapping (notice that the first two mappings are the same as you'd have for
CsvBeanReader, but the rest use indexed and deep mapping).
+-------------------------------------------------------------------------------------------------------------+
private static final String[] FIELD_MAPPING = new String[] {
"age", // simple field mapping (like CsvBeanReader)
"consentGiven", // as above
"answers[0].questionNo", // indexed (first element) + deep mapping
"answers[0].answer",
"answers[1].questionNo", // indexed (second element) + deep mapping
"answers[1].answer",
"answers[2].questionNo",
"answers[2].answer" };
+-------------------------------------------------------------------------------------------------------------+
If you are familiar with the standard CsvBeanReader, you'll notice that using CsvDozerBeanReader is very similar.
The main difference is that CsvDozerBeanReader requires you to configure it (with the <<<configureBeanMapping()>>> method)
prior to reading. You can still use the result of <<<getHeader(true)>>> as your field mapping, but you'll have to supply
your own if you want to use deep mapping or index-based mapping.
+-------------------------------------------------------------------------------------------------------------+
/**
* An example of reading using CsvDozerBeanReader.
*/
private static void readWithCsvDozerBeanReader() throws Exception {
final CellProcessor[] processors = new CellProcessor[] {
new Optional(new ParseInt()), // age
new ParseBool(), // consent
new ParseInt(), // questionNo 1
new Optional(), // answer 1
new ParseInt(), // questionNo 2
new Optional(), // answer 2
new ParseInt(), // questionNo 3
new Optional() // answer 3
};
ICsvDozerBeanReader beanReader = null;
try {
beanReader = new CsvDozerBeanReader(new FileReader(CSV_FILENAME), CsvPreference.STANDARD_PREFERENCE);
beanReader.getHeader(true); // ignore the header
beanReader.configureBeanMapping(SurveyResponse.class, FIELD_MAPPING);
SurveyResponse surveyResponse;
while( (surveyResponse = beanReader.read(SurveyResponse.class, processors)) != null ) {
System.out.println(String.format("lineNo=%s, rowNo=%s, surveyResponse=%s", beanReader.getLineNumber(),
beanReader.getRowNumber(), surveyResponse));
}
}
finally {
if( beanReader != null ) {
beanReader.close();
}
}
}
+-------------------------------------------------------------------------------------------------------------+
Output:
---------------------------------------------------------------------------------------------------------------
lineNo=2, rowNo=2, surveyResponse=SurveyResponse [age=18, consentGiven=true, answers=[Answer [questionNo=1, answer=Twelve], Answer [questionNo=2, answer=Albert Einstein], Answer [questionNo=3, answer=Big Bang Theory]]]
lineNo=3, rowNo=3, surveyResponse=SurveyResponse [age=null, consentGiven=true, answers=[Answer [questionNo=1, answer=Thirteen], Answer [questionNo=2, answer=Nikola Tesla], Answer [questionNo=3, answer=Stargate]]]
lineNo=4, rowNo=4, surveyResponse=SurveyResponse [age=42, consentGiven=false, answers=[Answer [questionNo=1, answer=null], Answer [questionNo=2, answer=Carl Sagan], Answer [questionNo=3, answer=Star Wars]]]
---------------------------------------------------------------------------------------------------------------
** Indexed mapping and hints
In the above example Dozer creates each <<<Answer>>> because of the deep mapping (<<<answers[0].answer>>>).
If you're using indexed mapping without deep mapping (e.g. <<<answers[0]>>>) and your array/Collection contains
standard Java types (String, Integer, etc) then that's fine, but if it contains a custom type then Dozer needs
a few hints for it to work correctly.
In the following example, a custom cell processor has been written to parse each answer column as an <<<Answer>>> bean,
and this has been combined with indexed mapping. Notice that when the bean mapping is configured, a hint is specified
for those indexed mappings - without it, Dozer populates each element with an empty collection (yikes!).
+-------------------------------------------------------------------------------------------------------------+
/**
* An example of reading using CsvDozerBeanReader that uses indexed mapping and a cell processor
* to read into a List of Answer beans (this requires a hint).
*/
private static void readWithCsvDozerBeanReaderUsingIndexMappingAndHints() throws Exception {
// simple cell processor that creates an Answer with a value
final CellProcessor parseAnswer = new CellProcessorAdaptor() {
public Object execute(Object value, CsvContext context) {
return new Answer(null, (String) value);
}
};
final CellProcessor[] processors = new CellProcessor[] {
new Optional(new ParseInt()), // age
null, // consent
null, // questionNo 1
new Optional(parseAnswer), // answer 1
null, // questionNo 2
new Optional(parseAnswer), // answer 2
null, // questionNo 3
new Optional(parseAnswer) // answer 3
};
// no deep mapping (answers[0].answer) required as we're using a cell processor to create the bean
final String[] fieldMapping = {"age", null, null, "answers[0]", null, "answers[1]", null, "answers[2]"};
// the indexed mappings need a hint for Dozer to work
final Class>[] hintTypes = {null, null, null, Answer.class, null, Answer.class, null, Answer.class};
ICsvDozerBeanReader beanReader = null;
try {
beanReader = new CsvDozerBeanReader(new FileReader(CSV_FILENAME), CsvPreference.STANDARD_PREFERENCE);
beanReader.getHeader(true); // ignore the header
beanReader.configureBeanMapping(SurveyResponse.class, fieldMapping, hintTypes);
SurveyResponse surveyResponse;
while( (surveyResponse = beanReader.read(SurveyResponse.class, processors)) != null ) {
System.out.println(String.format("lineNo=%s, rowNo=%s, surveyResponse=%s", beanReader.getLineNumber(),
beanReader.getRowNumber(), surveyResponse));
}
}
finally {
if( beanReader != null ) {
beanReader.close();
}
}
}
+-------------------------------------------------------------------------------------------------------------+
Output:
---------------------------------------------------------------------------------------------------------------
lineNo=2, rowNo=2, surveyResponse=SurveyResponse [age=18, consentGiven=null, answers=[Answer [questionNo=null, answer=Twelve], Answer [questionNo=null, answer=Albert Einstein], Answer [questionNo=null, answer=Big Bang Theory]]]
lineNo=3, rowNo=3, surveyResponse=SurveyResponse [age=0, consentGiven=null, answers=[Answer [questionNo=null, answer=Thirteen], Answer [questionNo=null, answer=Nikola Tesla], Answer [questionNo=null, answer=Stargate]]]
lineNo=4, rowNo=4, surveyResponse=SurveyResponse [age=42, consentGiven=null, answers=[null, Answer [questionNo=null, answer=Carl Sagan], Answer [questionNo=null, answer=Star Wars]]]
---------------------------------------------------------------------------------------------------------------
* Partial reading with CsvDozerBeanReader
Partial reading with CsvDozerBeanReader is virtually identical to CsvBeanReader.
See the partial reading example in the {{{./xref-test/org/supercsv/example/dozer/Reading.html}reading example source}}.
* Writing with CsvDozerBeanWriter
{{{./apidocs/org/supercsv/io/dozer/CsvDozerBeanWriter.html}CsvDozerBeanWriter}} is the most powerful CSV writer.
The example writes each CSV row from a {{{./xref-test/org/supercsv/mock/dozer/SurveyResponse.html}SurveyResponse}} bean
, which has a Collection of {{{./xref-test/org/supercsv/mock/dozer/Answer.html}Answer}}s.
It uses exactly the same field mapping as the reading example above, and once again you'll notice that CsvDozerBeanWriter
requires you to configure it (with the <<<configureBeanMapping()>>> method) prior to writing.
+-------------------------------------------------------------------------------------------------------------+
/**
* An example of writing using CsvDozerBeanWriter.
*/
private static void writeWithDozerCsvBeanWriter() throws Exception {
final CellProcessor[] processors = new CellProcessor[] {
new Token(0, null), // age
new FmtBool("Y", "N"), // consent
new NotNull(), // questionNo 1
new Optional(), // answer 1
new NotNull(), // questionNo 2
new Optional(), // answer 2
new NotNull(), // questionNo 3
new Optional() }; // answer 3
// create the survey responses to write
SurveyResponse response1 = new SurveyResponse(18, true, Arrays.asList(new Answer(1, "Twelve"), new Answer(2,
"Albert Einstein"), new Answer(3, "Big Bang Theory")));
SurveyResponse response2 = new SurveyResponse(0, true, Arrays.asList(new Answer(1, "Thirteen"), new Answer(2,
"Nikola Tesla"), new Answer(3, "Stargate")));
SurveyResponse response3 = new SurveyResponse(42, false, Arrays.asList(new Answer(1, null), new Answer(2,
"Carl Sagan"), new Answer(3, "Star Wars")));
final List surveyResponses = Arrays.asList(response1, response2, response3);
ICsvDozerBeanWriter beanWriter = null;
try {
beanWriter = new CsvDozerBeanWriter(new FileWriter("target/writeWithCsvDozerBeanWriter.csv"),
CsvPreference.STANDARD_PREFERENCE);
// configure the mapping from the fields to the CSV columns
beanWriter.configureBeanMapping(SurveyResponse.class, FIELD_MAPPING);
// write the header
beanWriter.writeHeader("age", "consentGiven", "questionNo1", "answer1", "questionNo2", "answer2",
"questionNo3", "answer3");
// write the beans
for( final SurveyResponse surveyResponse : surveyResponses ) {
beanWriter.write(surveyResponse, processors);
}
}
finally {
if( beanWriter != null ) {
beanWriter.close();
}
}
}
+-------------------------------------------------------------------------------------------------------------+
Output:
---------------------------------------------------------------------------------------------------------------
age,consentGiven,questionNo1,answer1,questionNo2,answer2,questionNo3,answer3
18,Y,1,Twelve,2,Albert Einstein,3,Big Bang Theory
age not supplied,Y,1,Thirteen,2,Nikola Tesla,3,Stargate
42,N,1,not answered,2,Carl Sagan,3,Star Wars
---------------------------------------------------------------------------------------------------------------
* Partial writing with CsvDozerBeanWriter
Partial writing with CsvDozerBeanWriter is virtually identical to CsvBeanWriter.
See the partial writing example in the {{{./xref-test/org/supercsv/example/dozer/Writing.html}writing example source}}.
super-csv-2.4.0/src/site/apt/examples_jdbc.apt 0000664 0000000 0000000 00000013243 13271545624 0021305 0 ustar 00root root 0000000 0000000 ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
~~ Copyright 2007 Kasper B. Graversen
~~
~~ Licensed under the Apache License, Version 2.0 (the "License");
~~ you may not use this file except in compliance with the License.
~~ You may obtain a copy of the License at
~~
~~ http://www.apache.org/licenses/LICENSE-2.0
~~
~~ Unless required by applicable law or agreed to in writing, software
~~ distributed under the License is distributed on an "AS IS" BASIS,
~~ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
~~ See the License for the specific language governing permissions and
~~ limitations under the License.
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-----------------------------------------
Writing CSV files with CsvResultSetWriter
-----------------------------------------
Writing CSV files with CsvResultSetWriter
This page contains an example of writing a CSV file using Super CSV CsvResultSetWriter.
You can view the full source of the example {{{./xref-test/org/supercsv/example/Writing.html}here}}.
* Example cell processor configuration
The example uses the following {{{./cell_processors.html}cell processor}} configuration.
It demonstrates:
* mandatory columns (<<<new NotNull()>>>)
* optional columns (<<<new Optional()>>>), with further processing
* formatting of Dates (<<<new FmtDate()>>>) and Booleans (<<<new FmtBool()>>>)
* constraint validation of numeric ranges (<<<new LMinMax()>>>) and uniqueness (<<<new UniqueHashCode()>>>)
[]
Don't forget that you can {{{./examples_new_cell_processor.html}write your own cell processors}} if you want!
+-------------------------------------------------------------------------------------------------------------+
/**
* Sets up the processors used for the examples. There are 10 CSV columns, so 10 processors are defined. All values
* are converted to Strings before writing (there's no need to convert them), and null values will be written as
* empty columns (no need to convert them to "").
*
* @return the cell processors
*/
private static CellProcessor[] getProcessors() {
final CellProcessor[] processors = new CellProcessor[] {
new UniqueHashCode(), // customerNo (must be unique)
new NotNull(), // firstName
new NotNull(), // lastName
new FmtDate("dd/MM/yyyy"), // birthDate
new NotNull(), // mailingAddress
new Optional(new FmtBool("Y", "N")), // married
new Optional(), // numberOfKids
new NotNull(), // favouriteQuote
new NotNull(), // email
new LMinMax(0L, LMinMax.MAX_LONG) // loyaltyPoints
};
return processors;
}
+-------------------------------------------------------------------------------------------------------------+
* Writing with CsvResultSetWriter
The example writes a {{{./xref-test/org/supercsv/mock/ResultSetMock.html}mock implementation}} of <<<ResultSet>>> to a CSV file.
Note that the cell processors are compatible with their associated field types in the ResultSet
(e.g. <<<birthDate>>> is a <<<Date>>> in the ResultSet, and uses the <<<FmtDate>>> cell processor).
+-------------------------------------------------------------------------------------------------------------+
/**
* An example of writing using CsvResultSetWriter
*/
private static void writeWithResultSetWriter() throws Exception {
// create ResultSet mock
final String[] header = new String[] { "customerNo", "firstName", "lastName", "birthDate",
"mailingAddress", "married", "numberOfKids", "favouriteQuote", "email", "loyaltyPoints" };
final Object[][] johnData = new Object[][] {{"1", "John", "Dunbar",
new GregorianCalendar(1945, Calendar.JUNE, 13).getTime(),
"1600 Amphitheatre Parkway\nMountain View, CA 94043\nUnited States", null, null,
"\"May the Force be with you.\" - Star Wars", "jdunbar@gmail.com", 0L}};
final ResultSet john = new ResultSetMock(johnData, header);
final Object[][] bobData = new Object[][] {{"2", "Bob", "Down",
new GregorianCalendar(1919, Calendar.FEBRUARY, 25).getTime(),
"1601 Willow Rd.\nMenlo Park, CA 94025\nUnited States", true, 0,
"\"Frankly, my dear, I don't give a damn.\" - Gone With The Wind", "bobdown@hotmail.com", 123456L}};
final ResultSet bob = new ResultSetMock(bobData, header);
ICsvResultSetWriter resultSetWriter = null;
try {
resultSetWriter = new CsvResultSetWriter(new FileWriter("target/writeWithCsvResultSetWriter.csv"),
CsvPreference.STANDARD_PREFERENCE);
final CellProcessor[] processors = getProcessors();
// write csv file from ResultSet
resultSetWriter.write(john, processors);
resultSetWriter.write(bob, processors);
} finally {
if ( resultSetWriter != null ) {
resultSetWriter.close();
}
}
}
+-------------------------------------------------------------------------------------------------------------+
Output:
---------------------------------------------------------------------------------------------------------------
customerNo,firstName,lastName,birthDate,mailingAddress,married,numberOfKids,favouriteQuote,email,loyaltyPoints
1,John,Dunbar,13/06/1945,"1600 Amphitheatre Parkway
Mountain View, CA 94043
United States",,,"""May the Force be with you."" - Star Wars",jdunbar@gmail.com,0
customerNo,firstName,lastName,birthDate,mailingAddress,married,numberOfKids,favouriteQuote,email,loyaltyPoints
2,Bob,Down,25/02/1919,"1601 Willow Rd.
Menlo Park, CA 94025
United States",Y,0,"""Frankly, my dear, I don't give a damn."" - Gone With The Wind",bobdown@hotmail.com,123456
--------------------------------------------------------------------------------------------------------------- super-csv-2.4.0/src/site/apt/examples_new_cell_processor.apt 0000664 0000000 0000000 00000011312 13271545624 0024265 0 ustar 00root root 0000000 0000000 ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
~~ Copyright 2007 Kasper B. Graversen
~~
~~ Licensed under the Apache License, Version 2.0 (the "License");
~~ you may not use this file except in compliance with the License.
~~ You may obtain a copy of the License at
~~
~~ http://www.apache.org/licenses/LICENSE-2.0
~~
~~ Unless required by applicable law or agreed to in writing, software
~~ distributed under the License is distributed on an "AS IS" BASIS,
~~ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
~~ See the License for the specific language governing permissions and
~~ limitations under the License.
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
------------------------------
Writing custom cell processors
------------------------------
Writing custom cell processors
Super CSV provides a wide variety of useful cell processors, but you are free to write your own if you need to.
If you think other people might benefit from your custom cell processor, send us a patch and we'll consider adding
it to the next version of Super CSV.
So how do you write a custom cell processor?
Let's say you're trying to read a CSV file that has a day column, and you've written your own enumeration
to represent that (ignoring the fact that Super CSV now has a {{{./apidocs/org/supercsv/cellprocessor/ParseEnum.html}ParseEnum}}
processor that does this already...this is just an example!).
+------------------------------------------------------------------------------------------------------+
package org.supercsv.example;
/**
* An enumeration of days.
*/
public enum Day {
MONDAY, TUESDAY, WEDNESDAY, THURSDAY, FRIDAY, SATURDAY, SUNDAY
}
+------------------------------------------------------------------------------------------------------+
You could write the following processor to parse the column to your enum (ignoring the case of the input).
+------------------------------------------------------------------------------------------------------+
package org.supercsv.example;
import org.supercsv.cellprocessor.CellProcessorAdaptor;
import org.supercsv.cellprocessor.ift.CellProcessor;
import org.supercsv.exception.SuperCsvCellProcessorException;
import org.supercsv.util.CsvContext;
/**
* An example of a custom cell processor.
*/
public class ParseDay extends CellProcessorAdaptor {
public ParseDay() {
super();
}
public ParseDay(CellProcessor next) {
// this constructor allows other processors to be chained after ParseDay
super(next);
}
public Object execute(Object value, CsvContext context) {
validateInputNotNull(value, context); // throws an Exception if the input is null
for (Day day : Day.values()){
if (day.name().equalsIgnoreCase(value.toString())){
// passes the Day enum to the next processor in the chain
return next.execute(day, context);
}
}
throw new SuperCsvCellProcessorException(
String.format("Could not parse '%s' as a day", value), context, this);
}
}
+------------------------------------------------------------------------------------------------------+
The important things to note above are:
* the processor must extend {{{./apidocs/org/supercsv/cellprocessor/CellProcessorAdaptor.html}CellProcessorAdaptor}} -
this ensures it implements the <<<CellProcessor>>> interface and can be chained to other processors
* it has a no-args constructor (for when this is the last or only processor in the chain)
* it has a constructor that allows another processor to be chained afterwards (it must call <<<super(next)>>>)
* if the processor required further configuration, additional parameters could be added to the constructors
* input is mandatory for this processor, so it calls its inherited <<<validateInputNotNull()>>> method,
which throws an Exception for null input
* the return statement for the <<<execute()>>> method actually invokes the <<<execute()>>> method of the next processor in the chain.
If there is no next processor, the value will simply be returned, otherwise the next processor is free to perform additional processing.
If your processor doesn't allow chaining at all, then you could simply return the value instead (i.e. <<<return day;>>> in the above example).
* if the processor fails to parse the input (it doesn't match any of the days), it throws an Exception with
a meaningful message, the current context (which will contain the line/row/column numbers), and a reference to the
processor.
[]
For more ideas, take a look at the existing cell processors in the {{{./xref/index.html}project source}}.
super-csv-2.4.0/src/site/apt/examples_partial_reading.apt 0000664 0000000 0000000 00000014654 13271545624 0023537 0 ustar 00root root 0000000 0000000 ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
~~ Copyright 2007 Kasper B. Graversen
~~
~~ Licensed under the Apache License, Version 2.0 (the "License");
~~ you may not use this file except in compliance with the License.
~~ You may obtain a copy of the License at
~~
~~ http://www.apache.org/licenses/LICENSE-2.0
~~
~~ Unless required by applicable law or agreed to in writing, software
~~ distributed under the License is distributed on an "AS IS" BASIS,
~~ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
~~ See the License for the specific language governing permissions and
~~ limitations under the License.
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
---------------
Partial reading
---------------
Partial reading
Partial reading allows you to ignore columns when reading CSV files by simply setting the appropriate header columns
to <<<null>>>.
The examples on this page use the same example CSV file as the {{{./examples_reading.html}reading examples}}, and
the full source can be found {{{./xref-test/org/supercsv/example/Reading.html}here}}.
* Partial reading with CsvBeanReader
As you can see from the output of this example, the fields associated with the ignored columns kept their default values -
only the <<<customerNo>>>, <<<firstName>>>, and <<<lastName>>> are populated.
Also note that the cell processors associated with the ignored columns were also set to <<<null>>> to avoid any unnecessary
processing (cell processors are always executed).
+-------------------------------------------------------------------------------------------------------------+
/**
* An example of partial reading using CsvBeanReader.
*/
private static void partialReadWithCsvBeanReader() throws Exception {
ICsvBeanReader beanReader = null;
try {
beanReader = new CsvBeanReader(new FileReader(CSV_FILENAME), CsvPreference.STANDARD_PREFERENCE);
beanReader.getHeader(true); // skip past the header (we're defining our own)
// only map the first 3 columns - setting header elements to null means those columns are ignored
final String[] header = new String[] { "customerNo", "firstName", "lastName", null, null, null, null, null,
null, null };
// no processing required for ignored columns
final CellProcessor[] processors = new CellProcessor[] { new UniqueHashCode(), new NotNull(),
new NotNull(), null, null, null, null, null, null, null };
CustomerBean customer;
while( (customer = beanReader.read(CustomerBean.class, header, processors)) != null ) {
System.out.println(String.format("lineNo=%s, rowNo=%s, customer=%s", beanReader.getLineNumber(),
beanReader.getRowNumber(), customer));
}
}
finally {
if( beanReader != null ) {
beanReader.close();
}
}
}
+-------------------------------------------------------------------------------------------------------------+
Output:
---------------------------------------------------------------------------------------------------------------
lineNo=4, rowNo=2, customer=CustomerBean [customerNo=1, firstName=John, lastName=Dunbar, birthDate=null, mailingAddress=null, married=null, numberOfKids=null, favouriteQuote=null, email=null, loyaltyPoints=0]
lineNo=7, rowNo=3, customer=CustomerBean [customerNo=2, firstName=Bob, lastName=Down, birthDate=null, mailingAddress=null, married=null, numberOfKids=null, favouriteQuote=null, email=null, loyaltyPoints=0]
lineNo=10, rowNo=4, customer=CustomerBean [customerNo=3, firstName=Alice, lastName=Wunderland, birthDate=null, mailingAddress=null, married=null, numberOfKids=null, favouriteQuote=null, email=null, loyaltyPoints=0]
lineNo=13, rowNo=5, customer=CustomerBean [customerNo=4, firstName=Bill, lastName=Jobs, birthDate=null, mailingAddress=null, married=null, numberOfKids=null, favouriteQuote=null, email=null, loyaltyPoints=0]
---------------------------------------------------------------------------------------------------------------
* Partial reading with CsvMapReader
As you can see from the output of this example, the output Map only has entries for <<<customerNo>>>, <<<firstName>>>, and <<<lastName>>> - the other fields were ignored.
Unlike the CsvBeanReader example above, this example defines processors for all columns. This means that constraint validation is still applied to the ignored columns,
but they don't appear in the output Map.
+-------------------------------------------------------------------------------------------------------------+
/**
* An example of partial reading using CsvMapReader.
*/
private static void partialReadWithCsvMapReader() throws Exception {
ICsvMapReader mapReader = null;
try {
mapReader = new CsvMapReader(new FileReader(CSV_FILENAME), CsvPreference.STANDARD_PREFERENCE);
mapReader.getHeader(true); // skip past the header (we're defining our own)
// only map the first 3 columns - setting header elements to null means those columns are ignored
final String[] header = new String[] { "customerNo", "firstName", "lastName", null, null, null, null, null,
null, null };
// apply some constraints to ignored columns (just because we can)
final CellProcessor[] processors = new CellProcessor[] { new UniqueHashCode(), new NotNull(),
new NotNull(), new NotNull(), new NotNull(), new Optional(), new Optional(), new NotNull(),
new NotNull(), new LMinMax(0L, LMinMax.MAX_LONG) };
Map customerMap;
while( (customerMap = mapReader.read(header, processors)) != null ) {
System.out.println(String.format("lineNo=%s, rowNo=%s, customerMap=%s", mapReader.getLineNumber(),
mapReader.getRowNumber(), customerMap));
}
}
finally {
if( mapReader != null ) {
mapReader.close();
}
}
}
+-------------------------------------------------------------------------------------------------------------+
Output:
---------------------------------------------------------------------------------------------------------------
lineNo=4, rowNo=2, customerMap={lastName=Dunbar, customerNo=1, firstName=John}
lineNo=7, rowNo=3, customerMap={lastName=Down, customerNo=2, firstName=Bob}
lineNo=10, rowNo=4, customerMap={lastName=Wunderland, customerNo=3, firstName=Alice}
lineNo=13, rowNo=5, customerMap={lastName=Jobs, customerNo=4, firstName=Bill}
---------------------------------------------------------------------------------------------------------------
super-csv-2.4.0/src/site/apt/examples_partial_writing.apt 0000664 0000000 0000000 00000017654 13271545624 0023614 0 ustar 00root root 0000000 0000000 ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
~~ Copyright 2007 Kasper B. Graversen
~~
~~ Licensed under the Apache License, Version 2.0 (the "License");
~~ you may not use this file except in compliance with the License.
~~ You may obtain a copy of the License at
~~
~~ http://www.apache.org/licenses/LICENSE-2.0
~~
~~ Unless required by applicable law or agreed to in writing, software
~~ distributed under the License is distributed on an "AS IS" BASIS,
~~ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
~~ See the License for the specific language governing permissions and
~~ limitations under the License.
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
---------------
Partial writing
---------------
Partial writing
Partial writing allows you to handle optional values in your data.
The full source for these examples can be found {{{./xref-test/org/supercsv/example/Writing.html}here}}.
* Partial writing with CsvBeanWriter
As you can see in this example, we're only writing 5 of the available fields from the bean and 2 of those are optional.
This example demonstrates the two options you have when writing optional fields:
[[1]] specifying a default value if the value is <<<null>>> by using <<<ConvertNullTo>>> - in this case <<<"no response">>> is written when <<<married>>> is <<<null>>>.
[[2]] writing an empty column if the value is <<<null>>> - as is done by specifying <<<new Optional()>>> for <<<numberOfKids>>>
(<<<new ConvertNullTo("")>>> would have the same effect as <<<new Optional()>>>, but it's not as meaningful)
[]
+-------------------------------------------------------------------------------------------------------------+
/**
* An example of partial writing using CsvBeanWriter.
*/
private static void partialWriteWithCsvBeanWriter() throws Exception {
// create the customer beans
final CustomerBean john = new CustomerBean("1", "John", "Dunbar",
new GregorianCalendar(1945, Calendar.JUNE, 13).getTime(),
"1600 Amphitheatre Parkway\nMountain View, CA 94043\nUnited States", null, null,
"\"May the Force be with you.\" - Star Wars", "jdunbar@gmail.com", 0L);
final CustomerBean bob = new CustomerBean("2", "Bob", "Down",
new GregorianCalendar(1919, Calendar.FEBRUARY, 25).getTime(),
"1601 Willow Rd.\nMenlo Park, CA 94025\nUnited States", true, 0,
"\"Frankly, my dear, I don't give a damn.\" - Gone With The Wind", "bobdown@hotmail.com", 123456L);
final List customers = Arrays.asList(john, bob);
ICsvBeanWriter beanWriter = null;
try {
beanWriter = new CsvBeanWriter(new FileWriter("target/partialWriteWithCsvBeanWriter.csv"),
CsvPreference.STANDARD_PREFERENCE);
// only map 5 of the 10 fields
final String[] header = new String[] { "customerNo", "firstName", "lastName", "married", "numberOfKids" };
// assign a default value for married (if null), and write numberOfKids as an empty column if null
final CellProcessor[] processors = new CellProcessor[] { new UniqueHashCode(), new NotNull(),
new NotNull(), new ConvertNullTo("no response", new FmtBool("yes", "no")), new Optional() };
// write the header
beanWriter.writeHeader(header);
// write the customer beans
for( final CustomerBean customer : customers ) {
beanWriter.write(customer, header, processors);
}
}
finally {
if( beanWriter != null ) {
beanWriter.close();
}
}
}
+-------------------------------------------------------------------------------------------------------------+
Output:
---------------------------------------------------------------------------------------------------------------
customerNo,firstName,lastName,married,numberOfKids
1,John,Dunbar,no response,
2,Bob,Down,yes,0
---------------------------------------------------------------------------------------------------------------
* Partial writing with CsvListWriter
This example is identical to the one above, but uses CsvListWriter.
+-------------------------------------------------------------------------------------------------------------+
/**
* An example of partial writing using CsvListWriter.
*/
private static void partialWriteWithCsvListWriter() throws Exception {
final String[] header = new String[] { "customerNo", "firstName", "lastName", "married", "numberOfKids" };
// create the customer Lists (CsvListWriter also accepts arrays!)
final List